Line data Source code
1 : /* Optimize by combining instructions for GNU compiler.
2 : Copyright (C) 1987-2026 Free Software Foundation, Inc.
3 :
4 : This file is part of GCC.
5 :
6 : GCC is free software; you can redistribute it and/or modify it under
7 : the terms of the GNU General Public License as published by the Free
8 : Software Foundation; either version 3, or (at your option) any later
9 : version.
10 :
11 : GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 : WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 : FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 : for more details.
15 :
16 : You should have received a copy of the GNU General Public License
17 : along with GCC; see the file COPYING3. If not see
18 : <http://www.gnu.org/licenses/>. */
19 :
20 : /* This module is essentially the "combiner" phase of the U. of Arizona
21 : Portable Optimizer, but redone to work on our list-structured
22 : representation for RTL instead of their string representation.
23 :
24 : The LOG_LINKS of each insn identify the most recent assignment
25 : to each REG used in the insn. It is a list of previous insns,
26 : each of which contains a SET for a REG that is used in this insn
27 : and not used or set in between. LOG_LINKs never cross basic blocks.
28 : They were set up by the preceding pass (lifetime analysis).
29 :
30 : We try to combine each pair of insns joined by a logical link.
31 : We also try to combine triplets of insns A, B and C when C has
32 : a link back to B and B has a link back to A. Likewise for a
33 : small number of quadruplets of insns A, B, C and D for which
34 : there's high likelihood of success.
35 :
36 : We check (with modified_between_p) to avoid combining in such a way
37 : as to move a computation to a place where its value would be different.
38 :
39 : Combination is done by mathematically substituting the previous
40 : insn(s) values for the regs they set into the expressions in
41 : the later insns that refer to these regs. If the result is a valid insn
42 : for our target machine, according to the machine description,
43 : we install it, delete the earlier insns, and update the data flow
44 : information (LOG_LINKS and REG_NOTES) for what we did.
45 :
46 : There are a few exceptions where the dataflow information isn't
47 : completely updated (however this is only a local issue since it is
48 : regenerated before the next pass that uses it):
49 :
50 : - reg_live_length is not updated
51 : - reg_n_refs is not adjusted in the rare case when a register is
52 : no longer required in a computation
53 : - there are extremely rare cases (see distribute_notes) when a
54 : REG_DEAD note is lost
55 : - a LOG_LINKS entry that refers to an insn with multiple SETs may be
56 : removed because there is no way to know which register it was
57 : linking
58 :
59 : To simplify substitution, we combine only when the earlier insn(s)
60 : consist of only a single assignment. To simplify updating afterward,
61 : we never combine when a subroutine call appears in the middle. */
62 :
63 : #include "config.h"
64 : #include "system.h"
65 : #include "coretypes.h"
66 : #include "backend.h"
67 : #include "target.h"
68 : #include "rtl.h"
69 : #include "tree.h"
70 : #include "cfghooks.h"
71 : #include "predict.h"
72 : #include "df.h"
73 : #include "memmodel.h"
74 : #include "tm_p.h"
75 : #include "optabs.h"
76 : #include "regs.h"
77 : #include "emit-rtl.h"
78 : #include "recog.h"
79 : #include "cgraph.h"
80 : #include "stor-layout.h"
81 : #include "cfgrtl.h"
82 : #include "cfgcleanup.h"
83 : /* Include expr.h after insn-config.h so we get HAVE_conditional_move. */
84 : #include "explow.h"
85 : #include "insn-attr.h"
86 : #include "rtlhooks-def.h"
87 : #include "expr.h"
88 : #include "tree-pass.h"
89 : #include "valtrack.h"
90 : #include "rtl-iter.h"
91 : #include "print-rtl.h"
92 : #include "function-abi.h"
93 : #include "rtlanal.h"
94 :
95 : /* Number of attempts to combine instructions in this function. */
96 :
97 : static int combine_attempts;
98 :
99 : /* Number of attempts that got as far as substitution in this function. */
100 :
101 : static int combine_merges;
102 :
103 : /* Number of instructions combined with added SETs in this function. */
104 :
105 : static int combine_extras;
106 :
107 : /* Number of instructions combined in this function. */
108 :
109 : static int combine_successes;
110 :
111 : /* combine_instructions may try to replace the right hand side of the
112 : second instruction with the value of an associated REG_EQUAL note
113 : before throwing it at try_combine. That is problematic when there
114 : is a REG_DEAD note for a register used in the old right hand side
115 : and can cause distribute_notes to do wrong things. This is the
116 : second instruction if it has been so modified, null otherwise. */
117 :
118 : static rtx_insn *i2mod;
119 :
120 : /* When I2MOD is nonnull, this is a copy of the old right hand side. */
121 :
122 : static rtx i2mod_old_rhs;
123 :
124 : /* When I2MOD is nonnull, this is a copy of the new right hand side. */
125 :
126 : static rtx i2mod_new_rhs;
127 :
128 : struct reg_stat_type {
129 : /* Record last point of death of (hard or pseudo) register n. */
130 : rtx_insn *last_death;
131 :
132 : /* Record last point of modification of (hard or pseudo) register n. */
133 : rtx_insn *last_set;
134 :
135 : /* The next group of fields allows the recording of the last value assigned
136 : to (hard or pseudo) register n. We use this information to see if an
137 : operation being processed is redundant given a prior operation performed
138 : on the register. For example, an `and' with a constant is redundant if
139 : all the zero bits are already known to be turned off.
140 :
141 : We use an approach similar to that used by cse, but change it in the
142 : following ways:
143 :
144 : (1) We do not want to reinitialize at each label.
145 : (2) It is useful, but not critical, to know the actual value assigned
146 : to a register. Often just its form is helpful.
147 :
148 : Therefore, we maintain the following fields:
149 :
150 : last_set_value the last value assigned
151 : last_set_label records the value of label_tick when the
152 : register was assigned
153 : last_set_table_tick records the value of label_tick when a
154 : value using the register is assigned
155 : last_set_invalid set to true when it is not valid
156 : to use the value of this register in some
157 : register's value
158 :
159 : To understand the usage of these tables, it is important to understand
160 : the distinction between the value in last_set_value being valid and
161 : the register being validly contained in some other expression in the
162 : table.
163 :
164 : (The next two parameters are out of date).
165 :
166 : reg_stat[i].last_set_value is valid if it is nonzero, and either
167 : reg_n_sets[i] is 1 or reg_stat[i].last_set_label == label_tick.
168 :
169 : Register I may validly appear in any expression returned for the value
170 : of another register if reg_n_sets[i] is 1. It may also appear in the
171 : value for register J if reg_stat[j].last_set_invalid is zero, or
172 : reg_stat[i].last_set_label < reg_stat[j].last_set_label.
173 :
174 : If an expression is found in the table containing a register which may
175 : not validly appear in an expression, the register is replaced by
176 : something that won't match, (clobber (const_int 0)). */
177 :
178 : /* Record last value assigned to (hard or pseudo) register n. */
179 :
180 : rtx last_set_value;
181 :
182 : /* Record the value of label_tick when an expression involving register n
183 : is placed in last_set_value. */
184 :
185 : int last_set_table_tick;
186 :
187 : /* Record the value of label_tick when the value for register n is placed in
188 : last_set_value. */
189 :
190 : int last_set_label;
191 :
192 : /* These fields are maintained in parallel with last_set_value and are
193 : used to store the mode in which the register was last set, the bits
194 : that were known to be zero when it was last set, and the number of
195 : sign bits copies it was known to have when it was last set. */
196 :
197 : unsigned HOST_WIDE_INT last_set_nonzero_bits;
198 : unsigned short last_set_sign_bit_copies;
199 : ENUM_BITFIELD(machine_mode) last_set_mode : MACHINE_MODE_BITSIZE;
200 :
201 : /* Set to true if references to register n in expressions should not be
202 : used. last_set_invalid is set nonzero when this register is being
203 : assigned to and last_set_table_tick == label_tick. */
204 :
205 : bool last_set_invalid;
206 :
207 : /* Some registers that are set more than once and used in more than one
208 : basic block are nevertheless always set in similar ways. For example,
209 : a QImode register may be loaded from memory in two places on a machine
210 : where byte loads zero extend.
211 :
212 : We record in the following fields if a register has some leading bits
213 : that are always equal to the sign bit, and what we know about the
214 : nonzero bits of a register, specifically which bits are known to be
215 : zero.
216 :
217 : If an entry is zero, it means that we don't know anything special. */
218 :
219 : unsigned short sign_bit_copies;
220 :
221 : unsigned HOST_WIDE_INT nonzero_bits;
222 :
223 : /* Record the value of the label_tick when the last truncation
224 : happened. The field truncated_to_mode is only valid if
225 : truncation_label == label_tick. */
226 :
227 : int truncation_label;
228 :
229 : /* Record the last truncation seen for this register. If truncation
230 : is not a nop to this mode we might be able to save an explicit
231 : truncation if we know that value already contains a truncated
232 : value. */
233 :
234 : ENUM_BITFIELD(machine_mode) truncated_to_mode : MACHINE_MODE_BITSIZE;
235 : };
236 :
237 :
238 : static vec<reg_stat_type> reg_stat;
239 :
240 : /* One plus the highest pseudo for which we track REG_N_SETS.
241 : regstat_init_n_sets_and_refs allocates the array for REG_N_SETS just once,
242 : but during combine_split_insns new pseudos can be created. As we don't have
243 : updated DF information in that case, it is hard to initialize the array
244 : after growing. The combiner only cares about REG_N_SETS (regno) == 1,
245 : so instead of growing the arrays, just assume all newly created pseudos
246 : during combine might be set multiple times. */
247 :
248 : static unsigned int reg_n_sets_max;
249 :
250 : /* Record the luid of the last insn that invalidated memory
251 : (anything that writes memory, and subroutine calls, but not pushes). */
252 :
253 : static int mem_last_set;
254 :
255 : /* Record the luid of the last CALL_INSN
256 : so we can tell whether a potential combination crosses any calls. */
257 :
258 : static int last_call_luid;
259 :
260 : /* When `subst' is called, this is the insn that is being modified
261 : (by combining in a previous insn). The PATTERN of this insn
262 : is still the old pattern partially modified and it should not be
263 : looked at, but this may be used to examine the successors of the insn
264 : to judge whether a simplification is valid. */
265 :
266 : static rtx_insn *subst_insn;
267 :
268 : /* This is the lowest LUID that `subst' is currently dealing with.
269 : get_last_value will not return a value if the register was set at or
270 : after this LUID. If not for this mechanism, we could get confused if
271 : I2 or I1 in try_combine were an insn that used the old value of a register
272 : to obtain a new value. In that case, we might erroneously get the
273 : new value of the register when we wanted the old one. */
274 :
275 : static int subst_low_luid;
276 :
277 : /* This contains any hard registers that are used in newpat; reg_dead_at_p
278 : must consider all these registers to be always live. */
279 :
280 : static HARD_REG_SET newpat_used_regs;
281 :
282 : /* This is an insn to which a LOG_LINKS entry has been added. If this
283 : insn is the earlier than I2 or I3, combine should rescan starting at
284 : that location. */
285 :
286 : static rtx_insn *added_links_insn;
287 :
288 : /* And similarly, for notes. */
289 :
290 : static rtx_insn *added_notes_insn;
291 :
292 : /* Basic block in which we are performing combines. */
293 : static basic_block this_basic_block;
294 : static bool optimize_this_for_speed_p;
295 :
296 :
297 : /* Length of the currently allocated uid_insn_cost array. */
298 :
299 : static int max_uid_known;
300 :
301 : /* The following array records the insn_cost for every insn
302 : in the instruction stream. */
303 :
304 : static int *uid_insn_cost;
305 :
306 : /* The following array records the LOG_LINKS for every insn in the
307 : instruction stream as struct insn_link pointers. */
308 :
309 : struct insn_link {
310 : rtx_insn *insn;
311 : unsigned int regno;
312 : int insn_count;
313 : struct insn_link *next;
314 : };
315 :
316 : static struct insn_link **uid_log_links;
317 :
318 : static inline int
319 748371930 : insn_uid_check (const_rtx insn)
320 : {
321 748371930 : int uid = INSN_UID (insn);
322 748371930 : gcc_checking_assert (uid <= max_uid_known);
323 748371930 : return uid;
324 : }
325 :
326 : #define INSN_COST(INSN) (uid_insn_cost[insn_uid_check (INSN)])
327 : #define LOG_LINKS(INSN) (uid_log_links[insn_uid_check (INSN)])
328 :
329 : #define FOR_EACH_LOG_LINK(L, INSN) \
330 : for ((L) = LOG_LINKS (INSN); (L); (L) = (L)->next)
331 :
332 : /* Links for LOG_LINKS are allocated from this obstack. */
333 :
334 : static struct obstack insn_link_obstack;
335 :
336 : /* Allocate a link. */
337 :
338 : static inline struct insn_link *
339 37659721 : alloc_insn_link (rtx_insn *insn, unsigned int regno, struct insn_link *next)
340 : {
341 37659721 : struct insn_link *l
342 37659721 : = (struct insn_link *) obstack_alloc (&insn_link_obstack,
343 : sizeof (struct insn_link));
344 37659721 : l->insn = insn;
345 37659721 : l->regno = regno;
346 37659721 : l->insn_count = 0;
347 37659721 : l->next = next;
348 37659721 : return l;
349 : }
350 :
351 : /* Incremented for each basic block. */
352 :
353 : static int label_tick;
354 :
355 : /* Reset to label_tick for each extended basic block in scanning order. */
356 :
357 : static int label_tick_ebb_start;
358 :
359 : /* Mode used to compute significance in reg_stat[].nonzero_bits. It is the
360 : largest integer mode that can fit in HOST_BITS_PER_WIDE_INT. */
361 :
362 : static scalar_int_mode nonzero_bits_mode;
363 :
364 : /* Nonzero when reg_stat[].nonzero_bits and reg_stat[].sign_bit_copies can
365 : be safely used. It is zero while computing them and after combine has
366 : completed. This former test prevents propagating values based on
367 : previously set values, which can be incorrect if a variable is modified
368 : in a loop. */
369 :
370 : static int nonzero_sign_valid;
371 :
372 :
373 : /* Record one modification to rtl structure
374 : to be undone by storing old_contents into *where. */
375 :
376 : enum undo_kind { UNDO_RTX, UNDO_INT, UNDO_MODE, UNDO_LINKS };
377 :
378 : struct undo
379 : {
380 : struct undo *next;
381 : enum undo_kind kind;
382 : union { rtx r; int i; machine_mode m; struct insn_link *l; } old_contents;
383 : union { rtx *r; int *i; int regno; struct insn_link **l; } where;
384 : };
385 :
386 : /* Record a bunch of changes to be undone, up to MAX_UNDO of them.
387 : num_undo says how many are currently recorded.
388 :
389 : other_insn is nonzero if we have modified some other insn in the process
390 : of working on subst_insn. It must be verified too. */
391 :
392 : struct undobuf
393 : {
394 : struct undo *undos;
395 : struct undo *frees;
396 : rtx_insn *other_insn;
397 : };
398 :
399 : static struct undobuf undobuf;
400 :
401 : /* Number of times the pseudo being substituted for
402 : was found and replaced. */
403 :
404 : static int n_occurrences;
405 :
406 : static rtx reg_nonzero_bits_for_combine (const_rtx, scalar_int_mode,
407 : scalar_int_mode,
408 : unsigned HOST_WIDE_INT *);
409 : static rtx reg_num_sign_bit_copies_for_combine (const_rtx, scalar_int_mode,
410 : scalar_int_mode,
411 : unsigned int *);
412 : static void do_SUBST (rtx *, rtx);
413 : static void do_SUBST_INT (int *, int);
414 : static void init_reg_last (void);
415 : static void setup_incoming_promotions (rtx_insn *);
416 : static void set_nonzero_bits_and_sign_copies (rtx, const_rtx, void *);
417 : static bool cant_combine_insn_p (rtx_insn *);
418 : static bool can_combine_p (rtx_insn *, rtx_insn *, rtx_insn *, rtx_insn *,
419 : rtx_insn *, rtx_insn *, rtx *, rtx *);
420 : static bool combinable_i3pat (rtx_insn *, rtx *, rtx, rtx, rtx,
421 : bool, bool, rtx *);
422 : static bool contains_muldiv (rtx);
423 : static rtx_insn *try_combine (rtx_insn *, rtx_insn *, rtx_insn *, rtx_insn *,
424 : bool *, rtx_insn *);
425 : static void undo_all (void);
426 : static void undo_commit (void);
427 : static rtx *find_split_point (rtx *, rtx_insn *, bool);
428 : static rtx subst (rtx, rtx, rtx, bool, bool, bool);
429 : static rtx combine_simplify_rtx (rtx, machine_mode, bool, bool);
430 : static rtx simplify_if_then_else (rtx);
431 : static rtx simplify_set (rtx);
432 : static rtx simplify_logical (rtx);
433 : static rtx expand_compound_operation (rtx);
434 : static const_rtx expand_field_assignment (const_rtx);
435 : static rtx make_extraction (machine_mode, rtx, HOST_WIDE_INT, rtx,
436 : unsigned HOST_WIDE_INT, bool, bool, bool);
437 : static int get_pos_from_mask (unsigned HOST_WIDE_INT,
438 : unsigned HOST_WIDE_INT *);
439 : static rtx canon_reg_for_combine (rtx, rtx);
440 : static rtx force_int_to_mode (rtx, scalar_int_mode, scalar_int_mode,
441 : scalar_int_mode, unsigned HOST_WIDE_INT, bool);
442 : static rtx force_to_mode (rtx, machine_mode,
443 : unsigned HOST_WIDE_INT, bool);
444 : static rtx if_then_else_cond (rtx, rtx *, rtx *);
445 : static rtx known_cond (rtx, enum rtx_code, rtx, rtx);
446 : static bool rtx_equal_for_field_assignment_p (rtx, rtx, bool = false);
447 : static rtx make_field_assignment (rtx);
448 : static rtx apply_distributive_law (rtx);
449 : static rtx distribute_and_simplify_rtx (rtx, int);
450 : static rtx simplify_and_const_int_1 (scalar_int_mode, rtx,
451 : unsigned HOST_WIDE_INT);
452 : static rtx simplify_and_const_int (rtx, scalar_int_mode, rtx,
453 : unsigned HOST_WIDE_INT);
454 : static bool merge_outer_ops (enum rtx_code *, HOST_WIDE_INT *, enum rtx_code,
455 : HOST_WIDE_INT, machine_mode, bool *);
456 : static rtx simplify_shift_const_1 (enum rtx_code, machine_mode, rtx, int);
457 : static rtx simplify_shift_const (rtx, enum rtx_code, machine_mode, rtx,
458 : int);
459 : static int recog_for_combine (rtx *, rtx_insn *, rtx *, unsigned = 0, unsigned = 0);
460 : static rtx gen_lowpart_for_combine (machine_mode, rtx);
461 : static rtx gen_lowpart_for_combine_no_emit (machine_mode, rtx);
462 : static enum rtx_code simplify_compare_const (enum rtx_code, machine_mode,
463 : rtx *, rtx *);
464 : static enum rtx_code simplify_comparison (enum rtx_code, rtx *, rtx *);
465 : static void update_table_tick (rtx);
466 : static void record_value_for_reg (rtx, rtx_insn *, rtx);
467 : static void check_promoted_subreg (rtx_insn *, rtx);
468 : static void record_dead_and_set_regs_1 (rtx, const_rtx, void *);
469 : static void record_dead_and_set_regs (rtx_insn *);
470 : static bool get_last_value_validate (rtx *, rtx_insn *, int, bool);
471 : static rtx get_last_value (const_rtx);
472 : static void reg_dead_at_p_1 (rtx, const_rtx, void *);
473 : static bool reg_dead_at_p (rtx, rtx_insn *);
474 : static void move_deaths (rtx, rtx, int, rtx_insn *, rtx *);
475 : static bool reg_bitfield_target_p (rtx, rtx);
476 : static void distribute_notes (rtx, rtx_insn *, rtx_insn *, rtx_insn *,
477 : rtx, rtx, rtx);
478 : static void distribute_links (struct insn_link *, rtx_insn * = nullptr,
479 : int limit = INT_MAX);
480 : static void mark_used_regs_combine (rtx);
481 : static void record_promoted_value (rtx_insn *, rtx);
482 : static bool unmentioned_reg_p (rtx, rtx);
483 : static void record_truncated_values (rtx *, void *);
484 : static bool reg_truncated_to_mode (machine_mode, const_rtx);
485 : static rtx gen_lowpart_or_truncate (machine_mode, rtx);
486 :
487 :
488 : /* It is not safe to use ordinary gen_lowpart in combine.
489 : See comments in gen_lowpart_for_combine. */
490 : #undef RTL_HOOKS_GEN_LOWPART
491 : #define RTL_HOOKS_GEN_LOWPART gen_lowpart_for_combine
492 :
493 : /* Our implementation of gen_lowpart never emits a new pseudo. */
494 : #undef RTL_HOOKS_GEN_LOWPART_NO_EMIT
495 : #define RTL_HOOKS_GEN_LOWPART_NO_EMIT gen_lowpart_for_combine_no_emit
496 :
497 : #undef RTL_HOOKS_REG_NONZERO_REG_BITS
498 : #define RTL_HOOKS_REG_NONZERO_REG_BITS reg_nonzero_bits_for_combine
499 :
500 : #undef RTL_HOOKS_REG_NUM_SIGN_BIT_COPIES
501 : #define RTL_HOOKS_REG_NUM_SIGN_BIT_COPIES reg_num_sign_bit_copies_for_combine
502 :
503 : #undef RTL_HOOKS_REG_TRUNCATED_TO_MODE
504 : #define RTL_HOOKS_REG_TRUNCATED_TO_MODE reg_truncated_to_mode
505 :
506 : static const struct rtl_hooks combine_rtl_hooks = RTL_HOOKS_INITIALIZER;
507 :
508 :
509 : /* Convenience wrapper for the canonicalize_comparison target hook.
510 : Target hooks cannot use enum rtx_code. */
511 : static inline void
512 23819037 : target_canonicalize_comparison (enum rtx_code *code, rtx *op0, rtx *op1,
513 : bool op0_preserve_value)
514 : {
515 23819037 : int code_int = (int)*code;
516 23819037 : targetm.canonicalize_comparison (&code_int, op0, op1, op0_preserve_value);
517 23819037 : *code = (enum rtx_code)code_int;
518 799763 : }
519 :
520 : /* Try to split PATTERN found in INSN. This returns NULL_RTX if
521 : PATTERN cannot be split. Otherwise, it returns an insn sequence.
522 : Updates OLD_NREGS with the max number of regs before the split
523 : and NEW_NREGS after the split.
524 : This is a wrapper around split_insns which ensures that the
525 : reg_stat vector is made larger if the splitter creates a new
526 : register. */
527 :
528 : static rtx_insn *
529 11482782 : combine_split_insns (rtx pattern, rtx_insn *insn,
530 : unsigned int *old_nregs,
531 : unsigned int *new_regs)
532 : {
533 11482782 : rtx_insn *ret;
534 11482782 : unsigned int nregs;
535 11482782 : *old_nregs = max_reg_num ();
536 11482782 : ret = split_insns (pattern, insn);
537 11482782 : *new_regs = nregs = max_reg_num ();
538 22965564 : if (nregs > reg_stat.length ())
539 2012 : reg_stat.safe_grow_cleared (nregs, true);
540 11482782 : return ret;
541 : }
542 :
543 : /* This is used by find_single_use to locate an rtx in LOC that
544 : contains exactly one use of DEST, which is typically a REG.
545 : It returns a pointer to the innermost rtx expression
546 : containing DEST. Appearances of DEST that are being used to
547 : totally replace it are not counted. */
548 :
549 : static rtx *
550 32306117 : find_single_use_1 (rtx dest, rtx *loc)
551 : {
552 39092543 : rtx x = *loc;
553 39092543 : enum rtx_code code = GET_CODE (x);
554 39092543 : rtx *result = NULL;
555 39092543 : rtx *this_result;
556 39092543 : int i;
557 39092543 : const char *fmt;
558 :
559 39092543 : switch (code)
560 : {
561 : case CONST:
562 : case LABEL_REF:
563 : case SYMBOL_REF:
564 : CASE_CONST_ANY:
565 : case CLOBBER:
566 : return 0;
567 :
568 6743482 : case SET:
569 : /* If the destination is anything other than PC, a REG or a SUBREG
570 : of a REG that occupies all of the REG, the insn uses DEST if
571 : it is mentioned in the destination or the source. Otherwise, we
572 : need just check the source. */
573 6743482 : if (GET_CODE (SET_DEST (x)) != PC
574 6743482 : && !REG_P (SET_DEST (x))
575 6744786 : && ! (GET_CODE (SET_DEST (x)) == SUBREG
576 1304 : && REG_P (SUBREG_REG (SET_DEST (x)))
577 1304 : && !read_modify_subreg_p (SET_DEST (x))))
578 : break;
579 :
580 6742353 : return find_single_use_1 (dest, &SET_SRC (x));
581 :
582 44073 : case MEM:
583 44073 : case SUBREG:
584 44073 : return find_single_use_1 (dest, &XEXP (x, 0));
585 :
586 : default:
587 : break;
588 : }
589 :
590 : /* If it wasn't one of the common cases above, check each expression and
591 : vector of this code. Look for a unique usage of DEST. */
592 :
593 19443695 : fmt = GET_RTX_FORMAT (code);
594 51969725 : for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
595 : {
596 32533349 : if (fmt[i] == 'e')
597 : {
598 32210305 : if (dest == XEXP (x, i)
599 32210305 : || (REG_P (dest) && REG_P (XEXP (x, i))
600 782424 : && REGNO (dest) == REGNO (XEXP (x, i))))
601 : this_result = loc;
602 : else
603 25464100 : this_result = find_single_use_1 (dest, &XEXP (x, i));
604 :
605 32210305 : if (result == NULL)
606 : result = this_result;
607 40054 : else if (this_result)
608 : /* Duplicate usage. */
609 : return NULL;
610 : }
611 323044 : else if (fmt[i] == 'E')
612 : {
613 49763 : int j;
614 :
615 151466 : for (j = XVECLEN (x, i) - 1; j >= 0; j--)
616 : {
617 105722 : if (XVECEXP (x, i, j) == dest
618 105722 : || (REG_P (dest)
619 105722 : && REG_P (XVECEXP (x, i, j))
620 4521 : && REGNO (XVECEXP (x, i, j)) == REGNO (dest)))
621 : this_result = loc;
622 : else
623 105722 : this_result = find_single_use_1 (dest, &XVECEXP (x, i, j));
624 :
625 105722 : if (result == NULL)
626 : result = this_result;
627 16733 : else if (this_result)
628 : return NULL;
629 : }
630 : }
631 : }
632 :
633 : return result;
634 : }
635 :
636 :
637 : /* See if DEST, produced in INSN, is used only a single time in the
638 : sequel. If so, return a pointer to the innermost rtx expression in which
639 : it is used.
640 :
641 : If PLOC is nonzero, *PLOC is set to the insn containing the single use.
642 :
643 : Otherwise, we find the single use by finding an insn that has a
644 : LOG_LINKS pointing at INSN and has a REG_DEAD note for DEST. If DEST is
645 : only referenced once in that insn, we know that it must be the first
646 : and last insn referencing DEST. */
647 :
648 : static rtx *
649 7319429 : find_single_use (rtx dest, rtx_insn *insn, rtx_insn **ploc)
650 : {
651 7319429 : basic_block bb;
652 7319429 : rtx_insn *next;
653 7319429 : rtx *result;
654 7319429 : struct insn_link *link;
655 :
656 7319429 : if (!REG_P (dest))
657 : return 0;
658 :
659 7319429 : bb = BLOCK_FOR_INSN (insn);
660 10051106 : for (next = NEXT_INSN (insn);
661 10051106 : next && BLOCK_FOR_INSN (next) == bb;
662 2731677 : next = NEXT_INSN (next))
663 9467972 : if (NONDEBUG_INSN_P (next) && dead_or_set_p (next, dest))
664 : {
665 8996442 : FOR_EACH_LOG_LINK (link, next)
666 7992236 : if (link->insn == insn && link->regno == REGNO (dest))
667 : break;
668 :
669 7740501 : if (link)
670 : {
671 6736295 : result = find_single_use_1 (dest, &PATTERN (next));
672 6736295 : if (ploc)
673 6736295 : *ploc = next;
674 6736295 : return result;
675 : }
676 : }
677 :
678 : return 0;
679 : }
680 :
681 : /* Substitute NEWVAL, an rtx expression, into INTO, a place in some
682 : insn. The substitution can be undone by undo_all. If INTO is already
683 : set to NEWVAL, do not record this change. Because computing NEWVAL might
684 : also call SUBST, we have to compute it before we put anything into
685 : the undo table. */
686 :
687 : static void
688 727343910 : do_SUBST (rtx *into, rtx newval)
689 : {
690 727343910 : struct undo *buf;
691 727343910 : rtx oldval = *into;
692 :
693 727343910 : if (oldval == newval)
694 : return;
695 :
696 : /* We'd like to catch as many invalid transformations here as
697 : possible. Unfortunately, there are way too many mode changes
698 : that are perfectly valid, so we'd waste too much effort for
699 : little gain doing the checks here. Focus on catching invalid
700 : transformations involving integer constants. */
701 93640672 : if (GET_MODE_CLASS (GET_MODE (oldval)) == MODE_INT
702 57038120 : && CONST_INT_P (newval))
703 : {
704 : /* Sanity check that we're replacing oldval with a CONST_INT
705 : that is a valid sign-extension for the original mode. */
706 1761593 : gcc_assert (INTVAL (newval)
707 : == trunc_int_for_mode (INTVAL (newval), GET_MODE (oldval)));
708 :
709 : /* Replacing the operand of a SUBREG or a ZERO_EXTEND with a
710 : CONST_INT is not valid, because after the replacement, the
711 : original mode would be gone. Unfortunately, we can't tell
712 : when do_SUBST is called to replace the operand thereof, so we
713 : perform this test on oldval instead, checking whether an
714 : invalid replacement took place before we got here. */
715 1761593 : gcc_assert (!(GET_CODE (oldval) == SUBREG
716 : && CONST_INT_P (SUBREG_REG (oldval))));
717 1761593 : gcc_assert (!(GET_CODE (oldval) == ZERO_EXTEND
718 : && CONST_INT_P (XEXP (oldval, 0))));
719 : }
720 :
721 93640672 : if (undobuf.frees)
722 89613927 : buf = undobuf.frees, undobuf.frees = buf->next;
723 : else
724 4026745 : buf = XNEW (struct undo);
725 :
726 93640672 : buf->kind = UNDO_RTX;
727 93640672 : buf->where.r = into;
728 93640672 : buf->old_contents.r = oldval;
729 93640672 : *into = newval;
730 :
731 93640672 : buf->next = undobuf.undos, undobuf.undos = buf;
732 : }
733 :
734 : #define SUBST(INTO, NEWVAL) do_SUBST (&(INTO), (NEWVAL))
735 :
736 : /* Similar to SUBST, but NEWVAL is an int expression. Note that substitution
737 : for the value of a HOST_WIDE_INT value (including CONST_INT) is
738 : not safe. */
739 :
740 : static void
741 15649811 : do_SUBST_INT (int *into, int newval)
742 : {
743 15649811 : struct undo *buf;
744 15649811 : int oldval = *into;
745 :
746 15649811 : if (oldval == newval)
747 : return;
748 :
749 6627680 : if (undobuf.frees)
750 6126505 : buf = undobuf.frees, undobuf.frees = buf->next;
751 : else
752 501175 : buf = XNEW (struct undo);
753 :
754 6627680 : buf->kind = UNDO_INT;
755 6627680 : buf->where.i = into;
756 6627680 : buf->old_contents.i = oldval;
757 6627680 : *into = newval;
758 :
759 6627680 : buf->next = undobuf.undos, undobuf.undos = buf;
760 : }
761 :
762 : #define SUBST_INT(INTO, NEWVAL) do_SUBST_INT (&(INTO), (NEWVAL))
763 :
764 : /* Similar to SUBST, but just substitute the mode. This is used when
765 : changing the mode of a pseudo-register, so that any other
766 : references to the entry in the regno_reg_rtx array will change as
767 : well. */
768 :
769 : static void
770 1416530 : subst_mode (int regno, machine_mode newval)
771 : {
772 1416530 : struct undo *buf;
773 1416530 : rtx reg = regno_reg_rtx[regno];
774 1416530 : machine_mode oldval = GET_MODE (reg);
775 :
776 1416530 : if (oldval == newval)
777 : return;
778 :
779 1416530 : if (undobuf.frees)
780 1341900 : buf = undobuf.frees, undobuf.frees = buf->next;
781 : else
782 74630 : buf = XNEW (struct undo);
783 :
784 1416530 : buf->kind = UNDO_MODE;
785 1416530 : buf->where.regno = regno;
786 1416530 : buf->old_contents.m = oldval;
787 1416530 : adjust_reg_mode (reg, newval);
788 :
789 1416530 : buf->next = undobuf.undos, undobuf.undos = buf;
790 : }
791 :
792 : /* Similar to SUBST, but NEWVAL is a LOG_LINKS expression. */
793 :
794 : static void
795 69723 : do_SUBST_LINK (struct insn_link **into, struct insn_link *newval)
796 : {
797 69723 : struct undo *buf;
798 69723 : struct insn_link * oldval = *into;
799 :
800 69723 : if (oldval == newval)
801 : return;
802 :
803 69723 : if (undobuf.frees)
804 66682 : buf = undobuf.frees, undobuf.frees = buf->next;
805 : else
806 3041 : buf = XNEW (struct undo);
807 :
808 69723 : buf->kind = UNDO_LINKS;
809 69723 : buf->where.l = into;
810 69723 : buf->old_contents.l = oldval;
811 69723 : *into = newval;
812 :
813 69723 : buf->next = undobuf.undos, undobuf.undos = buf;
814 : }
815 :
816 : #define SUBST_LINK(oldval, newval) do_SUBST_LINK (&oldval, newval)
817 :
818 : /* Subroutine of try_combine. Determine whether the replacement patterns
819 : NEWPAT, NEWI2PAT and NEWOTHERPAT are more expensive according to insn_cost
820 : than the original sequence I0, I1, I2, I3 and undobuf.other_insn. Note
821 : that I0, I1 and/or NEWI2PAT may be NULL_RTX. Similarly, NEWOTHERPAT and
822 : undobuf.other_insn may also both be NULL_RTX. Return false if the cost
823 : of all the instructions can be estimated and the replacements are more
824 : expensive than the original sequence. */
825 :
826 : static bool
827 4210973 : combine_validate_cost (rtx_insn *i0, rtx_insn *i1, rtx_insn *i2, rtx_insn *i3,
828 : rtx newpat, rtx newi2pat, rtx newotherpat)
829 : {
830 4210973 : int i0_cost, i1_cost, i2_cost, i3_cost;
831 4210973 : int new_i2_cost, new_i3_cost;
832 4210973 : int old_cost, new_cost;
833 :
834 : /* Lookup the original insn_costs. */
835 4210973 : i2_cost = INSN_COST (i2);
836 4210973 : i3_cost = INSN_COST (i3);
837 :
838 4210973 : if (i1)
839 : {
840 118435 : i1_cost = INSN_COST (i1);
841 118435 : if (i0)
842 : {
843 4201 : i0_cost = INSN_COST (i0);
844 4081 : old_cost = (i0_cost > 0 && i1_cost > 0 && i2_cost > 0 && i3_cost > 0
845 8270 : ? i0_cost + i1_cost + i2_cost + i3_cost : 0);
846 : }
847 : else
848 : {
849 109684 : old_cost = (i1_cost > 0 && i2_cost > 0 && i3_cost > 0
850 223916 : ? i1_cost + i2_cost + i3_cost : 0);
851 : i0_cost = 0;
852 : }
853 : }
854 : else
855 : {
856 4092538 : old_cost = (i2_cost > 0 && i3_cost > 0) ? i2_cost + i3_cost : 0;
857 : i1_cost = i0_cost = 0;
858 : }
859 :
860 : /* If we have split a PARALLEL I2 to I1,I2, we have counted its cost twice;
861 : correct that. */
862 4210973 : if (old_cost && i1 && INSN_UID (i1) == INSN_UID (i2))
863 2323 : old_cost -= i1_cost;
864 :
865 :
866 : /* Calculate the replacement insn_costs. */
867 4210973 : rtx tmp = PATTERN (i3);
868 4210973 : PATTERN (i3) = newpat;
869 4210973 : int tmpi = INSN_CODE (i3);
870 4210973 : INSN_CODE (i3) = -1;
871 4210973 : new_i3_cost = insn_cost (i3, optimize_this_for_speed_p);
872 4210973 : PATTERN (i3) = tmp;
873 4210973 : INSN_CODE (i3) = tmpi;
874 4210973 : if (newi2pat)
875 : {
876 205011 : tmp = PATTERN (i2);
877 205011 : PATTERN (i2) = newi2pat;
878 205011 : tmpi = INSN_CODE (i2);
879 205011 : INSN_CODE (i2) = -1;
880 205011 : new_i2_cost = insn_cost (i2, optimize_this_for_speed_p);
881 205011 : PATTERN (i2) = tmp;
882 205011 : INSN_CODE (i2) = tmpi;
883 205011 : new_cost = (new_i2_cost > 0 && new_i3_cost > 0)
884 205011 : ? new_i2_cost + new_i3_cost : 0;
885 : }
886 : else
887 : {
888 : new_cost = new_i3_cost;
889 : new_i2_cost = 0;
890 : }
891 :
892 4210973 : if (undobuf.other_insn)
893 : {
894 194743 : int old_other_cost, new_other_cost;
895 :
896 194743 : old_other_cost = INSN_COST (undobuf.other_insn);
897 194743 : tmp = PATTERN (undobuf.other_insn);
898 194743 : PATTERN (undobuf.other_insn) = newotherpat;
899 194743 : tmpi = INSN_CODE (undobuf.other_insn);
900 194743 : INSN_CODE (undobuf.other_insn) = -1;
901 194743 : new_other_cost = insn_cost (undobuf.other_insn,
902 : optimize_this_for_speed_p);
903 194743 : PATTERN (undobuf.other_insn) = tmp;
904 194743 : INSN_CODE (undobuf.other_insn) = tmpi;
905 194743 : if (old_other_cost > 0 && new_other_cost > 0)
906 : {
907 194743 : old_cost += old_other_cost;
908 194743 : new_cost += new_other_cost;
909 : }
910 : else
911 : old_cost = 0;
912 : }
913 :
914 : /* Disallow this combination if both new_cost and old_cost are greater than
915 : zero, and new_cost is greater than old cost. */
916 4210973 : bool reject = old_cost > 0 && new_cost > old_cost;
917 :
918 4210973 : if (dump_file)
919 : {
920 484 : fprintf (dump_file, "%s combination of insns ",
921 : reject ? "rejecting" : "allowing");
922 244 : if (i0)
923 0 : fprintf (dump_file, "%d, ", INSN_UID (i0));
924 244 : if (i1 && INSN_UID (i1) != INSN_UID (i2))
925 1 : fprintf (dump_file, "%d, ", INSN_UID (i1));
926 244 : fprintf (dump_file, "%d and %d\n", INSN_UID (i2), INSN_UID (i3));
927 :
928 244 : fprintf (dump_file, "original costs ");
929 244 : if (i0)
930 0 : fprintf (dump_file, "%d + ", i0_cost);
931 244 : if (i1 && INSN_UID (i1) != INSN_UID (i2))
932 1 : fprintf (dump_file, "%d + ", i1_cost);
933 244 : fprintf (dump_file, "%d + %d = %d\n", i2_cost, i3_cost, old_cost);
934 :
935 244 : if (newi2pat)
936 19 : fprintf (dump_file, "replacement costs %d + %d = %d\n",
937 : new_i2_cost, new_i3_cost, new_cost);
938 : else
939 225 : fprintf (dump_file, "replacement cost %d\n", new_cost);
940 : }
941 :
942 4210973 : if (reject)
943 : return false;
944 :
945 : /* Update the uid_insn_cost array with the replacement costs. */
946 4000667 : INSN_COST (i2) = new_i2_cost;
947 4000667 : INSN_COST (i3) = new_i3_cost;
948 4000667 : if (i1)
949 : {
950 99088 : INSN_COST (i1) = 0;
951 99088 : if (i0)
952 4053 : INSN_COST (i0) = 0;
953 : }
954 :
955 : return true;
956 : }
957 :
958 :
959 : /* Delete any insns that copy a register to itself.
960 : Return true if the CFG was changed. */
961 :
962 : static bool
963 997153 : delete_noop_moves (void)
964 : {
965 997153 : rtx_insn *insn, *next;
966 997153 : basic_block bb;
967 :
968 997153 : bool edges_deleted = false;
969 :
970 11253538 : FOR_EACH_BB_FN (bb, cfun)
971 : {
972 135553051 : for (insn = BB_HEAD (bb); insn != NEXT_INSN (BB_END (bb)); insn = next)
973 : {
974 125296666 : next = NEXT_INSN (insn);
975 125296666 : if (INSN_P (insn) && noop_move_p (insn))
976 : {
977 6652 : if (dump_file)
978 0 : fprintf (dump_file, "deleting noop move %d\n", INSN_UID (insn));
979 :
980 6652 : edges_deleted |= delete_insn_and_edges (insn);
981 : }
982 : }
983 : }
984 :
985 997153 : return edges_deleted;
986 : }
987 :
988 :
989 : /* Return false if we do not want to (or cannot) combine DEF. */
990 : static bool
991 41465875 : can_combine_def_p (df_ref def)
992 : {
993 : /* Do not consider if it is pre/post modification in MEM. */
994 41465875 : if (DF_REF_FLAGS (def) & DF_REF_PRE_POST_MODIFY)
995 : return false;
996 :
997 39794475 : unsigned int regno = DF_REF_REGNO (def);
998 :
999 : /* Do not combine frame pointer adjustments. */
1000 39794475 : if ((regno == FRAME_POINTER_REGNUM
1001 0 : && (!reload_completed || frame_pointer_needed))
1002 2062 : || (!HARD_FRAME_POINTER_IS_FRAME_POINTER
1003 39794475 : && regno == HARD_FRAME_POINTER_REGNUM
1004 : && (!reload_completed || frame_pointer_needed))
1005 39792413 : || (FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
1006 0 : && regno == ARG_POINTER_REGNUM && fixed_regs[regno]))
1007 2062 : return false;
1008 :
1009 : return true;
1010 : }
1011 :
1012 : /* Return false if we do not want to (or cannot) combine USE. */
1013 : static bool
1014 77017201 : can_combine_use_p (df_ref use)
1015 : {
1016 : /* Do not consider the usage of the stack pointer by function call. */
1017 0 : if (DF_REF_FLAGS (use) & DF_REF_CALL_STACK_USAGE)
1018 0 : return false;
1019 :
1020 : return true;
1021 : }
1022 :
1023 : /* Fill in log links field for all insns. */
1024 :
1025 : static void
1026 997153 : create_log_links (void)
1027 : {
1028 997153 : basic_block bb;
1029 997153 : rtx_insn **next_use;
1030 997153 : rtx_insn *insn;
1031 997153 : df_ref def, use;
1032 :
1033 997153 : next_use = XCNEWVEC (rtx_insn *, max_reg_num ());
1034 :
1035 : /* Pass through each block from the end, recording the uses of each
1036 : register and establishing log links when def is encountered.
1037 : Note that we do not clear next_use array in order to save time,
1038 : so we have to test whether the use is in the same basic block as def.
1039 :
1040 : There are a few cases below when we do not consider the definition or
1041 : usage -- these are taken from original flow.c did. Don't ask me why it is
1042 : done this way; I don't know and if it works, I don't want to know. */
1043 :
1044 11253538 : FOR_EACH_BB_FN (bb, cfun)
1045 : {
1046 135537261 : FOR_BB_INSNS_REVERSE (bb, insn)
1047 : {
1048 125280876 : if (!NONDEBUG_INSN_P (insn))
1049 64080998 : continue;
1050 :
1051 : /* Log links are created only once. */
1052 61199878 : gcc_assert (!LOG_LINKS (insn));
1053 :
1054 491496158 : FOR_EACH_INSN_DEF (def, insn)
1055 : {
1056 430296280 : unsigned int regno = DF_REF_REGNO (def);
1057 430296280 : rtx_insn *use_insn;
1058 :
1059 430296280 : if (!next_use[regno])
1060 388830405 : continue;
1061 :
1062 41465875 : if (!can_combine_def_p (def))
1063 1673462 : continue;
1064 :
1065 39792413 : use_insn = next_use[regno];
1066 39792413 : next_use[regno] = NULL;
1067 :
1068 39792413 : if (BLOCK_FOR_INSN (use_insn) != bb)
1069 2220077 : continue;
1070 :
1071 : /* flow.c claimed:
1072 :
1073 : We don't build a LOG_LINK for hard registers contained
1074 : in ASM_OPERANDs. If these registers get replaced,
1075 : we might wind up changing the semantics of the insn,
1076 : even if reload can make what appear to be valid
1077 : assignments later. */
1078 37573188 : if (regno < FIRST_PSEUDO_REGISTER
1079 37572336 : && asm_noperands (PATTERN (use_insn)) >= 0)
1080 852 : continue;
1081 :
1082 : /* Don't add duplicate links between instructions. */
1083 37571484 : struct insn_link *links;
1084 50269034 : FOR_EACH_LOG_LINK (links, use_insn)
1085 12697550 : if (insn == links->insn && regno == links->regno)
1086 : break;
1087 :
1088 37571484 : if (!links)
1089 37571484 : LOG_LINKS (use_insn)
1090 75142968 : = alloc_insn_link (insn, regno, LOG_LINKS (use_insn));
1091 : }
1092 :
1093 138217079 : FOR_EACH_INSN_USE (use, insn)
1094 149413420 : if (can_combine_use_p (use))
1095 72396219 : next_use[DF_REF_REGNO (use)] = insn;
1096 : }
1097 : }
1098 :
1099 997153 : free (next_use);
1100 997153 : }
1101 :
1102 : /* Walk the LOG_LINKS of insn B to see if we find a reference to A. Return
1103 : true if we found a LOG_LINK that proves that A feeds B. This only works
1104 : if there are no instructions between A and B which could have a link
1105 : depending on A, since in that case we would not record a link for B. */
1106 :
1107 : static bool
1108 12610097 : insn_a_feeds_b (rtx_insn *a, rtx_insn *b)
1109 : {
1110 12610097 : struct insn_link *links;
1111 15926046 : FOR_EACH_LOG_LINK (links, b)
1112 13447113 : if (links->insn == a)
1113 : return true;
1114 : return false;
1115 : }
1116 :
1117 : /* Main entry point for combiner. F is the first insn of the function.
1118 : NREGS is the first unused pseudo-reg number.
1119 :
1120 : Return nonzero if the CFG was changed (e.g. if the combiner has
1121 : turned an indirect jump instruction into a direct jump). */
1122 : static bool
1123 1041492 : combine_instructions (rtx_insn *f, unsigned int nregs)
1124 : {
1125 1041492 : rtx_insn *insn, *next;
1126 1041492 : struct insn_link *links, *nextlinks;
1127 1041492 : rtx_insn *first;
1128 1041492 : basic_block last_bb;
1129 :
1130 1041492 : bool new_direct_jump_p = false;
1131 :
1132 3097835 : for (first = f; first && !NONDEBUG_INSN_P (first); )
1133 2056343 : first = NEXT_INSN (first);
1134 1041492 : if (!first)
1135 : return false;
1136 :
1137 997153 : combine_attempts = 0;
1138 997153 : combine_merges = 0;
1139 997153 : combine_extras = 0;
1140 997153 : combine_successes = 0;
1141 :
1142 997153 : rtl_hooks = combine_rtl_hooks;
1143 :
1144 997153 : reg_stat.safe_grow_cleared (nregs, true);
1145 :
1146 997153 : init_recog_no_volatile ();
1147 :
1148 : /* Allocate array for insn info. */
1149 997153 : max_uid_known = get_max_uid ();
1150 997153 : uid_log_links = XCNEWVEC (struct insn_link *, max_uid_known + 1);
1151 997153 : uid_insn_cost = XCNEWVEC (int, max_uid_known + 1);
1152 997153 : gcc_obstack_init (&insn_link_obstack);
1153 :
1154 997153 : nonzero_bits_mode = int_mode_for_size (HOST_BITS_PER_WIDE_INT, 0).require ();
1155 :
1156 : /* Don't use reg_stat[].nonzero_bits when computing it. This can cause
1157 : problems when, for example, we have j <<= 1 in a loop. */
1158 :
1159 997153 : nonzero_sign_valid = 0;
1160 997153 : label_tick = label_tick_ebb_start = 1;
1161 :
1162 : /* Scan all SETs and see if we can deduce anything about what
1163 : bits are known to be zero for some registers and how many copies
1164 : of the sign bit are known to exist for those registers.
1165 :
1166 : Also set any known values so that we can use it while searching
1167 : for what bits are known to be set. */
1168 :
1169 997153 : setup_incoming_promotions (first);
1170 : /* Allow the entry block and the first block to fall into the same EBB.
1171 : Conceptually the incoming promotions are assigned to the entry block. */
1172 997153 : last_bb = ENTRY_BLOCK_PTR_FOR_FN (cfun);
1173 :
1174 997153 : create_log_links ();
1175 11253538 : FOR_EACH_BB_FN (this_basic_block, cfun)
1176 : {
1177 10256385 : optimize_this_for_speed_p = optimize_bb_for_speed_p (this_basic_block);
1178 10256385 : last_call_luid = 0;
1179 10256385 : mem_last_set = -1;
1180 :
1181 10256385 : label_tick++;
1182 10256385 : if (!single_pred_p (this_basic_block)
1183 10256385 : || single_pred (this_basic_block) != last_bb)
1184 4917966 : label_tick_ebb_start = label_tick;
1185 10256385 : last_bb = this_basic_block;
1186 :
1187 135537261 : FOR_BB_INSNS (this_basic_block, insn)
1188 125280876 : if (INSN_P (insn) && BLOCK_FOR_INSN (insn))
1189 : {
1190 109025658 : rtx links;
1191 :
1192 109025658 : subst_low_luid = DF_INSN_LUID (insn);
1193 109025658 : subst_insn = insn;
1194 :
1195 109025658 : note_stores (insn, set_nonzero_bits_and_sign_copies, insn);
1196 109025658 : record_dead_and_set_regs (insn);
1197 :
1198 109025658 : if (AUTO_INC_DEC)
1199 : for (links = REG_NOTES (insn); links; links = XEXP (links, 1))
1200 : if (REG_NOTE_KIND (links) == REG_INC)
1201 : set_nonzero_bits_and_sign_copies (XEXP (links, 0), NULL_RTX,
1202 : insn);
1203 :
1204 : /* Record the current insn_cost of this instruction. */
1205 109025658 : INSN_COST (insn) = insn_cost (insn, optimize_this_for_speed_p);
1206 109025658 : if (dump_file)
1207 : {
1208 1695 : fprintf (dump_file, "insn_cost %d for ", INSN_COST (insn));
1209 1695 : dump_insn_slim (dump_file, insn);
1210 : }
1211 : }
1212 : }
1213 :
1214 997153 : nonzero_sign_valid = 1;
1215 :
1216 : /* Now scan all the insns in forward order. */
1217 997153 : label_tick = label_tick_ebb_start = 1;
1218 997153 : init_reg_last ();
1219 997153 : setup_incoming_promotions (first);
1220 997153 : last_bb = ENTRY_BLOCK_PTR_FOR_FN (cfun);
1221 997153 : int max_combine = param_max_combine_insns;
1222 :
1223 11253538 : FOR_EACH_BB_FN (this_basic_block, cfun)
1224 : {
1225 10256385 : rtx_insn *last_combined_insn = NULL;
1226 :
1227 : /* Ignore instruction combination in basic blocks that are going to
1228 : be removed as unreachable anyway. See PR82386. */
1229 10256385 : if (EDGE_COUNT (this_basic_block->preds) == 0)
1230 1592 : continue;
1231 :
1232 10254793 : optimize_this_for_speed_p = optimize_bb_for_speed_p (this_basic_block);
1233 10254793 : last_call_luid = 0;
1234 10254793 : mem_last_set = -1;
1235 :
1236 10254793 : label_tick++;
1237 10254793 : if (!single_pred_p (this_basic_block)
1238 10254793 : || single_pred (this_basic_block) != last_bb)
1239 4917650 : label_tick_ebb_start = label_tick;
1240 10254793 : last_bb = this_basic_block;
1241 :
1242 10254793 : rtl_profile_for_bb (this_basic_block);
1243 10254793 : for (insn = BB_HEAD (this_basic_block);
1244 140065351 : insn != NEXT_INSN (BB_END (this_basic_block));
1245 125809891 : insn = next ? next : NEXT_INSN (insn))
1246 : {
1247 129810558 : next = 0;
1248 129810558 : if (!NONDEBUG_INSN_P (insn))
1249 64311539 : continue;
1250 :
1251 : while (last_combined_insn
1252 65500753 : && (!NONDEBUG_INSN_P (last_combined_insn)
1253 55429545 : || last_combined_insn->deleted ()))
1254 1734 : last_combined_insn = PREV_INSN (last_combined_insn);
1255 65499019 : if (last_combined_insn == NULL_RTX
1256 55428978 : || BLOCK_FOR_INSN (last_combined_insn) != this_basic_block
1257 120927792 : || DF_INSN_LUID (last_combined_insn) <= DF_INSN_LUID (insn))
1258 : last_combined_insn = insn;
1259 :
1260 : /* See if we know about function return values before this
1261 : insn based upon SUBREG flags. */
1262 65499019 : check_promoted_subreg (insn, PATTERN (insn));
1263 :
1264 : /* See if we can find hardregs and subreg of pseudos in
1265 : narrower modes. This could help turning TRUNCATEs
1266 : into SUBREGs. */
1267 65499019 : note_uses (&PATTERN (insn), record_truncated_values, NULL);
1268 :
1269 : /* Try this insn with each insn it links back to. */
1270 :
1271 102098179 : FOR_EACH_LOG_LINK (links, insn)
1272 40477304 : if ((next = try_combine (insn, links->insn, NULL,
1273 : NULL, &new_direct_jump_p,
1274 : last_combined_insn)) != 0)
1275 : {
1276 3878144 : statistics_counter_event (cfun, "two-insn combine", 1);
1277 3878144 : goto retry;
1278 : }
1279 :
1280 : /* Try each sequence of three linked insns ending with this one. */
1281 :
1282 61620875 : if (max_combine >= 3)
1283 97652320 : FOR_EACH_LOG_LINK (links, insn)
1284 : {
1285 36211460 : rtx_insn *link = links->insn;
1286 :
1287 : /* If the linked insn has been replaced by a note, then there
1288 : is no point in pursuing this chain any further. */
1289 36211460 : if (NOTE_P (link))
1290 238 : continue;
1291 :
1292 53876828 : FOR_EACH_LOG_LINK (nextlinks, link)
1293 17743805 : if ((next = try_combine (insn, link, nextlinks->insn,
1294 : NULL, &new_direct_jump_p,
1295 : last_combined_insn)) != 0)
1296 : {
1297 78199 : statistics_counter_event (cfun, "three-insn combine", 1);
1298 78199 : goto retry;
1299 : }
1300 : }
1301 :
1302 : /* Try combining an insn with two different insns whose results it
1303 : uses. */
1304 61440860 : if (max_combine >= 3)
1305 97539551 : FOR_EACH_LOG_LINK (links, insn)
1306 48490461 : for (nextlinks = links->next; nextlinks;
1307 12377494 : nextlinks = nextlinks->next)
1308 12391770 : if ((next = try_combine (insn, links->insn,
1309 : nextlinks->insn, NULL,
1310 : &new_direct_jump_p,
1311 : last_combined_insn)) != 0)
1312 :
1313 : {
1314 14276 : statistics_counter_event (cfun, "three-insn combine", 1);
1315 14276 : goto retry;
1316 : }
1317 :
1318 : /* Try four-instruction combinations. */
1319 61426584 : if (max_combine >= 4)
1320 97518503 : FOR_EACH_LOG_LINK (links, insn)
1321 : {
1322 36095928 : struct insn_link *next1;
1323 36095928 : rtx_insn *link = links->insn;
1324 :
1325 : /* If the linked insn has been replaced by a note, then there
1326 : is no point in pursuing this chain any further. */
1327 36095928 : if (NOTE_P (link))
1328 237 : continue;
1329 :
1330 53739370 : FOR_EACH_LOG_LINK (next1, link)
1331 : {
1332 17645020 : rtx_insn *link1 = next1->insn;
1333 17645020 : if (NOTE_P (link1))
1334 76 : continue;
1335 : /* I0 -> I1 -> I2 -> I3. */
1336 28835275 : FOR_EACH_LOG_LINK (nextlinks, link1)
1337 11191560 : if ((next = try_combine (insn, link, link1,
1338 : nextlinks->insn,
1339 : &new_direct_jump_p,
1340 : last_combined_insn)) != 0)
1341 : {
1342 1229 : statistics_counter_event (cfun, "four-insn combine", 1);
1343 1229 : goto retry;
1344 : }
1345 : /* I0, I1 -> I2, I2 -> I3. */
1346 21489313 : for (nextlinks = next1->next; nextlinks;
1347 3845598 : nextlinks = nextlinks->next)
1348 3845710 : if ((next = try_combine (insn, link, link1,
1349 : nextlinks->insn,
1350 : &new_direct_jump_p,
1351 : last_combined_insn)) != 0)
1352 : {
1353 112 : statistics_counter_event (cfun, "four-insn combine", 1);
1354 112 : goto retry;
1355 : }
1356 : }
1357 :
1358 48468922 : for (next1 = links->next; next1; next1 = next1->next)
1359 : {
1360 12377161 : rtx_insn *link1 = next1->insn;
1361 12377161 : if (NOTE_P (link1))
1362 8 : continue;
1363 : /* I0 -> I2; I1, I2 -> I3. */
1364 15628241 : FOR_EACH_LOG_LINK (nextlinks, link)
1365 3253487 : if ((next = try_combine (insn, link, link1,
1366 : nextlinks->insn,
1367 : &new_direct_jump_p,
1368 : last_combined_insn)) != 0)
1369 : {
1370 2399 : statistics_counter_event (cfun, "four-insn combine", 1);
1371 2399 : goto retry;
1372 : }
1373 : /* I0 -> I1; I1, I2 -> I3. */
1374 15828461 : FOR_EACH_LOG_LINK (nextlinks, link1)
1375 3453897 : if ((next = try_combine (insn, link, link1,
1376 : nextlinks->insn,
1377 : &new_direct_jump_p,
1378 : last_combined_insn)) != 0)
1379 : {
1380 190 : statistics_counter_event (cfun, "four-insn combine", 1);
1381 190 : goto retry;
1382 : }
1383 : }
1384 : }
1385 :
1386 : /* Try this insn with each REG_EQUAL note it links back to. */
1387 97642113 : FOR_EACH_LOG_LINK (links, insn)
1388 : {
1389 36143761 : rtx set, note;
1390 36143761 : rtx_insn *temp = links->insn;
1391 36143761 : if ((set = single_set (temp)) != 0
1392 35755684 : && (note = find_reg_equal_equiv_note (temp)) != 0
1393 2528104 : && (note = XEXP (note, 0), GET_CODE (note)) != EXPR_LIST
1394 2528104 : && ! side_effects_p (SET_SRC (set))
1395 : /* Avoid using a register that may already been marked
1396 : dead by an earlier instruction. */
1397 2528104 : && ! unmentioned_reg_p (note, SET_SRC (set))
1398 37369064 : && (GET_MODE (note) == VOIDmode
1399 28097 : ? SCALAR_INT_MODE_P (GET_MODE (SET_DEST (set)))
1400 1197206 : : (GET_MODE (SET_DEST (set)) == GET_MODE (note)
1401 1197176 : && (GET_CODE (SET_DEST (set)) != ZERO_EXTRACT
1402 0 : || (GET_MODE (XEXP (SET_DEST (set), 0))
1403 : == GET_MODE (note))))))
1404 : {
1405 : /* Temporarily replace the set's source with the
1406 : contents of the REG_EQUAL note. The insn will
1407 : be deleted or recognized by try_combine. */
1408 1225256 : rtx orig_src = SET_SRC (set);
1409 1225256 : rtx orig_dest = SET_DEST (set);
1410 1225256 : if (GET_CODE (SET_DEST (set)) == ZERO_EXTRACT)
1411 0 : SET_DEST (set) = XEXP (SET_DEST (set), 0);
1412 1225256 : SET_SRC (set) = note;
1413 1225256 : i2mod = temp;
1414 1225256 : i2mod_old_rhs = copy_rtx (orig_src);
1415 1225256 : i2mod_new_rhs = copy_rtx (note);
1416 1225256 : next = try_combine (insn, i2mod, NULL, NULL,
1417 : &new_direct_jump_p,
1418 : last_combined_insn);
1419 1225256 : i2mod = NULL;
1420 1225256 : if (next)
1421 : {
1422 26118 : statistics_counter_event (cfun, "insn-with-note combine", 1);
1423 26118 : goto retry;
1424 : }
1425 1199138 : INSN_CODE (temp) = -1;
1426 1199138 : SET_SRC (set) = orig_src;
1427 1199138 : SET_DEST (set) = orig_dest;
1428 : }
1429 : }
1430 :
1431 61498352 : if (!NOTE_P (insn))
1432 61498352 : record_dead_and_set_regs (insn);
1433 :
1434 129810558 : retry:
1435 129810558 : ;
1436 : }
1437 : }
1438 :
1439 997153 : default_rtl_profile ();
1440 997153 : clear_bb_flags ();
1441 :
1442 997153 : if (purge_all_dead_edges ())
1443 1361 : new_direct_jump_p = true;
1444 997153 : if (delete_noop_moves ())
1445 0 : new_direct_jump_p = true;
1446 :
1447 : /* Clean up. */
1448 997153 : obstack_free (&insn_link_obstack, NULL);
1449 997153 : free (uid_log_links);
1450 997153 : free (uid_insn_cost);
1451 997153 : reg_stat.release ();
1452 :
1453 997153 : {
1454 997153 : struct undo *undo, *next;
1455 5602744 : for (undo = undobuf.frees; undo; undo = next)
1456 : {
1457 4605591 : next = undo->next;
1458 4605591 : free (undo);
1459 : }
1460 997153 : undobuf.frees = 0;
1461 : }
1462 :
1463 997153 : statistics_counter_event (cfun, "attempts", combine_attempts);
1464 997153 : statistics_counter_event (cfun, "merges", combine_merges);
1465 997153 : statistics_counter_event (cfun, "extras", combine_extras);
1466 997153 : statistics_counter_event (cfun, "successes", combine_successes);
1467 :
1468 997153 : nonzero_sign_valid = 0;
1469 997153 : rtl_hooks = general_rtl_hooks;
1470 :
1471 : /* Make recognizer allow volatile MEMs again. */
1472 997153 : init_recog ();
1473 :
1474 997153 : return new_direct_jump_p;
1475 : }
1476 :
1477 : /* Wipe the last_xxx fields of reg_stat in preparation for another pass. */
1478 :
1479 : static void
1480 997153 : init_reg_last (void)
1481 : {
1482 997153 : unsigned int i;
1483 997153 : reg_stat_type *p;
1484 :
1485 141421767 : FOR_EACH_VEC_ELT (reg_stat, i, p)
1486 140424614 : memset (p, 0, offsetof (reg_stat_type, sign_bit_copies));
1487 997153 : }
1488 :
1489 : /* Set up any promoted values for incoming argument registers. */
1490 :
1491 : static void
1492 1994306 : setup_incoming_promotions (rtx_insn *first)
1493 : {
1494 1994306 : tree arg;
1495 1994306 : bool strictly_local = false;
1496 :
1497 5396664 : for (arg = DECL_ARGUMENTS (current_function_decl); arg;
1498 3402358 : arg = DECL_CHAIN (arg))
1499 : {
1500 3402358 : rtx x, reg = DECL_INCOMING_RTL (arg);
1501 3402358 : int uns1, uns3;
1502 3402358 : machine_mode mode1, mode2, mode3, mode4;
1503 :
1504 : /* Only continue if the incoming argument is in a register. */
1505 3402358 : if (!REG_P (reg))
1506 3402264 : continue;
1507 :
1508 : /* Determine, if possible, whether all call sites of the current
1509 : function lie within the current compilation unit. (This does
1510 : take into account the exporting of a function via taking its
1511 : address, and so forth.) */
1512 2664464 : strictly_local
1513 2664464 : = cgraph_node::local_info_node (current_function_decl)->local;
1514 :
1515 : /* The mode and signedness of the argument before any promotions happen
1516 : (equal to the mode of the pseudo holding it at that stage). */
1517 2664464 : mode1 = TYPE_MODE (TREE_TYPE (arg));
1518 2664464 : uns1 = TYPE_UNSIGNED (TREE_TYPE (arg));
1519 :
1520 : /* The mode and signedness of the argument after any source language and
1521 : TARGET_PROMOTE_PROTOTYPES-driven promotions. */
1522 2664464 : mode2 = TYPE_MODE (DECL_ARG_TYPE (arg));
1523 2664464 : uns3 = TYPE_UNSIGNED (DECL_ARG_TYPE (arg));
1524 :
1525 : /* The mode and signedness of the argument as it is actually passed,
1526 : see assign_parm_setup_reg in function.cc. */
1527 2664464 : mode3 = promote_function_mode (TREE_TYPE (arg), mode1, &uns3,
1528 2664464 : TREE_TYPE (cfun->decl), 0);
1529 :
1530 : /* The mode of the register in which the argument is being passed. */
1531 2664464 : mode4 = GET_MODE (reg);
1532 :
1533 : /* Eliminate sign extensions in the callee when:
1534 : (a) A mode promotion has occurred; */
1535 2664464 : if (mode1 == mode3)
1536 2664370 : continue;
1537 : /* (b) The mode of the register is the same as the mode of
1538 : the argument as it is passed; */
1539 94 : if (mode3 != mode4)
1540 0 : continue;
1541 : /* (c) There's no language level extension; */
1542 94 : if (mode1 == mode2)
1543 : ;
1544 : /* (c.1) All callers are from the current compilation unit. If that's
1545 : the case we don't have to rely on an ABI, we only have to know
1546 : what we're generating right now, and we know that we will do the
1547 : mode1 to mode2 promotion with the given sign. */
1548 0 : else if (!strictly_local)
1549 0 : continue;
1550 : /* (c.2) The combination of the two promotions is useful. This is
1551 : true when the signs match, or if the first promotion is unsigned.
1552 : In the later case, (sign_extend (zero_extend x)) is the same as
1553 : (zero_extend (zero_extend x)), so make sure to force UNS3 true. */
1554 0 : else if (uns1)
1555 0 : uns3 = true;
1556 0 : else if (uns3)
1557 0 : continue;
1558 :
1559 : /* Record that the value was promoted from mode1 to mode3,
1560 : so that any sign extension at the head of the current
1561 : function may be eliminated. */
1562 94 : x = gen_rtx_CLOBBER (mode1, const0_rtx);
1563 94 : x = gen_rtx_fmt_e ((uns3 ? ZERO_EXTEND : SIGN_EXTEND), mode3, x);
1564 94 : record_value_for_reg (reg, first, x);
1565 : }
1566 1994306 : }
1567 :
1568 : /* If MODE has a precision lower than PREC and SRC is a non-negative constant
1569 : that would appear negative in MODE, sign-extend SRC for use in nonzero_bits
1570 : because some machines (maybe most) will actually do the sign-extension and
1571 : this is the conservative approach.
1572 :
1573 : ??? For 2.5, try to tighten up the MD files in this regard instead of this
1574 : kludge. */
1575 :
1576 : static rtx
1577 0 : sign_extend_short_imm (rtx src, machine_mode mode, unsigned int prec)
1578 : {
1579 0 : scalar_int_mode int_mode;
1580 0 : if (CONST_INT_P (src)
1581 0 : && is_a <scalar_int_mode> (mode, &int_mode)
1582 0 : && GET_MODE_PRECISION (int_mode) < prec
1583 0 : && INTVAL (src) > 0
1584 0 : && val_signbit_known_set_p (int_mode, INTVAL (src)))
1585 0 : src = GEN_INT (INTVAL (src) | ~GET_MODE_MASK (int_mode));
1586 :
1587 0 : return src;
1588 : }
1589 :
1590 : /* Update RSP for pseudo-register X from INSN's REG_EQUAL note (if one exists)
1591 : and SET. */
1592 :
1593 : static void
1594 23056062 : update_rsp_from_reg_equal (reg_stat_type *rsp, rtx_insn *insn, const_rtx set,
1595 : rtx x)
1596 : {
1597 23056062 : rtx reg_equal_note = insn ? find_reg_equal_equiv_note (insn) : NULL_RTX;
1598 23056062 : unsigned HOST_WIDE_INT bits = 0;
1599 23056062 : rtx reg_equal = NULL, src = SET_SRC (set);
1600 23056062 : unsigned int num = 0;
1601 :
1602 23056062 : if (reg_equal_note)
1603 964156 : reg_equal = XEXP (reg_equal_note, 0);
1604 :
1605 23056062 : if (SHORT_IMMEDIATES_SIGN_EXTEND)
1606 : {
1607 : src = sign_extend_short_imm (src, GET_MODE (x), BITS_PER_WORD);
1608 : if (reg_equal)
1609 : reg_equal = sign_extend_short_imm (reg_equal, GET_MODE (x), BITS_PER_WORD);
1610 : }
1611 :
1612 : /* Don't call nonzero_bits if it cannot change anything. */
1613 23056062 : if (rsp->nonzero_bits != HOST_WIDE_INT_M1U)
1614 : {
1615 19903676 : machine_mode mode = GET_MODE (x);
1616 19903676 : if (GET_MODE_CLASS (mode) == MODE_INT
1617 19903676 : && HWI_COMPUTABLE_MODE_P (mode))
1618 19903544 : mode = nonzero_bits_mode;
1619 19903676 : bits = nonzero_bits (src, mode);
1620 19903676 : if (reg_equal && bits)
1621 914116 : bits &= nonzero_bits (reg_equal, mode);
1622 19903676 : rsp->nonzero_bits |= bits;
1623 : }
1624 :
1625 : /* Don't call num_sign_bit_copies if it cannot change anything. */
1626 23056062 : if (rsp->sign_bit_copies != 1)
1627 : {
1628 19761319 : num = num_sign_bit_copies (SET_SRC (set), GET_MODE (x));
1629 19761319 : if (reg_equal && maybe_ne (num, GET_MODE_PRECISION (GET_MODE (x))))
1630 : {
1631 911576 : unsigned int numeq = num_sign_bit_copies (reg_equal, GET_MODE (x));
1632 911576 : if (num == 0 || numeq > num)
1633 19761319 : num = numeq;
1634 : }
1635 19761319 : if (rsp->sign_bit_copies == 0 || num < rsp->sign_bit_copies)
1636 19104209 : rsp->sign_bit_copies = num;
1637 : }
1638 23056062 : }
1639 :
1640 : /* Called via note_stores. If X is a pseudo that is narrower than
1641 : HOST_BITS_PER_WIDE_INT and is being set, record what bits are known zero.
1642 :
1643 : If we are setting only a portion of X and we can't figure out what
1644 : portion, assume all bits will be used since we don't know what will
1645 : be happening.
1646 :
1647 : Similarly, set how many bits of X are known to be copies of the sign bit
1648 : at all locations in the function. This is the smallest number implied
1649 : by any set of X. */
1650 :
1651 : static void
1652 71751623 : set_nonzero_bits_and_sign_copies (rtx x, const_rtx set, void *data)
1653 : {
1654 71751623 : rtx_insn *insn = (rtx_insn *) data;
1655 71751623 : scalar_int_mode mode;
1656 :
1657 71751623 : if (REG_P (x)
1658 57738706 : && REGNO (x) >= FIRST_PSEUDO_REGISTER
1659 : /* If this register is undefined at the start of the file, we can't
1660 : say what its contents were. */
1661 57389364 : && ! REGNO_REG_SET_P
1662 : (DF_LR_IN (ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb), REGNO (x))
1663 28596640 : && is_a <scalar_int_mode> (GET_MODE (x), &mode)
1664 95712149 : && HWI_COMPUTABLE_MODE_P (mode))
1665 : {
1666 23255617 : reg_stat_type *rsp = ®_stat[REGNO (x)];
1667 :
1668 23255617 : if (set == 0 || GET_CODE (set) == CLOBBER)
1669 : {
1670 22413 : rsp->nonzero_bits = GET_MODE_MASK (mode);
1671 22413 : rsp->sign_bit_copies = 1;
1672 22413 : return;
1673 : }
1674 :
1675 : /* If this register is being initialized using itself, and the
1676 : register is uninitialized in this basic block, and there are
1677 : no LOG_LINKS which set the register, then part of the
1678 : register is uninitialized. In that case we can't assume
1679 : anything about the number of nonzero bits.
1680 :
1681 : ??? We could do better if we checked this in
1682 : reg_{nonzero_bits,num_sign_bit_copies}_for_combine. Then we
1683 : could avoid making assumptions about the insn which initially
1684 : sets the register, while still using the information in other
1685 : insns. We would have to be careful to check every insn
1686 : involved in the combination. */
1687 :
1688 23233204 : if (insn
1689 21820522 : && reg_referenced_p (x, PATTERN (insn))
1690 25767736 : && !REGNO_REG_SET_P (DF_LR_IN (BLOCK_FOR_INSN (insn)),
1691 : REGNO (x)))
1692 : {
1693 249119 : struct insn_link *link;
1694 :
1695 371516 : FOR_EACH_LOG_LINK (link, insn)
1696 287528 : if (dead_or_set_p (link->insn, x))
1697 : break;
1698 249119 : if (!link)
1699 : {
1700 83988 : rsp->nonzero_bits = GET_MODE_MASK (mode);
1701 83988 : rsp->sign_bit_copies = 1;
1702 83988 : return;
1703 : }
1704 : }
1705 :
1706 : /* If this is a complex assignment, see if we can convert it into a
1707 : simple assignment. */
1708 23149216 : set = expand_field_assignment (set);
1709 :
1710 : /* If this is a simple assignment, or we have a paradoxical SUBREG,
1711 : set what we know about X. */
1712 :
1713 23149216 : if (SET_DEST (set) == x
1714 23149216 : || (paradoxical_subreg_p (SET_DEST (set))
1715 4299 : && SUBREG_REG (SET_DEST (set)) == x))
1716 23056062 : update_rsp_from_reg_equal (rsp, insn, set, x);
1717 : else
1718 : {
1719 93154 : rsp->nonzero_bits = GET_MODE_MASK (mode);
1720 93154 : rsp->sign_bit_copies = 1;
1721 : }
1722 : }
1723 : }
1724 :
1725 : /* See if INSN can be combined into I3. PRED, PRED2, SUCC and SUCC2 are
1726 : optionally insns that were previously combined into I3 or that will be
1727 : combined into the merger of INSN and I3. The order is PRED, PRED2,
1728 : INSN, SUCC, SUCC2, I3.
1729 :
1730 : Return false if the combination is not allowed for any reason.
1731 :
1732 : If the combination is allowed, *PDEST will be set to the single
1733 : destination of INSN and *PSRC to the single source, and this function
1734 : will return true. */
1735 :
1736 : static bool
1737 60008885 : can_combine_p (rtx_insn *insn, rtx_insn *i3, rtx_insn *pred ATTRIBUTE_UNUSED,
1738 : rtx_insn *pred2 ATTRIBUTE_UNUSED, rtx_insn *succ, rtx_insn *succ2,
1739 : rtx *pdest, rtx *psrc)
1740 : {
1741 60008885 : int i;
1742 60008885 : const_rtx set = 0;
1743 60008885 : rtx src, dest;
1744 60008885 : rtx_insn *p;
1745 60008885 : rtx link;
1746 60008885 : bool all_adjacent = true;
1747 60008885 : bool (*is_volatile_p) (const_rtx);
1748 :
1749 60008885 : if (succ)
1750 : {
1751 13961108 : if (succ2)
1752 : {
1753 2029804 : if (next_active_insn (succ2) != i3)
1754 191212 : all_adjacent = false;
1755 2029804 : if (next_active_insn (succ) != succ2)
1756 1985391 : all_adjacent = false;
1757 : }
1758 11931304 : else if (next_active_insn (succ) != i3)
1759 1985391 : all_adjacent = false;
1760 13961108 : if (next_active_insn (insn) != succ)
1761 16747859 : all_adjacent = false;
1762 : }
1763 46047777 : else if (next_active_insn (insn) != i3)
1764 16747859 : all_adjacent = false;
1765 :
1766 : /* Can combine only if previous insn is a SET of a REG or a SUBREG,
1767 : or a PARALLEL consisting of such a SET and CLOBBERs.
1768 :
1769 : If INSN has CLOBBER parallel parts, ignore them for our processing.
1770 : By definition, these happen during the execution of the insn. When it
1771 : is merged with another insn, all bets are off. If they are, in fact,
1772 : needed and aren't also supplied in I3, they may be added by
1773 : recog_for_combine. Otherwise, it won't match.
1774 :
1775 : We can also ignore a SET whose SET_DEST is mentioned in a REG_UNUSED
1776 : note.
1777 :
1778 : Get the source and destination of INSN. If more than one, can't
1779 : combine. */
1780 :
1781 60008885 : if (GET_CODE (PATTERN (insn)) == SET)
1782 : set = PATTERN (insn);
1783 15683062 : else if (GET_CODE (PATTERN (insn)) == PARALLEL
1784 15683062 : && GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == SET)
1785 : {
1786 46964464 : for (i = 0; i < XVECLEN (PATTERN (insn), 0); i++)
1787 : {
1788 31801912 : rtx elt = XVECEXP (PATTERN (insn), 0, i);
1789 :
1790 31801912 : switch (GET_CODE (elt))
1791 : {
1792 : /* This is important to combine floating point insns
1793 : for the SH4 port. */
1794 125454 : case USE:
1795 : /* Combining an isolated USE doesn't make sense.
1796 : We depend here on combinable_i3pat to reject them. */
1797 : /* The code below this loop only verifies that the inputs of
1798 : the SET in INSN do not change. We call reg_set_between_p
1799 : to verify that the REG in the USE does not change between
1800 : I3 and INSN.
1801 : If the USE in INSN was for a pseudo register, the matching
1802 : insn pattern will likely match any register; combining this
1803 : with any other USE would only be safe if we knew that the
1804 : used registers have identical values, or if there was
1805 : something to tell them apart, e.g. different modes. For
1806 : now, we forgo such complicated tests and simply disallow
1807 : combining of USES of pseudo registers with any other USE. */
1808 125454 : if (REG_P (XEXP (elt, 0))
1809 125454 : && GET_CODE (PATTERN (i3)) == PARALLEL)
1810 : {
1811 227 : rtx i3pat = PATTERN (i3);
1812 227 : int i = XVECLEN (i3pat, 0) - 1;
1813 227 : unsigned int regno = REGNO (XEXP (elt, 0));
1814 :
1815 465 : do
1816 : {
1817 465 : rtx i3elt = XVECEXP (i3pat, 0, i);
1818 :
1819 465 : if (GET_CODE (i3elt) == USE
1820 209 : && REG_P (XEXP (i3elt, 0))
1821 701 : && (REGNO (XEXP (i3elt, 0)) == regno
1822 182 : ? reg_set_between_p (XEXP (elt, 0),
1823 27 : PREV_INSN (insn), i3)
1824 : : regno >= FIRST_PSEUDO_REGISTER))
1825 182 : return false;
1826 : }
1827 283 : while (--i >= 0);
1828 : }
1829 : break;
1830 :
1831 : /* We can ignore CLOBBERs. */
1832 : case CLOBBER:
1833 : break;
1834 :
1835 16265023 : case SET:
1836 : /* Ignore SETs whose result isn't used but not those that
1837 : have side-effects. */
1838 16265023 : if (find_reg_note (insn, REG_UNUSED, SET_DEST (elt))
1839 182733 : && insn_nothrow_p (insn)
1840 16434579 : && !side_effects_p (elt))
1841 : break;
1842 :
1843 : /* If we have already found a SET, this is a second one and
1844 : so we cannot combine with this insn. */
1845 16178071 : if (set)
1846 : return false;
1847 :
1848 : set = elt;
1849 : break;
1850 :
1851 : default:
1852 : /* Anything else means we can't combine. */
1853 : return false;
1854 : }
1855 : }
1856 :
1857 15162552 : if (set == 0
1858 : /* If SET_SRC is an ASM_OPERANDS we can't throw away these CLOBBERs,
1859 : so don't do anything with it. */
1860 15162552 : || GET_CODE (SET_SRC (set)) == ASM_OPERANDS)
1861 : return false;
1862 : }
1863 : else
1864 : return false;
1865 :
1866 : if (set == 0)
1867 : return false;
1868 :
1869 : /* The simplification in expand_field_assignment may call back to
1870 : get_last_value, so set safe guard here. */
1871 59469475 : subst_low_luid = DF_INSN_LUID (insn);
1872 :
1873 59469475 : set = expand_field_assignment (set);
1874 59469475 : src = SET_SRC (set), dest = SET_DEST (set);
1875 :
1876 : /* Do not eliminate user-specified register if it is in an
1877 : asm input because we may break the register asm usage defined
1878 : in GCC manual if allow to do so.
1879 : Be aware that this may cover more cases than we expect but this
1880 : should be harmless. */
1881 58951884 : if (REG_P (dest) && REG_USERVAR_P (dest) && HARD_REGISTER_P (dest)
1882 59469478 : && extract_asm_operands (PATTERN (i3)))
1883 : return false;
1884 :
1885 : /* Don't eliminate a store in the stack pointer. */
1886 59469475 : if (dest == stack_pointer_rtx
1887 : /* Don't combine with an insn that sets a register to itself if it has
1888 : a REG_EQUAL note. This may be part of a LIBCALL sequence. */
1889 57569839 : || (rtx_equal_p (src, dest) && find_reg_note (insn, REG_EQUAL, NULL_RTX))
1890 : /* Can't merge an ASM_OPERANDS. */
1891 57569839 : || GET_CODE (src) == ASM_OPERANDS
1892 : /* Can't merge a function call. */
1893 57566225 : || GET_CODE (src) == CALL
1894 : /* Don't eliminate a function call argument. */
1895 57566225 : || (CALL_P (i3)
1896 8744343 : && (find_reg_fusage (i3, USE, dest)
1897 165956 : || (REG_P (dest)
1898 165956 : && REGNO (dest) < FIRST_PSEUDO_REGISTER
1899 283 : && global_regs[REGNO (dest)])))
1900 : /* Don't substitute into an incremented register. */
1901 : || FIND_REG_INC_NOTE (i3, dest)
1902 : || (succ && FIND_REG_INC_NOTE (succ, dest))
1903 57566225 : || (succ2 && FIND_REG_INC_NOTE (succ2, dest))
1904 : /* Don't substitute into a non-local goto, this confuses CFG. */
1905 48987835 : || (JUMP_P (i3) && find_reg_note (i3, REG_NON_LOCAL_GOTO, NULL_RTX))
1906 : /* Make sure that DEST is not used after INSN but before SUCC, or
1907 : after SUCC and before SUCC2, or after SUCC2 but before I3. */
1908 48987114 : || (!all_adjacent
1909 12017713 : && ((succ2
1910 902333 : && (reg_used_between_p (dest, succ2, i3)
1911 881889 : || reg_used_between_p (dest, succ, succ2)))
1912 11965004 : || (!succ2 && succ && reg_used_between_p (dest, succ, i3))
1913 11699118 : || (!succ2 && !succ && reg_used_between_p (dest, insn, i3))
1914 11699118 : || (succ
1915 : /* SUCC and SUCC2 can be split halves from a PARALLEL; in
1916 : that case SUCC is not in the insn stream, so use SUCC2
1917 : instead for this test. */
1918 9621090 : && reg_used_between_p (dest, insn,
1919 : succ2
1920 849624 : && INSN_UID (succ) == INSN_UID (succ2)
1921 : ? succ2 : succ))))
1922 : /* Make sure that the value that is to be substituted for the register
1923 : does not use any registers whose values alter in between. However,
1924 : If the insns are adjacent, a use can't cross a set even though we
1925 : think it might (this can happen for a sequence of insns each setting
1926 : the same destination; last_set of that register might point to
1927 : a NOTE). If INSN has a REG_EQUIV note, the register is always
1928 : equivalent to the memory so the substitution is valid even if there
1929 : are intervening stores. Also, don't move a volatile asm or
1930 : UNSPEC_VOLATILE across any other insns. */
1931 : || (! all_adjacent
1932 11699118 : && (((!MEM_P (src)
1933 3291132 : || ! find_reg_note (insn, REG_EQUIV, src))
1934 11584926 : && modified_between_p (src, insn, i3))
1935 10678405 : || (GET_CODE (src) == ASM_OPERANDS && MEM_VOLATILE_P (src))
1936 10678405 : || GET_CODE (src) == UNSPEC_VOLATILE))
1937 : /* Don't combine across a CALL_INSN, because that would possibly
1938 : change whether the life span of some REGs crosses calls or not,
1939 : and it is a pain to update that information.
1940 : Exception: if source is a constant, moving it later can't hurt.
1941 : Accept that as a special case. */
1942 107106675 : || (DF_INSN_LUID (insn) < last_call_luid && ! CONSTANT_P (src)))
1943 12158439 : return false;
1944 :
1945 : /* DEST must be a REG. */
1946 47311036 : if (REG_P (dest))
1947 : {
1948 : /* If register alignment is being enforced for multi-word items in all
1949 : cases except for parameters, it is possible to have a register copy
1950 : insn referencing a hard register that is not allowed to contain the
1951 : mode being copied and which would not be valid as an operand of most
1952 : insns. Eliminate this problem by not combining with such an insn.
1953 :
1954 : Also, on some machines we don't want to extend the life of a hard
1955 : register. */
1956 :
1957 46798236 : if (REG_P (src)
1958 46798236 : && ((REGNO (dest) < FIRST_PSEUDO_REGISTER
1959 29331 : && !targetm.hard_regno_mode_ok (REGNO (dest), GET_MODE (dest)))
1960 : /* Don't extend the life of a hard register unless it is
1961 : user variable (if we have few registers) or it can't
1962 : fit into the desired register (meaning something special
1963 : is going on).
1964 : Also avoid substituting a return register into I3, because
1965 : reload can't handle a conflict with constraints of other
1966 : inputs. */
1967 2520738 : || (REGNO (src) < FIRST_PSEUDO_REGISTER
1968 37202 : && !targetm.hard_regno_mode_ok (REGNO (src),
1969 37202 : GET_MODE (src)))))
1970 0 : return false;
1971 : }
1972 : else
1973 : return false;
1974 :
1975 :
1976 46798236 : if (GET_CODE (PATTERN (i3)) == PARALLEL)
1977 34321701 : for (i = XVECLEN (PATTERN (i3), 0) - 1; i >= 0; i--)
1978 23134823 : if (GET_CODE (XVECEXP (PATTERN (i3), 0, i)) == CLOBBER)
1979 : {
1980 10869452 : rtx reg = XEXP (XVECEXP (PATTERN (i3), 0, i), 0);
1981 :
1982 : /* If the clobber represents an earlyclobber operand, we must not
1983 : substitute an expression containing the clobbered register.
1984 : As we do not analyze the constraint strings here, we have to
1985 : make the conservative assumption. However, if the register is
1986 : a fixed hard reg, the clobber cannot represent any operand;
1987 : we leave it up to the machine description to either accept or
1988 : reject use-and-clobber patterns. */
1989 10869452 : if (!REG_P (reg)
1990 10532912 : || REGNO (reg) >= FIRST_PSEUDO_REGISTER
1991 21355206 : || !fixed_regs[REGNO (reg)])
1992 421998 : if (reg_overlap_mentioned_p (reg, src))
1993 : return false;
1994 : }
1995 :
1996 : /* If INSN contains anything volatile, or is an `asm' (whether volatile
1997 : or not), reject, unless nothing volatile comes between it and I3 */
1998 :
1999 46797572 : if (GET_CODE (src) == ASM_OPERANDS || volatile_refs_p (src))
2000 : {
2001 : /* Make sure neither succ nor succ2 contains a volatile reference. */
2002 696108 : if (succ2 != 0 && volatile_refs_p (PATTERN (succ2)))
2003 : return false;
2004 696015 : if (succ != 0 && volatile_refs_p (PATTERN (succ)))
2005 : return false;
2006 : /* We'll check insns between INSN and I3 below. */
2007 : }
2008 :
2009 : /* If INSN is an asm, and DEST is a hard register, reject, since it has
2010 : to be an explicit register variable, and was chosen for a reason. */
2011 :
2012 46761772 : if (GET_CODE (src) == ASM_OPERANDS
2013 46761772 : && REG_P (dest) && REGNO (dest) < FIRST_PSEUDO_REGISTER)
2014 : return false;
2015 :
2016 : /* If INSN contains volatile references (specifically volatile MEMs),
2017 : we cannot combine across any other volatile references.
2018 : Even if INSN doesn't contain volatile references, any intervening
2019 : volatile insn might affect machine state. */
2020 :
2021 92862368 : is_volatile_p = volatile_refs_p (PATTERN (insn))
2022 46761772 : ? volatile_refs_p
2023 : : volatile_insn_p;
2024 :
2025 207388127 : for (p = NEXT_INSN (insn); p != i3; p = NEXT_INSN (p))
2026 114069814 : if (INSN_P (p) && p != succ && p != succ2 && is_volatile_p (PATTERN (p)))
2027 : return false;
2028 :
2029 : /* If INSN contains an autoincrement or autodecrement, make sure that
2030 : register is not used between there and I3, and not already used in
2031 : I3 either. Neither must it be used in PRED or SUCC, if they exist.
2032 : Also insist that I3 not be a jump if using LRA; if it were one
2033 : and the incremented register were spilled, we would lose.
2034 : Reload handles this correctly. */
2035 :
2036 46556541 : if (AUTO_INC_DEC)
2037 : for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
2038 : if (REG_NOTE_KIND (link) == REG_INC
2039 : && ((JUMP_P (i3) && targetm.lra_p ())
2040 : || reg_used_between_p (XEXP (link, 0), insn, i3)
2041 : || (pred != NULL_RTX
2042 : && reg_overlap_mentioned_p (XEXP (link, 0), PATTERN (pred)))
2043 : || (pred2 != NULL_RTX
2044 : && reg_overlap_mentioned_p (XEXP (link, 0), PATTERN (pred2)))
2045 : || (succ != NULL_RTX
2046 : && reg_overlap_mentioned_p (XEXP (link, 0), PATTERN (succ)))
2047 : || (succ2 != NULL_RTX
2048 : && reg_overlap_mentioned_p (XEXP (link, 0), PATTERN (succ2)))
2049 : || reg_overlap_mentioned_p (XEXP (link, 0), PATTERN (i3))))
2050 : return false;
2051 :
2052 : /* If we get here, we have passed all the tests and the combination is
2053 : to be allowed. */
2054 :
2055 46556541 : *pdest = dest;
2056 46556541 : *psrc = src;
2057 :
2058 46556541 : return true;
2059 : }
2060 :
2061 : /* LOC is the location within I3 that contains its pattern or the component
2062 : of a PARALLEL of the pattern. We validate that it is valid for combining.
2063 :
2064 : One problem is if I3 modifies its output, as opposed to replacing it
2065 : entirely, we can't allow the output to contain I2DEST, I1DEST or I0DEST as
2066 : doing so would produce an insn that is not equivalent to the original insns.
2067 :
2068 : Consider:
2069 :
2070 : (set (reg:DI 101) (reg:DI 100))
2071 : (set (subreg:SI (reg:DI 101) 0) <foo>)
2072 :
2073 : This is NOT equivalent to:
2074 :
2075 : (parallel [(set (subreg:SI (reg:DI 100) 0) <foo>)
2076 : (set (reg:DI 101) (reg:DI 100))])
2077 :
2078 : Not only does this modify 100 (in which case it might still be valid
2079 : if 100 were dead in I2), it sets 101 to the ORIGINAL value of 100.
2080 :
2081 : We can also run into a problem if I2 sets a register that I1
2082 : uses and I1 gets directly substituted into I3 (not via I2). In that
2083 : case, we would be getting the wrong value of I2DEST into I3, so we
2084 : must reject the combination. This case occurs when I2 and I1 both
2085 : feed into I3, rather than when I1 feeds into I2, which feeds into I3.
2086 : If I1_NOT_IN_SRC is nonzero, it means that finding I1 in the source
2087 : of a SET must prevent combination from occurring. The same situation
2088 : can occur for I0, in which case I0_NOT_IN_SRC is set.
2089 :
2090 : Before doing the above check, we first try to expand a field assignment
2091 : into a set of logical operations.
2092 :
2093 : If PI3_DEST_KILLED is nonzero, it is a pointer to a location in which
2094 : we place a register that is both set and used within I3. If more than one
2095 : such register is detected, we fail.
2096 :
2097 : Return true if the combination is valid, false otherwise. */
2098 :
2099 : static bool
2100 66349173 : combinable_i3pat (rtx_insn *i3, rtx *loc, rtx i2dest, rtx i1dest, rtx i0dest,
2101 : bool i1_not_in_src, bool i0_not_in_src, rtx *pi3dest_killed)
2102 : {
2103 66349173 : rtx x = *loc;
2104 :
2105 66349173 : if (GET_CODE (x) == SET)
2106 : {
2107 45119120 : rtx set = x ;
2108 45119120 : rtx dest = SET_DEST (set);
2109 45119120 : rtx src = SET_SRC (set);
2110 45119120 : rtx inner_dest = dest;
2111 45119120 : rtx subdest;
2112 :
2113 45119120 : while (GET_CODE (inner_dest) == STRICT_LOW_PART
2114 45607767 : || GET_CODE (inner_dest) == SUBREG
2115 45607767 : || GET_CODE (inner_dest) == ZERO_EXTRACT)
2116 488647 : inner_dest = XEXP (inner_dest, 0);
2117 :
2118 : /* Check for the case where I3 modifies its output, as discussed
2119 : above. We don't want to prevent pseudos from being combined
2120 : into the address of a MEM, so only prevent the combination if
2121 : i1 or i2 set the same MEM. */
2122 468171 : if ((inner_dest != dest &&
2123 : (!MEM_P (inner_dest)
2124 793 : || rtx_equal_p (i2dest, inner_dest)
2125 793 : || (i1dest && rtx_equal_p (i1dest, inner_dest))
2126 793 : || (i0dest && rtx_equal_p (i0dest, inner_dest)))
2127 467378 : && (reg_overlap_mentioned_p (i2dest, inner_dest)
2128 347500 : || (i1dest && reg_overlap_mentioned_p (i1dest, inner_dest))
2129 346253 : || (i0dest && reg_overlap_mentioned_p (i0dest, inner_dest))))
2130 :
2131 : /* This is the same test done in can_combine_p except we can't test
2132 : all_adjacent; we don't have to, since this instruction will stay
2133 : in place, thus we are not considering increasing the lifetime of
2134 : INNER_DEST.
2135 :
2136 : Also, if this insn sets a function argument, combining it with
2137 : something that might need a spill could clobber a previous
2138 : function argument; the all_adjacent test in can_combine_p also
2139 : checks this; here, we do a more specific test for this case. */
2140 :
2141 44997886 : || (REG_P (inner_dest)
2142 28591380 : && REGNO (inner_dest) < FIRST_PSEUDO_REGISTER
2143 7177925 : && !targetm.hard_regno_mode_ok (REGNO (inner_dest),
2144 7177925 : GET_MODE (inner_dest)))
2145 44997886 : || (i1_not_in_src && reg_overlap_mentioned_p (i1dest, src))
2146 90110288 : || (i0_not_in_src && reg_overlap_mentioned_p (i0dest, src)))
2147 151057 : return false;
2148 :
2149 : /* If DEST is used in I3, it is being killed in this insn, so
2150 : record that for later. We have to consider paradoxical
2151 : subregs here, since they kill the whole register, but we
2152 : ignore partial subregs, STRICT_LOW_PART, etc.
2153 : Never add REG_DEAD notes for the FRAME_POINTER_REGNUM or the
2154 : STACK_POINTER_REGNUM, since these are always considered to be
2155 : live. Similarly for ARG_POINTER_REGNUM if it is fixed. */
2156 44968063 : subdest = dest;
2157 44968063 : if (GET_CODE (subdest) == SUBREG && !partial_subreg_p (subdest))
2158 247574 : subdest = SUBREG_REG (subdest);
2159 44968063 : if (pi3dest_killed
2160 32793513 : && REG_P (subdest)
2161 20594419 : && reg_referenced_p (subdest, PATTERN (i3))
2162 1146588 : && REGNO (subdest) != FRAME_POINTER_REGNUM
2163 1146588 : && (HARD_FRAME_POINTER_IS_FRAME_POINTER
2164 1146588 : || REGNO (subdest) != HARD_FRAME_POINTER_REGNUM)
2165 1146588 : && (FRAME_POINTER_REGNUM == ARG_POINTER_REGNUM
2166 1146588 : || (REGNO (subdest) != ARG_POINTER_REGNUM
2167 0 : || ! fixed_regs [REGNO (subdest)]))
2168 46114651 : && REGNO (subdest) != STACK_POINTER_REGNUM)
2169 : {
2170 1108565 : if (*pi3dest_killed)
2171 : return false;
2172 :
2173 1086636 : *pi3dest_killed = subdest;
2174 : }
2175 : }
2176 :
2177 21230053 : else if (GET_CODE (x) == PARALLEL)
2178 : {
2179 : int i;
2180 :
2181 32318415 : for (i = 0; i < XVECLEN (x, 0); i++)
2182 21780179 : if (! combinable_i3pat (i3, &XVECEXP (x, 0, i), i2dest, i1dest, i0dest,
2183 : i1_not_in_src, i0_not_in_src, pi3dest_killed))
2184 : return false;
2185 : }
2186 :
2187 : return true;
2188 : }
2189 :
2190 : /* Return true if X is an arithmetic expression that contains a multiplication
2191 : and division. We don't count multiplications by powers of two here. */
2192 :
2193 : static bool
2194 16840464 : contains_muldiv (rtx x)
2195 : {
2196 17521882 : switch (GET_CODE (x))
2197 : {
2198 : case MOD: case DIV: case UMOD: case UDIV:
2199 : return true;
2200 :
2201 471270 : case MULT:
2202 471270 : return ! (CONST_INT_P (XEXP (x, 1))
2203 126368 : && pow2p_hwi (UINTVAL (XEXP (x, 1))));
2204 16890675 : default:
2205 16890675 : if (BINARY_P (x))
2206 5790901 : return contains_muldiv (XEXP (x, 0))
2207 5790901 : || contains_muldiv (XEXP (x, 1));
2208 :
2209 11099774 : if (UNARY_P (x))
2210 681418 : return contains_muldiv (XEXP (x, 0));
2211 :
2212 : return false;
2213 : }
2214 : }
2215 :
2216 : /* Determine whether INSN can be used in a combination. Return true if
2217 : not. This is used in try_combine to detect early some cases where we
2218 : can't perform combinations. */
2219 :
2220 : static bool
2221 162082616 : cant_combine_insn_p (rtx_insn *insn)
2222 : {
2223 162082616 : rtx set;
2224 162082616 : rtx src, dest;
2225 :
2226 : /* If this isn't really an insn, we can't do anything.
2227 : This can occur when flow deletes an insn that it has merged into an
2228 : auto-increment address. */
2229 162082616 : if (!NONDEBUG_INSN_P (insn))
2230 : return true;
2231 :
2232 : /* Never combine loads and stores involving hard regs that are likely
2233 : to be spilled. The register allocator can usually handle such
2234 : reg-reg moves by tying. If we allow the combiner to make
2235 : substitutions of likely-spilled regs, reload might die.
2236 : As an exception, we allow combinations involving fixed regs; these are
2237 : not available to the register allocator so there's no risk involved. */
2238 :
2239 162082204 : set = single_set (insn);
2240 162082204 : if (! set)
2241 : return false;
2242 149308074 : src = SET_SRC (set);
2243 149308074 : dest = SET_DEST (set);
2244 149308074 : if (GET_CODE (src) == SUBREG)
2245 990614 : src = SUBREG_REG (src);
2246 149308074 : if (GET_CODE (dest) == SUBREG)
2247 1589195 : dest = SUBREG_REG (dest);
2248 40257811 : if (REG_P (src) && REG_P (dest)
2249 182800021 : && ((HARD_REGISTER_P (src)
2250 6620602 : && ! TEST_HARD_REG_BIT (fixed_reg_set, REGNO (src))
2251 : #ifdef LEAF_REGISTERS
2252 : && ! LEAF_REGISTERS [REGNO (src)])
2253 : #else
2254 : )
2255 : #endif
2256 27174545 : || (HARD_REGISTER_P (dest)
2257 19166976 : && ! TEST_HARD_REG_BIT (fixed_reg_set, REGNO (dest))
2258 18898720 : && targetm.class_likely_spilled_p (REGNO_REG_CLASS (REGNO (dest))))))
2259 23674229 : return true;
2260 :
2261 : return false;
2262 : }
2263 :
2264 : struct likely_spilled_retval_info
2265 : {
2266 : unsigned regno, nregs;
2267 : unsigned mask;
2268 : };
2269 :
2270 : /* Called via note_stores by likely_spilled_retval_p. Remove from info->mask
2271 : hard registers that are known to be written to / clobbered in full. */
2272 : static void
2273 160321 : likely_spilled_retval_1 (rtx x, const_rtx set, void *data)
2274 : {
2275 160321 : struct likely_spilled_retval_info *const info =
2276 : (struct likely_spilled_retval_info *) data;
2277 160321 : unsigned regno, nregs;
2278 160321 : unsigned new_mask;
2279 :
2280 160321 : if (!REG_P (XEXP (set, 0)))
2281 : return;
2282 160321 : regno = REGNO (x);
2283 160321 : if (regno >= info->regno + info->nregs)
2284 : return;
2285 160321 : nregs = REG_NREGS (x);
2286 160321 : if (regno + nregs <= info->regno)
2287 : return;
2288 160321 : new_mask = (2U << (nregs - 1)) - 1;
2289 160321 : if (regno < info->regno)
2290 0 : new_mask >>= info->regno - regno;
2291 : else
2292 160321 : new_mask <<= regno - info->regno;
2293 160321 : info->mask &= ~new_mask;
2294 : }
2295 :
2296 : /* Return true iff part of the return value is live during INSN, and
2297 : it is likely spilled. This can happen when more than one insn is needed
2298 : to copy the return value, e.g. when we consider to combine into the
2299 : second copy insn for a complex value. */
2300 :
2301 : static bool
2302 46385505 : likely_spilled_retval_p (rtx_insn *insn)
2303 : {
2304 46385505 : rtx_insn *use = BB_END (this_basic_block);
2305 46385505 : rtx reg;
2306 46385505 : rtx_insn *p;
2307 46385505 : unsigned regno, nregs;
2308 : /* We assume here that no machine mode needs more than
2309 : 32 hard registers when the value overlaps with a register
2310 : for which TARGET_FUNCTION_VALUE_REGNO_P is true. */
2311 46385505 : unsigned mask;
2312 46385505 : struct likely_spilled_retval_info info;
2313 :
2314 46385505 : if (!NONJUMP_INSN_P (use) || GET_CODE (PATTERN (use)) != USE || insn == use)
2315 : return false;
2316 3080845 : reg = XEXP (PATTERN (use), 0);
2317 3080845 : if (!REG_P (reg) || !targetm.calls.function_value_regno_p (REGNO (reg)))
2318 0 : return false;
2319 3080845 : regno = REGNO (reg);
2320 3080845 : nregs = REG_NREGS (reg);
2321 3080845 : if (nregs == 1)
2322 : return false;
2323 157609 : mask = (2U << (nregs - 1)) - 1;
2324 :
2325 : /* Disregard parts of the return value that are set later. */
2326 157609 : info.regno = regno;
2327 157609 : info.nregs = nregs;
2328 157609 : info.mask = mask;
2329 538523 : for (p = PREV_INSN (use); info.mask && p != insn; p = PREV_INSN (p))
2330 223305 : if (INSN_P (p))
2331 223305 : note_stores (p, likely_spilled_retval_1, &info);
2332 315218 : mask = info.mask;
2333 :
2334 : /* Check if any of the (probably) live return value registers is
2335 : likely spilled. */
2336 : nregs --;
2337 315218 : do
2338 : {
2339 315218 : if ((mask & 1 << nregs)
2340 315218 : && targetm.class_likely_spilled_p (REGNO_REG_CLASS (regno + nregs)))
2341 : return true;
2342 315208 : } while (nregs--);
2343 : return false;
2344 : }
2345 :
2346 : /* Adjust INSN after we made a change to its destination.
2347 :
2348 : Changing the destination can invalidate notes that say something about
2349 : the results of the insn and a LOG_LINK pointing to the insn. */
2350 :
2351 : static void
2352 18514 : adjust_for_new_dest (rtx_insn *insn)
2353 : {
2354 : /* For notes, be conservative and simply remove them. */
2355 18514 : remove_reg_equal_equiv_notes (insn, true);
2356 :
2357 : /* The new insn will have a destination that was previously the destination
2358 : of an insn just above it. Call distribute_links to make a LOG_LINK from
2359 : the next use of that destination. */
2360 :
2361 18514 : rtx set = single_set (insn);
2362 18514 : gcc_assert (set);
2363 :
2364 18514 : rtx reg = SET_DEST (set);
2365 :
2366 18514 : while (GET_CODE (reg) == ZERO_EXTRACT
2367 18514 : || GET_CODE (reg) == STRICT_LOW_PART
2368 37028 : || GET_CODE (reg) == SUBREG)
2369 0 : reg = XEXP (reg, 0);
2370 18514 : gcc_assert (REG_P (reg));
2371 :
2372 18514 : distribute_links (alloc_insn_link (insn, REGNO (reg), NULL));
2373 :
2374 18514 : df_insn_rescan (insn);
2375 18514 : }
2376 :
2377 : /* Return TRUE if combine can reuse reg X in mode MODE.
2378 : ADDED_SETS is trueif the original set is still required. */
2379 : static bool
2380 2604687 : can_change_dest_mode (rtx x, bool added_sets, machine_mode mode)
2381 : {
2382 2604687 : unsigned int regno;
2383 :
2384 2604687 : if (!REG_P (x))
2385 : return false;
2386 :
2387 : /* Don't change between modes with different underlying register sizes,
2388 : since this could lead to invalid subregs. */
2389 2604687 : if (maybe_ne (REGMODE_NATURAL_SIZE (mode),
2390 2604687 : REGMODE_NATURAL_SIZE (GET_MODE (x))))
2391 : return false;
2392 :
2393 2604687 : regno = REGNO (x);
2394 : /* Allow hard registers if the new mode is legal, and occupies no more
2395 : registers than the old mode. */
2396 2604687 : if (regno < FIRST_PSEUDO_REGISTER)
2397 1126522 : return (targetm.hard_regno_mode_ok (regno, mode)
2398 1126522 : && REG_NREGS (x) >= hard_regno_nregs (regno, mode));
2399 :
2400 : /* Or a pseudo that is only used once. */
2401 1478165 : return (regno < reg_n_sets_max
2402 1478144 : && REG_N_SETS (regno) == 1
2403 1427127 : && !added_sets
2404 2905292 : && !REG_USERVAR_P (x));
2405 : }
2406 :
2407 :
2408 : /* Check whether X, the destination of a set, refers to part of
2409 : the register specified by REG. */
2410 :
2411 : static bool
2412 18715 : reg_subword_p (rtx x, rtx reg)
2413 : {
2414 : /* Check that reg is an integer mode register. */
2415 18715 : if (!REG_P (reg) || GET_MODE_CLASS (GET_MODE (reg)) != MODE_INT)
2416 : return false;
2417 :
2418 18245 : if (GET_CODE (x) == STRICT_LOW_PART
2419 16364 : || GET_CODE (x) == ZERO_EXTRACT)
2420 1904 : x = XEXP (x, 0);
2421 :
2422 18245 : return GET_CODE (x) == SUBREG
2423 18048 : && !paradoxical_subreg_p (x)
2424 18048 : && SUBREG_REG (x) == reg
2425 36293 : && GET_MODE_CLASS (GET_MODE (x)) == MODE_INT;
2426 : }
2427 :
2428 : /* Return whether PAT is a PARALLEL of exactly N register SETs followed
2429 : by an arbitrary number of CLOBBERs. */
2430 : static bool
2431 99298921 : is_parallel_of_n_reg_sets (rtx pat, int n)
2432 : {
2433 99298921 : if (GET_CODE (pat) != PARALLEL)
2434 : return false;
2435 :
2436 26434902 : int len = XVECLEN (pat, 0);
2437 26434902 : if (len < n)
2438 : return false;
2439 :
2440 : int i;
2441 52515751 : for (i = 0; i < n; i++)
2442 49808705 : if (GET_CODE (XVECEXP (pat, 0, i)) != SET
2443 29441423 : || !REG_P (SET_DEST (XVECEXP (pat, 0, i))))
2444 : return false;
2445 3074698 : for ( ; i < len; i++)
2446 964027 : switch (GET_CODE (XVECEXP (pat, 0, i)))
2447 : {
2448 367653 : case CLOBBER:
2449 367653 : if (XEXP (XVECEXP (pat, 0, i), 0) == const0_rtx)
2450 : return false;
2451 367652 : break;
2452 : default:
2453 : return false;
2454 : }
2455 : return true;
2456 : }
2457 :
2458 : /* Return whether INSN, a PARALLEL of N register SETs (and maybe some
2459 : CLOBBERs), can be split into individual SETs in that order, without
2460 : changing semantics. */
2461 : static bool
2462 360039 : can_split_parallel_of_n_reg_sets (rtx_insn *insn, int n)
2463 : {
2464 360039 : if (!insn_nothrow_p (insn))
2465 : return false;
2466 :
2467 358524 : rtx pat = PATTERN (insn);
2468 :
2469 358524 : int i, j;
2470 972358 : for (i = 0; i < n; i++)
2471 : {
2472 665441 : if (side_effects_p (SET_SRC (XVECEXP (pat, 0, i))))
2473 : return false;
2474 :
2475 662333 : rtx reg = SET_DEST (XVECEXP (pat, 0, i));
2476 :
2477 969250 : for (j = i + 1; j < n; j++)
2478 355416 : if (reg_referenced_p (reg, XVECEXP (pat, 0, j)))
2479 : return false;
2480 : }
2481 :
2482 : return true;
2483 : }
2484 :
2485 : /* Return whether X is just a single_set, with the source
2486 : a general_operand. */
2487 : static bool
2488 65190522 : is_just_move (rtx_insn *x)
2489 : {
2490 65190522 : rtx set = single_set (x);
2491 65190522 : if (!set)
2492 : return false;
2493 :
2494 64782839 : return general_operand (SET_SRC (set), VOIDmode);
2495 : }
2496 :
2497 : /* Callback function to count autoincs. */
2498 :
2499 : static int
2500 1034049 : count_auto_inc (rtx, rtx, rtx, rtx, rtx, void *arg)
2501 : {
2502 1034049 : (*((int *) arg))++;
2503 :
2504 1034049 : return 0;
2505 : }
2506 :
2507 : /* Try to combine the insns I0, I1 and I2 into I3.
2508 : Here I0, I1 and I2 appear earlier than I3.
2509 : I0 and I1 can be zero; then we combine just I2 into I3, or I1 and I2 into
2510 : I3.
2511 :
2512 : If we are combining more than two insns and the resulting insn is not
2513 : recognized, try splitting it into two insns. If that happens, I2 and I3
2514 : are retained and I1/I0 are pseudo-deleted by turning them into a NOTE.
2515 : Otherwise, I0, I1 and I2 are pseudo-deleted.
2516 :
2517 : Return 0 if the combination does not work. Then nothing is changed.
2518 : If we did the combination, return the insn at which combine should
2519 : resume scanning.
2520 :
2521 : Set NEW_DIRECT_JUMP_P to true if try_combine creates a
2522 : new direct jump instruction.
2523 :
2524 : LAST_COMBINED_INSN is either I3, or some insn after I3 that has
2525 : been I3 passed to an earlier try_combine within the same basic
2526 : block. */
2527 :
2528 : static rtx_insn *
2529 93582789 : try_combine (rtx_insn *i3, rtx_insn *i2, rtx_insn *i1, rtx_insn *i0,
2530 : bool *new_direct_jump_p, rtx_insn *last_combined_insn)
2531 : {
2532 : /* New patterns for I3 and I2, respectively. */
2533 93582789 : rtx newpat, newi2pat = 0;
2534 93582789 : rtvec newpat_vec_with_clobbers = 0;
2535 93582789 : bool substed_i2 = false, substed_i1 = false, substed_i0 = false;
2536 : /* Indicates need to preserve SET in I0, I1 or I2 in I3 if it is not
2537 : dead. */
2538 93582789 : bool added_sets_0, added_sets_1, added_sets_2;
2539 : /* Total number of SETs to put into I3. */
2540 93582789 : int total_sets;
2541 : /* Nonzero if I2's or I1's body now appears in I3. */
2542 93582789 : int i2_is_used = 0, i1_is_used = 0;
2543 : /* INSN_CODEs for new I3, new I2, and user of condition code. */
2544 93582789 : int insn_code_number, i2_code_number = 0, other_code_number = 0;
2545 : /* Contains I3 if the destination of I3 is used in its source, which means
2546 : that the old life of I3 is being killed. If that usage is placed into
2547 : I2 and not in I3, a REG_DEAD note must be made. */
2548 93582789 : rtx i3dest_killed = 0;
2549 : /* SET_DEST and SET_SRC of I2, I1 and I0. */
2550 93582789 : rtx i2dest = 0, i2src = 0, i1dest = 0, i1src = 0, i0dest = 0, i0src = 0;
2551 : /* Copy of SET_SRC of I1 and I0, if needed. */
2552 93582789 : rtx i1src_copy = 0, i0src_copy = 0, i0src_copy2 = 0;
2553 : /* Set if I2DEST was reused as a scratch register. */
2554 93582789 : bool i2scratch = false;
2555 : /* The PATTERNs of I0, I1, and I2, or a copy of them in certain cases. */
2556 93582789 : rtx i0pat = 0, i1pat = 0, i2pat = 0;
2557 : /* Indicates if I2DEST or I1DEST is in I2SRC or I1_SRC. */
2558 93582789 : bool i2dest_in_i2src = false, i1dest_in_i1src = false;
2559 93582789 : bool i2dest_in_i1src = false, i0dest_in_i0src = false;
2560 93582789 : bool i1dest_in_i0src = false, i2dest_in_i0src = false;;
2561 93582789 : bool i2dest_killed = false, i1dest_killed = false, i0dest_killed = false;
2562 93582789 : bool i1_feeds_i2_n = false, i0_feeds_i2_n = false, i0_feeds_i1_n = false;
2563 : /* Notes that must be added to REG_NOTES in I3 and I2. */
2564 93582789 : rtx new_i3_notes, new_i2_notes;
2565 : /* Notes that we substituted I3 into I2 instead of the normal case. */
2566 93582789 : bool i3_subst_into_i2 = false;
2567 : /* Notes that I1, I2 or I3 is a MULT operation. */
2568 93582789 : bool have_mult = false;
2569 93582789 : bool swap_i2i3 = false;
2570 93582789 : bool split_i2i3 = false;
2571 93582789 : bool changed_i3_dest = false;
2572 93582789 : bool i2_was_move = false, i3_was_move = false;
2573 93582789 : int n_auto_inc = 0;
2574 :
2575 93582789 : int maxreg;
2576 93582789 : rtx_insn *temp_insn;
2577 93582789 : rtx temp_expr;
2578 93582789 : struct insn_link *link;
2579 93582789 : rtx other_pat = 0;
2580 93582789 : rtx new_other_notes;
2581 93582789 : int i;
2582 93582789 : scalar_int_mode dest_mode, temp_mode;
2583 93582789 : bool has_non_call_exception = false;
2584 :
2585 : /* Immediately return if any of I0,I1,I2 are the same insn (I3 can
2586 : never be). */
2587 93582789 : if (i1 == i2 || i0 == i2 || (i0 && i0 == i1))
2588 : return 0;
2589 :
2590 : /* Only try four-insn combinations when there's high likelihood of
2591 : success. Look for simple insns, such as loads of constants or
2592 : binary operations involving a constant. */
2593 21348533 : if (i0)
2594 : {
2595 21348533 : int i;
2596 21348533 : int ngood = 0;
2597 21348533 : int nshift = 0;
2598 21348533 : rtx set0, set3;
2599 :
2600 21348533 : if (!flag_expensive_optimizations)
2601 : return 0;
2602 :
2603 84824823 : for (i = 0; i < 4; i++)
2604 : {
2605 69364027 : rtx_insn *insn = i == 0 ? i0 : i == 1 ? i1 : i == 2 ? i2 : i3;
2606 69364027 : rtx set = single_set (insn);
2607 69364027 : rtx src;
2608 69364027 : if (!set)
2609 2284775 : continue;
2610 67079252 : src = SET_SRC (set);
2611 67079252 : if (CONSTANT_P (src))
2612 : {
2613 4532764 : ngood += 2;
2614 4532764 : break;
2615 : }
2616 62546488 : else if (BINARY_P (src) && CONSTANT_P (XEXP (src, 1)))
2617 7836794 : ngood++;
2618 54709694 : else if (GET_CODE (src) == IF_THEN_ELSE)
2619 2021959 : ngood++;
2620 52687735 : else if (GET_CODE (src) == ASHIFT || GET_CODE (src) == ASHIFTRT
2621 52595467 : || GET_CODE (src) == LSHIFTRT)
2622 122412 : nshift++;
2623 : }
2624 :
2625 : /* If I0 loads a memory and I3 sets the same memory, then I1 and I2
2626 : are likely manipulating its value. Ideally we'll be able to combine
2627 : all four insns into a bitfield insertion of some kind.
2628 :
2629 : Note the source in I0 might be inside a sign/zero extension and the
2630 : memory modes in I0 and I3 might be different. So extract the address
2631 : from the destination of I3 and search for it in the source of I0.
2632 :
2633 : In the event that there's a match but the source/dest do not actually
2634 : refer to the same memory, the worst that happens is we try some
2635 : combinations that we wouldn't have otherwise. */
2636 19993560 : if ((set0 = single_set (i0))
2637 : /* Ensure the source of SET0 is a MEM, possibly buried inside
2638 : an extension. */
2639 19867300 : && (GET_CODE (SET_SRC (set0)) == MEM
2640 16691294 : || ((GET_CODE (SET_SRC (set0)) == ZERO_EXTEND
2641 16691294 : || GET_CODE (SET_SRC (set0)) == SIGN_EXTEND)
2642 549565 : && GET_CODE (XEXP (SET_SRC (set0), 0)) == MEM))
2643 3285638 : && (set3 = single_set (i3))
2644 : /* Ensure the destination of SET3 is a MEM. */
2645 2849430 : && GET_CODE (SET_DEST (set3)) == MEM
2646 : /* Would it be better to extract the base address for the MEM
2647 : in SET3 and look for that? I don't have cases where it matters
2648 : but I could envision such cases. */
2649 20291712 : && rtx_referenced_p (XEXP (SET_DEST (set3), 0), SET_SRC (set0)))
2650 21325 : ngood += 2;
2651 :
2652 19993560 : if (ngood < 2 && nshift < 2)
2653 : return 0;
2654 : }
2655 :
2656 : /* Exit early if one of the insns involved can't be used for
2657 : combinations. */
2658 78861340 : if (CALL_P (i2)
2659 73908695 : || (i1 && CALL_P (i1))
2660 70520938 : || (i0 && CALL_P (i0))
2661 70060146 : || cant_combine_insn_p (i3)
2662 66831984 : || cant_combine_insn_p (i2)
2663 51461221 : || (i1 && cant_combine_insn_p (i1))
2664 46613126 : || (i0 && cant_combine_insn_p (i0))
2665 125246845 : || likely_spilled_retval_p (i3))
2666 32475845 : return 0;
2667 :
2668 46385495 : combine_attempts++;
2669 46385495 : undobuf.other_insn = 0;
2670 :
2671 : /* Reset the hard register usage information. */
2672 46385495 : CLEAR_HARD_REG_SET (newpat_used_regs);
2673 :
2674 46385495 : if (dump_file && (dump_flags & TDF_DETAILS))
2675 : {
2676 174 : if (i0)
2677 20 : fprintf (dump_file, "\nTrying %d, %d, %d -> %d:\n",
2678 20 : INSN_UID (i0), INSN_UID (i1), INSN_UID (i2), INSN_UID (i3));
2679 154 : else if (i1)
2680 26 : fprintf (dump_file, "\nTrying %d, %d -> %d:\n",
2681 26 : INSN_UID (i1), INSN_UID (i2), INSN_UID (i3));
2682 : else
2683 128 : fprintf (dump_file, "\nTrying %d -> %d:\n",
2684 128 : INSN_UID (i2), INSN_UID (i3));
2685 :
2686 174 : if (i0)
2687 20 : dump_insn_slim (dump_file, i0);
2688 174 : if (i1)
2689 46 : dump_insn_slim (dump_file, i1);
2690 174 : dump_insn_slim (dump_file, i2);
2691 174 : dump_insn_slim (dump_file, i3);
2692 : }
2693 :
2694 : /* If multiple insns feed into one of I2 or I3, they can be in any
2695 : order. To simplify the code below, reorder them in sequence. */
2696 46385495 : if (i0 && DF_INSN_LUID (i0) > DF_INSN_LUID (i2))
2697 : std::swap (i0, i2);
2698 46385495 : if (i0 && DF_INSN_LUID (i0) > DF_INSN_LUID (i1))
2699 : std::swap (i0, i1);
2700 46385495 : if (i1 && DF_INSN_LUID (i1) > DF_INSN_LUID (i2))
2701 : std::swap (i1, i2);
2702 :
2703 46385495 : added_links_insn = 0;
2704 46385495 : added_notes_insn = 0;
2705 :
2706 : /* First check for one important special case that the code below will
2707 : not handle. Namely, the case where I1 is zero, I2 is a PARALLEL
2708 : and I3 is a SET whose SET_SRC is a SET_DEST in I2. In that case,
2709 : we may be able to replace that destination with the destination of I3.
2710 : This occurs in the common code where we compute both a quotient and
2711 : remainder into a structure, in which case we want to do the computation
2712 : directly into the structure to avoid register-register copies.
2713 :
2714 : Note that this case handles both multiple sets in I2 and also cases
2715 : where I2 has a number of CLOBBERs inside the PARALLEL.
2716 :
2717 : We make very conservative checks below and only try to handle the
2718 : most common cases of this. For example, we only handle the case
2719 : where I2 and I3 are adjacent to avoid making difficult register
2720 : usage tests. */
2721 :
2722 29010766 : if (i1 == 0 && NONJUMP_INSN_P (i3) && GET_CODE (PATTERN (i3)) == SET
2723 15171824 : && REG_P (SET_SRC (PATTERN (i3)))
2724 5155864 : && REGNO (SET_SRC (PATTERN (i3))) >= FIRST_PSEUDO_REGISTER
2725 4946752 : && find_reg_note (i3, REG_DEAD, SET_SRC (PATTERN (i3)))
2726 4070184 : && GET_CODE (PATTERN (i2)) == PARALLEL
2727 1064340 : && ! side_effects_p (SET_DEST (PATTERN (i3)))
2728 : /* If the dest of I3 is a ZERO_EXTRACT or STRICT_LOW_PART, the code
2729 : below would need to check what is inside (and reg_overlap_mentioned_p
2730 : doesn't support those codes anyway). Don't allow those destinations;
2731 : the resulting insn isn't likely to be recognized anyway. */
2732 574119 : && GET_CODE (SET_DEST (PATTERN (i3))) != ZERO_EXTRACT
2733 574099 : && GET_CODE (SET_DEST (PATTERN (i3))) != STRICT_LOW_PART
2734 572911 : && ! reg_overlap_mentioned_p (SET_SRC (PATTERN (i3)),
2735 572911 : SET_DEST (PATTERN (i3)))
2736 46958289 : && next_active_insn (i2) == i3)
2737 : {
2738 352797 : rtx p2 = PATTERN (i2);
2739 :
2740 : /* Make sure that the destination of I3,
2741 : which we are going to substitute into one output of I2,
2742 : is not used within another output of I2. We must avoid making this:
2743 : (parallel [(set (mem (reg 69)) ...)
2744 : (set (reg 69) ...)])
2745 : which is not well-defined as to order of actions.
2746 : (Besides, reload can't handle output reloads for this.)
2747 :
2748 : The problem can also happen if the dest of I3 is a memory ref,
2749 : if another dest in I2 is an indirect memory ref.
2750 :
2751 : Neither can this PARALLEL be an asm. We do not allow combining
2752 : that usually (see can_combine_p), so do not here either. */
2753 352797 : bool ok = true;
2754 1070389 : for (i = 0; ok && i < XVECLEN (p2, 0); i++)
2755 : {
2756 717592 : if ((GET_CODE (XVECEXP (p2, 0, i)) == SET
2757 352314 : || GET_CODE (XVECEXP (p2, 0, i)) == CLOBBER)
2758 1433797 : && reg_overlap_mentioned_p (SET_DEST (PATTERN (i3)),
2759 716205 : SET_DEST (XVECEXP (p2, 0, i))))
2760 : ok = false;
2761 716821 : else if (GET_CODE (XVECEXP (p2, 0, i)) == SET
2762 364509 : && GET_CODE (SET_SRC (XVECEXP (p2, 0, i))) == ASM_OPERANDS)
2763 1896 : ok = false;
2764 : }
2765 :
2766 352797 : if (ok)
2767 416398 : for (i = 0; i < XVECLEN (p2, 0); i++)
2768 385170 : if (GET_CODE (XVECEXP (p2, 0, i)) == SET
2769 385170 : && SET_DEST (XVECEXP (p2, 0, i)) == SET_SRC (PATTERN (i3)))
2770 : {
2771 319673 : combine_merges++;
2772 :
2773 319673 : subst_insn = i3;
2774 319673 : subst_low_luid = DF_INSN_LUID (i2);
2775 :
2776 319673 : added_sets_2 = added_sets_1 = added_sets_0 = false;
2777 319673 : i2src = SET_SRC (XVECEXP (p2, 0, i));
2778 319673 : i2dest = SET_DEST (XVECEXP (p2, 0, i));
2779 319673 : i2dest_killed = dead_or_set_p (i2, i2dest);
2780 :
2781 : /* Replace the dest in I2 with our dest and make the resulting
2782 : insn the new pattern for I3. Then skip to where we validate
2783 : the pattern. Everything was set up above. */
2784 319673 : SUBST (SET_DEST (XVECEXP (p2, 0, i)), SET_DEST (PATTERN (i3)));
2785 319673 : newpat = p2;
2786 319673 : i3_subst_into_i2 = true;
2787 319673 : goto validate_replacement;
2788 : }
2789 : }
2790 :
2791 : /* If I2 is setting a pseudo to a constant and I3 is setting some
2792 : sub-part of it to another constant, merge them by making a new
2793 : constant. */
2794 46065822 : if (i1 == 0
2795 28691093 : && (temp_expr = single_set (i2)) != 0
2796 28418986 : && is_a <scalar_int_mode> (GET_MODE (SET_DEST (temp_expr)), &temp_mode)
2797 18708832 : && CONST_SCALAR_INT_P (SET_SRC (temp_expr))
2798 2773117 : && GET_CODE (PATTERN (i3)) == SET
2799 1372535 : && CONST_SCALAR_INT_P (SET_SRC (PATTERN (i3)))
2800 46084537 : && reg_subword_p (SET_DEST (PATTERN (i3)), SET_DEST (temp_expr)))
2801 : {
2802 18048 : rtx dest = SET_DEST (PATTERN (i3));
2803 18048 : rtx temp_dest = SET_DEST (temp_expr);
2804 18048 : int offset = -1;
2805 18048 : int width = 0;
2806 :
2807 18048 : if (GET_CODE (dest) == ZERO_EXTRACT)
2808 : {
2809 1 : if (CONST_INT_P (XEXP (dest, 1))
2810 1 : && CONST_INT_P (XEXP (dest, 2))
2811 2 : && is_a <scalar_int_mode> (GET_MODE (XEXP (dest, 0)),
2812 : &dest_mode))
2813 : {
2814 1 : width = INTVAL (XEXP (dest, 1));
2815 1 : offset = INTVAL (XEXP (dest, 2));
2816 1 : dest = XEXP (dest, 0);
2817 1 : if (BITS_BIG_ENDIAN)
2818 : offset = GET_MODE_PRECISION (dest_mode) - width - offset;
2819 : }
2820 : }
2821 : else
2822 : {
2823 18047 : if (GET_CODE (dest) == STRICT_LOW_PART)
2824 1881 : dest = XEXP (dest, 0);
2825 18047 : if (is_a <scalar_int_mode> (GET_MODE (dest), &dest_mode))
2826 : {
2827 18047 : width = GET_MODE_PRECISION (dest_mode);
2828 18047 : offset = 0;
2829 : }
2830 : }
2831 :
2832 18048 : if (offset >= 0)
2833 : {
2834 : /* If this is the low part, we're done. */
2835 18048 : if (subreg_lowpart_p (dest))
2836 : ;
2837 : /* Handle the case where inner is twice the size of outer. */
2838 4728 : else if (GET_MODE_PRECISION (temp_mode)
2839 4728 : == 2 * GET_MODE_PRECISION (dest_mode))
2840 4725 : offset += GET_MODE_PRECISION (dest_mode);
2841 : /* Otherwise give up for now. */
2842 : else
2843 : offset = -1;
2844 : }
2845 :
2846 18045 : if (offset >= 0)
2847 : {
2848 18045 : rtx inner = SET_SRC (PATTERN (i3));
2849 18045 : rtx outer = SET_SRC (temp_expr);
2850 :
2851 36090 : wide_int o = wi::insert (rtx_mode_t (outer, temp_mode),
2852 18045 : rtx_mode_t (inner, dest_mode),
2853 36090 : offset, width);
2854 :
2855 18045 : combine_merges++;
2856 18045 : subst_insn = i3;
2857 18045 : subst_low_luid = DF_INSN_LUID (i2);
2858 18045 : added_sets_2 = added_sets_1 = added_sets_0 = false;
2859 18045 : i2dest = temp_dest;
2860 18045 : i2dest_killed = dead_or_set_p (i2, i2dest);
2861 :
2862 : /* Replace the source in I2 with the new constant and make the
2863 : resulting insn the new pattern for I3. Then skip to where we
2864 : validate the pattern. Everything was set up above. */
2865 18045 : SUBST (SET_SRC (temp_expr),
2866 : immed_wide_int_const (o, temp_mode));
2867 :
2868 18045 : newpat = PATTERN (i2);
2869 :
2870 : /* The dest of I3 has been replaced with the dest of I2. */
2871 18045 : changed_i3_dest = true;
2872 18045 : goto validate_replacement;
2873 18045 : }
2874 : }
2875 :
2876 : /* If we have no I1 and I2 looks like:
2877 : (parallel [(set (reg:CC X) (compare:CC OP (const_int 0)))
2878 : (set Y OP)])
2879 : make up a dummy I1 that is
2880 : (set Y OP)
2881 : and change I2 to be
2882 : (set (reg:CC X) (compare:CC Y (const_int 0)))
2883 :
2884 : (We can ignore any trailing CLOBBERs.)
2885 :
2886 : This undoes a previous combination and allows us to match a branch-and-
2887 : decrement insn. */
2888 :
2889 46047777 : if (i1 == 0
2890 28673048 : && is_parallel_of_n_reg_sets (PATTERN (i2), 2)
2891 221069 : && (GET_MODE_CLASS (GET_MODE (SET_DEST (XVECEXP (PATTERN (i2), 0, 0))))
2892 : == MODE_CC)
2893 138714 : && GET_CODE (SET_SRC (XVECEXP (PATTERN (i2), 0, 0))) == COMPARE
2894 112503 : && XEXP (SET_SRC (XVECEXP (PATTERN (i2), 0, 0)), 1) == const0_rtx
2895 74828 : && rtx_equal_p (XEXP (SET_SRC (XVECEXP (PATTERN (i2), 0, 0)), 0),
2896 74828 : SET_SRC (XVECEXP (PATTERN (i2), 0, 1)))
2897 69941 : && !reg_used_between_p (SET_DEST (XVECEXP (PATTERN (i2), 0, 0)), i2, i3)
2898 46117718 : && !reg_used_between_p (SET_DEST (XVECEXP (PATTERN (i2), 0, 1)), i2, i3))
2899 : {
2900 : /* We make I1 with the same INSN_UID as I2. This gives it
2901 : the same DF_INSN_LUID for value tracking. Our fake I1 will
2902 : never appear in the insn stream so giving it the same INSN_UID
2903 : as I2 will not cause a problem. */
2904 :
2905 139446 : i1 = gen_rtx_INSN (VOIDmode, NULL, i2, BLOCK_FOR_INSN (i2),
2906 69723 : XVECEXP (PATTERN (i2), 0, 1), INSN_LOCATION (i2),
2907 : -1, NULL_RTX);
2908 69723 : INSN_UID (i1) = INSN_UID (i2);
2909 :
2910 69723 : SUBST (PATTERN (i2), XVECEXP (PATTERN (i2), 0, 0));
2911 69723 : SUBST (XEXP (SET_SRC (PATTERN (i2)), 0),
2912 : SET_DEST (PATTERN (i1)));
2913 69723 : unsigned int regno = REGNO (SET_DEST (PATTERN (i1)));
2914 69723 : SUBST_LINK (LOG_LINKS (i2),
2915 : alloc_insn_link (i1, regno, LOG_LINKS (i2)));
2916 : }
2917 :
2918 : /* If I2 is a PARALLEL of two SETs of REGs (and perhaps some CLOBBERs),
2919 : make those two SETs separate I1 and I2 insns, and make an I0 that is
2920 : the original I1. */
2921 46047777 : if (i0 == 0
2922 43535361 : && is_parallel_of_n_reg_sets (PATTERN (i2), 2)
2923 360039 : && can_split_parallel_of_n_reg_sets (i2, 2)
2924 306917 : && !reg_used_between_p (SET_DEST (XVECEXP (PATTERN (i2), 0, 0)), i2, i3)
2925 268569 : && !reg_used_between_p (SET_DEST (XVECEXP (PATTERN (i2), 0, 1)), i2, i3)
2926 251190 : && !reg_set_between_p (SET_DEST (XVECEXP (PATTERN (i2), 0, 0)), i2, i3)
2927 46298958 : && !reg_set_between_p (SET_DEST (XVECEXP (PATTERN (i2), 0, 1)), i2, i3))
2928 : {
2929 : /* If there is no I1, there is no I0 either. */
2930 251181 : i0 = i1;
2931 :
2932 : /* We make I1 with the same INSN_UID as I2. This gives it
2933 : the same DF_INSN_LUID for value tracking. Our fake I1 will
2934 : never appear in the insn stream so giving it the same INSN_UID
2935 : as I2 will not cause a problem. */
2936 :
2937 502362 : i1 = gen_rtx_INSN (VOIDmode, NULL, i2, BLOCK_FOR_INSN (i2),
2938 251181 : XVECEXP (PATTERN (i2), 0, 0), INSN_LOCATION (i2),
2939 : -1, NULL_RTX);
2940 251181 : INSN_UID (i1) = INSN_UID (i2);
2941 :
2942 251181 : SUBST (PATTERN (i2), XVECEXP (PATTERN (i2), 0, 1));
2943 : }
2944 :
2945 : /* Verify that I2 and maybe I1 and I0 can be combined into I3. */
2946 46047777 : if (!can_combine_p (i2, i3, i0, i1, NULL, NULL, &i2dest, &i2src))
2947 : {
2948 11868656 : if (dump_file && (dump_flags & TDF_DETAILS))
2949 8 : fprintf (dump_file, "Can't combine i2 into i3\n");
2950 11868656 : undo_all ();
2951 11868656 : return 0;
2952 : }
2953 34179121 : if (i1 && !can_combine_p (i1, i3, i0, NULL, i2, NULL, &i1dest, &i1src))
2954 : {
2955 1357418 : if (dump_file && (dump_flags & TDF_DETAILS))
2956 0 : fprintf (dump_file, "Can't combine i1 into i3\n");
2957 1357418 : undo_all ();
2958 1357418 : return 0;
2959 : }
2960 32821703 : if (i0 && !can_combine_p (i0, i3, NULL, NULL, i1, i2, &i0dest, &i0src))
2961 : {
2962 226270 : if (dump_file && (dump_flags & TDF_DETAILS))
2963 0 : fprintf (dump_file, "Can't combine i0 into i3\n");
2964 226270 : undo_all ();
2965 226270 : return 0;
2966 : }
2967 :
2968 : /* With non-call exceptions we can end up trying to combine multiple
2969 : insns with possible EH side effects. Make sure we can combine
2970 : that to a single insn which means there must be at most one insn
2971 : in the combination with an EH side effect. */
2972 32595433 : if (cfun->can_throw_non_call_exceptions)
2973 : {
2974 6050512 : if (find_reg_note (i3, REG_EH_REGION, NULL_RTX)
2975 6026863 : || find_reg_note (i2, REG_EH_REGION, NULL_RTX)
2976 6026781 : || (i1 && find_reg_note (i1, REG_EH_REGION, NULL_RTX))
2977 12077292 : || (i0 && find_reg_note (i0, REG_EH_REGION, NULL_RTX)))
2978 : {
2979 23732 : has_non_call_exception = true;
2980 23732 : if (insn_could_throw_p (i3)
2981 23732 : + insn_could_throw_p (i2)
2982 23732 : + (i1 ? insn_could_throw_p (i1) : 0)
2983 23732 : + (i0 ? insn_could_throw_p (i0) : 0) > 1)
2984 : {
2985 172 : if (dump_file && (dump_flags & TDF_DETAILS))
2986 0 : fprintf (dump_file, "Can't combine multiple insns with EH "
2987 : "side-effects\n");
2988 172 : undo_all ();
2989 172 : return 0;
2990 : }
2991 : }
2992 : }
2993 :
2994 : /* Record whether i2 and i3 are trivial moves. */
2995 32595261 : i2_was_move = is_just_move (i2);
2996 32595261 : i3_was_move = is_just_move (i3);
2997 :
2998 : /* Record whether I2DEST is used in I2SRC and similarly for the other
2999 : cases. Knowing this will help in register status updating below. */
3000 32595261 : i2dest_in_i2src = reg_overlap_mentioned_p (i2dest, i2src);
3001 32595261 : i1dest_in_i1src = i1 && reg_overlap_mentioned_p (i1dest, i1src);
3002 10347571 : i2dest_in_i1src = i1 && reg_overlap_mentioned_p (i2dest, i1src);
3003 32595261 : i0dest_in_i0src = i0 && reg_overlap_mentioned_p (i0dest, i0src);
3004 1803534 : i1dest_in_i0src = i0 && reg_overlap_mentioned_p (i1dest, i0src);
3005 1803534 : i2dest_in_i0src = i0 && reg_overlap_mentioned_p (i2dest, i0src);
3006 32595261 : i2dest_killed = dead_or_set_p (i2, i2dest);
3007 32595261 : i1dest_killed = i1 && dead_or_set_p (i1, i1dest);
3008 32595261 : i0dest_killed = i0 && dead_or_set_p (i0, i0dest);
3009 :
3010 : /* For the earlier insns, determine which of the subsequent ones they
3011 : feed. */
3012 32595261 : i1_feeds_i2_n = i1 && insn_a_feeds_b (i1, i2);
3013 32595261 : i0_feeds_i1_n = i0 && insn_a_feeds_b (i0, i1);
3014 3148076 : i0_feeds_i2_n = (i0 && (!i0_feeds_i1_n ? insn_a_feeds_b (i0, i2)
3015 1344542 : : (!reg_overlap_mentioned_p (i1dest, i0dest)
3016 1321767 : && reg_overlap_mentioned_p (i0dest, i2src))));
3017 :
3018 : /* Ensure that I3's pattern can be the destination of combines. */
3019 32595261 : if (! combinable_i3pat (i3, &PATTERN (i3), i2dest, i1dest, i0dest,
3020 32595261 : i1 && i2dest_in_i1src && !i1_feeds_i2_n,
3021 1803534 : i0 && ((i2dest_in_i0src && !i0_feeds_i2_n)
3022 1776744 : || (i1dest_in_i0src && !i0_feeds_i1_n)),
3023 : &i3dest_killed))
3024 : {
3025 172946 : undo_all ();
3026 172946 : return 0;
3027 : }
3028 :
3029 : /* See if any of the insns is a MULT operation. Unless one is, we will
3030 : reject a combination that is, since it must be slower. Be conservative
3031 : here. */
3032 32422315 : if (GET_CODE (i2src) == MULT
3033 31636035 : || (i1 != 0 && GET_CODE (i1src) == MULT)
3034 31302958 : || (i0 != 0 && GET_CODE (i0src) == MULT)
3035 63681379 : || (GET_CODE (PATTERN (i3)) == SET
3036 24628032 : && GET_CODE (SET_SRC (PATTERN (i3))) == MULT))
3037 : have_mult = true;
3038 :
3039 : /* If I3 has an inc, then give up if I1 or I2 uses the reg that is inc'd.
3040 : We used to do this EXCEPT in one case: I3 has a post-inc in an
3041 : output operand. However, that exception can give rise to insns like
3042 : mov r3,(r3)+
3043 : which is a famous insn on the PDP-11 where the value of r3 used as the
3044 : source was model-dependent. Avoid this sort of thing. */
3045 :
3046 : #if 0
3047 : if (!(GET_CODE (PATTERN (i3)) == SET
3048 : && REG_P (SET_SRC (PATTERN (i3)))
3049 : && MEM_P (SET_DEST (PATTERN (i3)))
3050 : && (GET_CODE (XEXP (SET_DEST (PATTERN (i3)), 0)) == POST_INC
3051 : || GET_CODE (XEXP (SET_DEST (PATTERN (i3)), 0)) == POST_DEC)))
3052 : /* It's not the exception. */
3053 : #endif
3054 32422315 : if (AUTO_INC_DEC)
3055 : {
3056 : rtx link;
3057 : for (link = REG_NOTES (i3); link; link = XEXP (link, 1))
3058 : if (REG_NOTE_KIND (link) == REG_INC
3059 : && (reg_overlap_mentioned_p (XEXP (link, 0), PATTERN (i2))
3060 : || (i1 != 0
3061 : && reg_overlap_mentioned_p (XEXP (link, 0), PATTERN (i1)))))
3062 : {
3063 : undo_all ();
3064 : return 0;
3065 : }
3066 : }
3067 :
3068 : /* See if the SETs in I1 or I2 need to be kept around in the merged
3069 : instruction: whenever the value set there is still needed past I3.
3070 : For the SET in I2, this is easy: we see if I2DEST dies or is set in I3.
3071 :
3072 : For the SET in I1, we have two cases: if I1 and I2 independently feed
3073 : into I3, the set in I1 needs to be kept around unless I1DEST dies
3074 : or is set in I3. Otherwise (if I1 feeds I2 which feeds I3), the set
3075 : in I1 needs to be kept around unless I1DEST dies or is set in either
3076 : I2 or I3. The same considerations apply to I0. */
3077 :
3078 32422315 : added_sets_2 = !dead_or_set_p (i3, i2dest);
3079 :
3080 32422315 : if (i1)
3081 10294361 : added_sets_1 = !(dead_or_set_p (i3, i1dest)
3082 7902728 : || (i1_feeds_i2_n && dead_or_set_p (i2, i1dest)));
3083 : else
3084 : added_sets_1 = false;
3085 :
3086 32422315 : if (i0)
3087 2630233 : added_sets_0 = !(dead_or_set_p (i3, i0dest)
3088 1611217 : || (i0_feeds_i1_n && dead_or_set_p (i1, i0dest))
3089 338215 : || ((i0_feeds_i2_n || (i0_feeds_i1_n && i1_feeds_i2_n))
3090 791308 : && dead_or_set_p (i2, i0dest)));
3091 : else
3092 : added_sets_0 = false;
3093 :
3094 : /* We are about to copy insns for the case where they need to be kept
3095 : around. Check that they can be copied in the merged instruction. */
3096 :
3097 32422315 : if (targetm.cannot_copy_insn_p
3098 32422315 : && ((added_sets_2 && targetm.cannot_copy_insn_p (i2))
3099 0 : || (i1 && added_sets_1 && targetm.cannot_copy_insn_p (i1))
3100 0 : || (i0 && added_sets_0 && targetm.cannot_copy_insn_p (i0))))
3101 : {
3102 0 : undo_all ();
3103 0 : return 0;
3104 : }
3105 :
3106 : /* We cannot safely duplicate volatile references in any case. */
3107 :
3108 7242896 : if ((added_sets_2 && volatile_refs_p (PATTERN (i2)))
3109 32385865 : || (added_sets_1 && volatile_refs_p (PATTERN (i1)))
3110 64779849 : || (added_sets_0 && volatile_refs_p (PATTERN (i0))))
3111 : {
3112 67086 : undo_all ();
3113 67086 : return 0;
3114 : }
3115 :
3116 : /* Count how many auto_inc expressions there were in the original insns;
3117 : we need to have the same number in the resulting patterns. */
3118 :
3119 32355229 : if (i0)
3120 1772438 : for_each_inc_dec (PATTERN (i0), count_auto_inc, &n_auto_inc);
3121 32355229 : if (i1)
3122 10262247 : for_each_inc_dec (PATTERN (i1), count_auto_inc, &n_auto_inc);
3123 32355229 : for_each_inc_dec (PATTERN (i2), count_auto_inc, &n_auto_inc);
3124 32355229 : for_each_inc_dec (PATTERN (i3), count_auto_inc, &n_auto_inc);
3125 :
3126 : /* If the set in I2 needs to be kept around, we must make a copy of
3127 : PATTERN (I2), so that when we substitute I1SRC for I1DEST in
3128 : PATTERN (I2), we are only substituting for the original I1DEST, not into
3129 : an already-substituted copy. This also prevents making self-referential
3130 : rtx. If I2 is a PARALLEL, we just need the piece that assigns I2SRC to
3131 : I2DEST. */
3132 :
3133 32355229 : if (added_sets_2)
3134 : {
3135 7203560 : if (GET_CODE (PATTERN (i2)) == PARALLEL)
3136 2223127 : i2pat = gen_rtx_SET (i2dest, copy_rtx (i2src));
3137 : else
3138 4980433 : i2pat = copy_rtx (PATTERN (i2));
3139 : }
3140 :
3141 32355229 : if (added_sets_1)
3142 : {
3143 3950003 : if (GET_CODE (PATTERN (i1)) == PARALLEL)
3144 1273495 : i1pat = gen_rtx_SET (i1dest, copy_rtx (i1src));
3145 : else
3146 2676508 : i1pat = copy_rtx (PATTERN (i1));
3147 : }
3148 :
3149 32355229 : if (added_sets_0)
3150 : {
3151 498560 : if (GET_CODE (PATTERN (i0)) == PARALLEL)
3152 183308 : i0pat = gen_rtx_SET (i0dest, copy_rtx (i0src));
3153 : else
3154 315252 : i0pat = copy_rtx (PATTERN (i0));
3155 : }
3156 :
3157 32355229 : combine_merges++;
3158 :
3159 : /* Substitute in the latest insn for the regs set by the earlier ones. */
3160 :
3161 32355229 : maxreg = max_reg_num ();
3162 :
3163 32355229 : subst_insn = i3;
3164 :
3165 : /* Many machines have insns that can both perform an
3166 : arithmetic operation and set the condition code. These operations will
3167 : be represented as a PARALLEL with the first element of the vector
3168 : being a COMPARE of an arithmetic operation with the constant zero.
3169 : The second element of the vector will set some pseudo to the result
3170 : of the same arithmetic operation. If we simplify the COMPARE, we won't
3171 : match such a pattern and so will generate an extra insn. Here we test
3172 : for this case, where both the comparison and the operation result are
3173 : needed, and make the PARALLEL by just replacing I2DEST in I3SRC with
3174 : I2SRC. Later we will make the PARALLEL that contains I2. */
3175 :
3176 22092982 : if (i1 == 0 && added_sets_2 && GET_CODE (PATTERN (i3)) == SET
3177 4249201 : && GET_CODE (SET_SRC (PATTERN (i3))) == COMPARE
3178 1805321 : && CONST_INT_P (XEXP (SET_SRC (PATTERN (i3)), 1))
3179 33230040 : && rtx_equal_p (XEXP (SET_SRC (PATTERN (i3)), 0), i2dest))
3180 : {
3181 805852 : rtx newpat_dest;
3182 805852 : rtx *cc_use_loc = NULL;
3183 805852 : rtx_insn *cc_use_insn = NULL;
3184 805852 : rtx op0 = i2src, op1 = XEXP (SET_SRC (PATTERN (i3)), 1);
3185 805852 : machine_mode compare_mode, orig_compare_mode;
3186 805852 : enum rtx_code compare_code = UNKNOWN, orig_compare_code = UNKNOWN;
3187 805852 : scalar_int_mode mode;
3188 :
3189 805852 : newpat = PATTERN (i3);
3190 805852 : newpat_dest = SET_DEST (newpat);
3191 805852 : compare_mode = orig_compare_mode = GET_MODE (newpat_dest);
3192 :
3193 805852 : if (undobuf.other_insn == 0
3194 805852 : && (cc_use_loc = find_single_use (SET_DEST (newpat), i3,
3195 : &cc_use_insn)))
3196 : {
3197 799763 : compare_code = orig_compare_code = GET_CODE (*cc_use_loc);
3198 799763 : if (is_a <scalar_int_mode> (GET_MODE (i2dest), &mode))
3199 799763 : compare_code = simplify_compare_const (compare_code, mode,
3200 : &op0, &op1);
3201 799763 : target_canonicalize_comparison (&compare_code, &op0, &op1, 1);
3202 : }
3203 :
3204 : /* Do the rest only if op1 is const0_rtx, which may be the
3205 : result of simplification. */
3206 805852 : if (op1 == const0_rtx)
3207 : {
3208 : /* If a single use of the CC is found, prepare to modify it
3209 : when SELECT_CC_MODE returns a new CC-class mode, or when
3210 : the above simplify_compare_const() returned a new comparison
3211 : operator. undobuf.other_insn is assigned the CC use insn
3212 : when modifying it. */
3213 504138 : if (cc_use_loc)
3214 : {
3215 : #ifdef SELECT_CC_MODE
3216 501486 : machine_mode new_mode
3217 501486 : = SELECT_CC_MODE (compare_code, op0, op1);
3218 501486 : if (new_mode != orig_compare_mode
3219 501486 : && can_change_dest_mode (SET_DEST (newpat),
3220 : added_sets_2, new_mode))
3221 : {
3222 407 : unsigned int regno = REGNO (newpat_dest);
3223 407 : compare_mode = new_mode;
3224 407 : if (regno < FIRST_PSEUDO_REGISTER)
3225 407 : newpat_dest = gen_rtx_REG (compare_mode, regno);
3226 : else
3227 : {
3228 0 : subst_mode (regno, compare_mode);
3229 0 : newpat_dest = regno_reg_rtx[regno];
3230 : }
3231 : }
3232 : #endif
3233 : /* Cases for modifying the CC-using comparison. */
3234 501486 : if (compare_code != orig_compare_code
3235 419 : && COMPARISON_P (*cc_use_loc))
3236 : {
3237 : /* Replace cc_use_loc with entire new RTX. */
3238 419 : SUBST (*cc_use_loc,
3239 : gen_rtx_fmt_ee (compare_code, GET_MODE (*cc_use_loc),
3240 : newpat_dest, const0_rtx));
3241 419 : undobuf.other_insn = cc_use_insn;
3242 : }
3243 501067 : else if (compare_mode != orig_compare_mode)
3244 : {
3245 1 : subrtx_ptr_iterator::array_type array;
3246 :
3247 : /* Just replace the CC reg with a new mode. */
3248 4 : FOR_EACH_SUBRTX_PTR (iter, array, cc_use_loc, NONCONST)
3249 : {
3250 3 : rtx *loc = *iter;
3251 3 : if (REG_P (*loc)
3252 3 : && REGNO (*loc) == REGNO (newpat_dest))
3253 : {
3254 1 : SUBST (*loc, newpat_dest);
3255 1 : iter.skip_subrtxes ();
3256 : }
3257 : }
3258 1 : undobuf.other_insn = cc_use_insn;
3259 1 : }
3260 : }
3261 :
3262 : /* Now we modify the current newpat:
3263 : First, SET_DEST(newpat) is updated if the CC mode has been
3264 : altered. For targets without SELECT_CC_MODE, this should be
3265 : optimized away. */
3266 504138 : if (compare_mode != orig_compare_mode)
3267 407 : SUBST (SET_DEST (newpat), newpat_dest);
3268 : /* This is always done to propagate i2src into newpat. */
3269 504138 : SUBST (SET_SRC (newpat),
3270 : gen_rtx_COMPARE (compare_mode, op0, op1));
3271 : /* Create new version of i2pat if needed; the below PARALLEL
3272 : creation needs this to work correctly. */
3273 504138 : if (! rtx_equal_p (i2src, op0))
3274 31 : i2pat = gen_rtx_SET (i2dest, op0);
3275 504138 : i2_is_used = 1;
3276 : }
3277 : }
3278 :
3279 805852 : if (i2_is_used == 0)
3280 : {
3281 : /* It is possible that the source of I2 or I1 may be performing
3282 : an unneeded operation, such as a ZERO_EXTEND of something
3283 : that is known to have the high part zero. Handle that case
3284 : by letting subst look at the inner insns.
3285 :
3286 : Another way to do this would be to have a function that tries
3287 : to simplify a single insn instead of merging two or more
3288 : insns. We don't do this because of the potential of infinite
3289 : loops and because of the potential extra memory required.
3290 : However, doing it the way we are is a bit of a kludge and
3291 : doesn't catch all cases.
3292 :
3293 : But only do this if -fexpensive-optimizations since it slows
3294 : things down and doesn't usually win.
3295 :
3296 : This is not done in the COMPARE case above because the
3297 : unmodified I2PAT is used in the PARALLEL and so a pattern
3298 : with a modified I2SRC would not match. */
3299 :
3300 31851091 : if (flag_expensive_optimizations)
3301 : {
3302 : /* Pass pc_rtx so no substitutions are done, just
3303 : simplifications. */
3304 29652946 : if (i1)
3305 : {
3306 9617293 : subst_low_luid = DF_INSN_LUID (i1);
3307 9617293 : i1src = subst (i1src, pc_rtx, pc_rtx, false, false, false);
3308 : }
3309 :
3310 29652946 : subst_low_luid = DF_INSN_LUID (i2);
3311 29652946 : i2src = subst (i2src, pc_rtx, pc_rtx, false, false, false);
3312 : }
3313 :
3314 31851091 : n_occurrences = 0; /* `subst' counts here */
3315 31851091 : subst_low_luid = DF_INSN_LUID (i2);
3316 :
3317 : /* If I1 feeds into I2 and I1DEST is in I1SRC, we need to make a unique
3318 : copy of I2SRC each time we substitute it, in order to avoid creating
3319 : self-referential RTL when we will be substituting I1SRC for I1DEST
3320 : later. Likewise if I0 feeds into I2, either directly or indirectly
3321 : through I1, and I0DEST is in I0SRC. */
3322 31851091 : newpat = subst (PATTERN (i3), i2dest, i2src, false, false,
3323 31851091 : (i1_feeds_i2_n && i1dest_in_i1src)
3324 31851091 : || ((i0_feeds_i2_n || (i0_feeds_i1_n && i1_feeds_i2_n))
3325 : && i0dest_in_i0src));
3326 31851091 : substed_i2 = true;
3327 :
3328 : /* Record whether I2's body now appears within I3's body. */
3329 31851091 : i2_is_used = n_occurrences;
3330 : }
3331 :
3332 : /* If we already got a failure, don't try to do more. Otherwise, try to
3333 : substitute I1 if we have it. */
3334 :
3335 32355229 : if (i1 && GET_CODE (newpat) != CLOBBER)
3336 : {
3337 : /* Before we can do this substitution, we must redo the test done
3338 : above (see detailed comments there) that ensures I1DEST isn't
3339 : mentioned in any SETs in NEWPAT that are field assignments. */
3340 10219094 : if (!combinable_i3pat (NULL, &newpat, i1dest, NULL_RTX, NULL_RTX,
3341 : false, false, 0))
3342 : {
3343 38 : undo_all ();
3344 38 : return 0;
3345 : }
3346 :
3347 10219056 : n_occurrences = 0;
3348 10219056 : subst_low_luid = DF_INSN_LUID (i1);
3349 :
3350 : /* If the following substitution will modify I1SRC, make a copy of it
3351 : for the case where it is substituted for I1DEST in I2PAT later. */
3352 10219056 : if (added_sets_2 && i1_feeds_i2_n)
3353 1412738 : i1src_copy = copy_rtx (i1src);
3354 :
3355 : /* If I0 feeds into I1 and I0DEST is in I0SRC, we need to make a unique
3356 : copy of I1SRC each time we substitute it, in order to avoid creating
3357 : self-referential RTL when we will be substituting I0SRC for I0DEST
3358 : later. */
3359 20438112 : newpat = subst (newpat, i1dest, i1src, false, false,
3360 10219056 : i0_feeds_i1_n && i0dest_in_i0src);
3361 10219056 : substed_i1 = true;
3362 :
3363 : /* Record whether I1's body now appears within I3's body. */
3364 10219056 : i1_is_used = n_occurrences;
3365 : }
3366 :
3367 : /* Likewise for I0 if we have it. */
3368 :
3369 32355191 : if (i0 && GET_CODE (newpat) != CLOBBER)
3370 : {
3371 1754639 : if (!combinable_i3pat (NULL, &newpat, i0dest, NULL_RTX, NULL_RTX,
3372 : false, false, 0))
3373 : {
3374 2 : undo_all ();
3375 2 : return 0;
3376 : }
3377 :
3378 : /* If the following substitution will modify I0SRC, make a copy of it
3379 : for the case where it is substituted for I0DEST in I1PAT later. */
3380 1754637 : if (added_sets_1 && i0_feeds_i1_n)
3381 351624 : i0src_copy = copy_rtx (i0src);
3382 : /* And a copy for I0DEST in I2PAT substitution. */
3383 1754637 : if (added_sets_2 && ((i0_feeds_i1_n && i1_feeds_i2_n)
3384 197868 : || (i0_feeds_i2_n)))
3385 312181 : i0src_copy2 = copy_rtx (i0src);
3386 :
3387 1754637 : n_occurrences = 0;
3388 1754637 : subst_low_luid = DF_INSN_LUID (i0);
3389 1754637 : newpat = subst (newpat, i0dest, i0src, false, false, false);
3390 1754637 : substed_i0 = true;
3391 : }
3392 :
3393 32355189 : if (n_auto_inc)
3394 : {
3395 517590 : int new_n_auto_inc = 0;
3396 517590 : for_each_inc_dec (newpat, count_auto_inc, &new_n_auto_inc);
3397 :
3398 517590 : if (n_auto_inc != new_n_auto_inc)
3399 : {
3400 1135 : if (dump_file && (dump_flags & TDF_DETAILS))
3401 0 : fprintf (dump_file, "Number of auto_inc expressions changed\n");
3402 1135 : undo_all ();
3403 1135 : return 0;
3404 : }
3405 : }
3406 :
3407 : /* Fail if an autoincrement side-effect has been duplicated. Be careful
3408 : to count all the ways that I2SRC and I1SRC can be used. */
3409 32354054 : if ((FIND_REG_INC_NOTE (i2, NULL_RTX) != 0
3410 : && i2_is_used + added_sets_2 > 1)
3411 : || (i1 != 0 && FIND_REG_INC_NOTE (i1, NULL_RTX) != 0
3412 : && (i1_is_used + added_sets_1 + (added_sets_2 && i1_feeds_i2_n) > 1))
3413 : || (i0 != 0 && FIND_REG_INC_NOTE (i0, NULL_RTX) != 0
3414 : && (n_occurrences + added_sets_0
3415 : + (added_sets_1 && i0_feeds_i1_n)
3416 : + (added_sets_2 && i0_feeds_i2_n) > 1))
3417 : /* Fail if we tried to make a new register. */
3418 32354054 : || max_reg_num () != maxreg
3419 : /* Fail if we couldn't do something and have a CLOBBER. */
3420 32354054 : || GET_CODE (newpat) == CLOBBER
3421 : /* Fail if this new pattern is a MULT and we didn't have one before
3422 : at the outer level. */
3423 64397070 : || (GET_CODE (newpat) == SET && GET_CODE (SET_SRC (newpat)) == MULT
3424 278081 : && ! have_mult))
3425 : {
3426 342015 : undo_all ();
3427 342015 : return 0;
3428 : }
3429 :
3430 : /* If the actions of the earlier insns must be kept
3431 : in addition to substituting them into the latest one,
3432 : we must make a new PARALLEL for the latest insn
3433 : to hold additional the SETs. */
3434 :
3435 32012039 : if (added_sets_0 || added_sets_1 || added_sets_2)
3436 : {
3437 10647811 : int extra_sets = added_sets_0 + added_sets_1 + added_sets_2;
3438 10647811 : combine_extras++;
3439 :
3440 10647811 : if (GET_CODE (newpat) == PARALLEL)
3441 : {
3442 2060008 : rtvec old = XVEC (newpat, 0);
3443 2060008 : total_sets = XVECLEN (newpat, 0) + extra_sets;
3444 2060008 : newpat = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (total_sets));
3445 2060008 : memcpy (XVEC (newpat, 0)->elem, &old->elem[0],
3446 2060008 : sizeof (old->elem[0]) * old->num_elem);
3447 : }
3448 : else
3449 : {
3450 8587803 : rtx old = newpat;
3451 8587803 : total_sets = 1 + extra_sets;
3452 8587803 : newpat = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (total_sets));
3453 8587803 : XVECEXP (newpat, 0, 0) = old;
3454 : }
3455 :
3456 10647811 : if (added_sets_0)
3457 483415 : XVECEXP (newpat, 0, --total_sets) = i0pat;
3458 :
3459 10647811 : if (added_sets_1)
3460 : {
3461 3907127 : rtx t = i1pat;
3462 3907127 : if (i0_feeds_i1_n)
3463 349854 : t = subst (t, i0dest, i0src_copy ? i0src_copy : i0src,
3464 : false, false, false);
3465 :
3466 3907127 : XVECEXP (newpat, 0, --total_sets) = t;
3467 : }
3468 10647811 : if (added_sets_2)
3469 : {
3470 7157611 : rtx t = i2pat;
3471 7157611 : if (i1_feeds_i2_n)
3472 1401415 : t = subst (t, i1dest, i1src_copy ? i1src_copy : i1src, false, false,
3473 1401415 : i0_feeds_i1_n && i0dest_in_i0src);
3474 7157611 : if ((i0_feeds_i1_n && i1_feeds_i2_n) || i0_feeds_i2_n)
3475 311222 : t = subst (t, i0dest, i0src_copy2 ? i0src_copy2 : i0src,
3476 : false, false, false);
3477 :
3478 7157611 : XVECEXP (newpat, 0, --total_sets) = t;
3479 : }
3480 : }
3481 :
3482 24854428 : validate_replacement:
3483 :
3484 : /* Note which hard regs this insn has as inputs. */
3485 32349757 : mark_used_regs_combine (newpat);
3486 :
3487 : /* If recog_for_combine fails, it strips existing clobbers. If we'll
3488 : consider splitting this pattern, we might need these clobbers. */
3489 32349757 : if (i1 && GET_CODE (newpat) == PARALLEL
3490 7068814 : && GET_CODE (XVECEXP (newpat, 0, XVECLEN (newpat, 0) - 1)) == CLOBBER)
3491 : {
3492 1615059 : int len = XVECLEN (newpat, 0);
3493 :
3494 1615059 : newpat_vec_with_clobbers = rtvec_alloc (len);
3495 6510786 : for (i = 0; i < len; i++)
3496 3280668 : RTVEC_ELT (newpat_vec_with_clobbers, i) = XVECEXP (newpat, 0, i);
3497 : }
3498 :
3499 : /* We have recognized nothing yet. */
3500 32349757 : insn_code_number = -1;
3501 :
3502 : /* See if this is a PARALLEL of two SETs where one SET's destination is
3503 : a register that is unused and this isn't marked as an instruction that
3504 : might trap in an EH region. In that case, we just need the other SET.
3505 : We prefer this over the PARALLEL.
3506 :
3507 : This can occur when simplifying a divmod insn. We *must* test for this
3508 : case here because the code below that splits two independent SETs doesn't
3509 : handle this case correctly when it updates the register status.
3510 :
3511 : It's pointless doing this if we originally had two sets, one from
3512 : i3, and one from i2. Combining then splitting the parallel results
3513 : in the original i2 again plus an invalid insn (which we delete).
3514 : The net effect is only to move instructions around, which makes
3515 : debug info less accurate.
3516 :
3517 : If the remaining SET came from I2 its destination should not be used
3518 : between I2 and I3. See PR82024. */
3519 :
3520 7157611 : if (!(added_sets_2 && i1 == 0)
3521 27090512 : && is_parallel_of_n_reg_sets (newpat, 2)
3522 33879320 : && asm_noperands (newpat) < 0)
3523 : {
3524 1528676 : rtx set0 = XVECEXP (newpat, 0, 0);
3525 1528676 : rtx set1 = XVECEXP (newpat, 0, 1);
3526 1528676 : rtx oldpat = newpat;
3527 :
3528 1528676 : if (((REG_P (SET_DEST (set1))
3529 1528676 : && find_reg_note (i3, REG_UNUSED, SET_DEST (set1)))
3530 1487844 : || (GET_CODE (SET_DEST (set1)) == SUBREG
3531 0 : && find_reg_note (i3, REG_UNUSED, SUBREG_REG (SET_DEST (set1)))))
3532 40832 : && insn_nothrow_p (i3)
3533 1568245 : && !side_effects_p (SET_SRC (set1)))
3534 : {
3535 39302 : newpat = set0;
3536 39302 : insn_code_number = recog_for_combine (&newpat, i3, &new_i3_notes);
3537 : }
3538 :
3539 1489374 : else if (((REG_P (SET_DEST (set0))
3540 1489374 : && find_reg_note (i3, REG_UNUSED, SET_DEST (set0)))
3541 1465206 : || (GET_CODE (SET_DEST (set0)) == SUBREG
3542 0 : && find_reg_note (i3, REG_UNUSED,
3543 0 : SUBREG_REG (SET_DEST (set0)))))
3544 24168 : && insn_nothrow_p (i3)
3545 1512962 : && !side_effects_p (SET_SRC (set0)))
3546 : {
3547 23545 : rtx dest = SET_DEST (set1);
3548 23545 : if (GET_CODE (dest) == SUBREG)
3549 0 : dest = SUBREG_REG (dest);
3550 23545 : if (!reg_used_between_p (dest, i2, i3))
3551 : {
3552 23544 : newpat = set1;
3553 23544 : insn_code_number = recog_for_combine (&newpat, i3, &new_i3_notes);
3554 :
3555 23544 : if (insn_code_number >= 0)
3556 : changed_i3_dest = true;
3557 : }
3558 : }
3559 :
3560 39302 : if (insn_code_number < 0)
3561 1523102 : newpat = oldpat;
3562 : }
3563 :
3564 : /* Is the result of combination a valid instruction? */
3565 1523102 : if (insn_code_number < 0)
3566 32344183 : insn_code_number = recog_for_combine (&newpat, i3, &new_i3_notes);
3567 :
3568 : /* If we were combining three insns and the result is a simple SET
3569 : with no ASM_OPERANDS that wasn't recognized, try to split it into two
3570 : insns. There are two ways to do this. It can be split using a
3571 : machine-specific method (like when you have an addition of a large
3572 : constant) or by combine in the function find_split_point. */
3573 :
3574 10075551 : if (i1 && insn_code_number < 0 && GET_CODE (newpat) == SET
3575 36884334 : && asm_noperands (newpat) < 0)
3576 : {
3577 4534100 : rtx parallel, *split;
3578 4534100 : rtx_insn *m_split_insn;
3579 4534100 : unsigned int old_nregs, new_nregs;
3580 :
3581 : /* See if the MD file can split NEWPAT. If it can't, see if letting it
3582 : use I2DEST as a scratch register will help. In the latter case,
3583 : convert I2DEST to the mode of the source of NEWPAT if we can. */
3584 :
3585 4534100 : m_split_insn = combine_split_insns (newpat, i3, &old_nregs, &new_nregs);
3586 :
3587 : /* We can only use I2DEST as a scratch reg if it doesn't overlap any
3588 : inputs of NEWPAT. */
3589 :
3590 : /* ??? If I2DEST is not safe, and I1DEST exists, then it would be
3591 : possible to try that as a scratch reg. This would require adding
3592 : more code to make it work though. */
3593 :
3594 4534100 : if (m_split_insn == 0 && ! reg_overlap_mentioned_p (i2dest, newpat))
3595 : {
3596 4401499 : machine_mode new_mode = GET_MODE (SET_DEST (newpat));
3597 :
3598 : /* ??? Reusing i2dest without resetting the reg_stat entry for it
3599 : (temporarily, until we are committed to this instruction
3600 : combination) does not work: for example, any call to nonzero_bits
3601 : on the register (from a splitter in the MD file, for example)
3602 : will get the old information, which is invalid.
3603 :
3604 : Since nowadays we can create registers during combine just fine,
3605 : we should just create a new one here, not reuse i2dest. */
3606 :
3607 : /* First try to split using the original register as a
3608 : scratch register. */
3609 4401499 : parallel = gen_rtx_PARALLEL (VOIDmode,
3610 : gen_rtvec (2, newpat,
3611 : gen_rtx_CLOBBER (VOIDmode,
3612 : i2dest)));
3613 4401499 : m_split_insn = combine_split_insns (parallel, i3, &old_nregs, &new_nregs);
3614 :
3615 : /* If that didn't work, try changing the mode of I2DEST if
3616 : we can. */
3617 4401499 : if (m_split_insn == 0
3618 4401499 : && new_mode != GET_MODE (i2dest)
3619 1689213 : && new_mode != VOIDmode
3620 5540972 : && can_change_dest_mode (i2dest, added_sets_2, new_mode))
3621 : {
3622 854424 : machine_mode old_mode = GET_MODE (i2dest);
3623 854424 : rtx ni2dest;
3624 :
3625 854424 : if (REGNO (i2dest) < FIRST_PSEUDO_REGISTER)
3626 8605 : ni2dest = gen_rtx_REG (new_mode, REGNO (i2dest));
3627 : else
3628 : {
3629 845819 : subst_mode (REGNO (i2dest), new_mode);
3630 845819 : ni2dest = regno_reg_rtx[REGNO (i2dest)];
3631 : }
3632 :
3633 854424 : parallel = (gen_rtx_PARALLEL
3634 : (VOIDmode,
3635 : gen_rtvec (2, newpat,
3636 : gen_rtx_CLOBBER (VOIDmode,
3637 : ni2dest))));
3638 854424 : m_split_insn = combine_split_insns (parallel, i3, &old_nregs, &new_nregs);
3639 :
3640 854424 : if (m_split_insn == 0
3641 854424 : && REGNO (i2dest) >= FIRST_PSEUDO_REGISTER)
3642 : {
3643 845819 : struct undo *buf;
3644 :
3645 845819 : adjust_reg_mode (regno_reg_rtx[REGNO (i2dest)], old_mode);
3646 845819 : buf = undobuf.undos;
3647 845819 : undobuf.undos = buf->next;
3648 845819 : buf->next = undobuf.frees;
3649 845819 : undobuf.frees = buf;
3650 : }
3651 : }
3652 :
3653 4401499 : i2scratch = m_split_insn != 0;
3654 : }
3655 :
3656 : /* If recog_for_combine has discarded clobbers, try to use them
3657 : again for the split. */
3658 4534100 : if (m_split_insn == 0 && newpat_vec_with_clobbers)
3659 : {
3660 1563688 : parallel = gen_rtx_PARALLEL (VOIDmode, newpat_vec_with_clobbers);
3661 1563688 : m_split_insn = combine_split_insns (parallel, i3, &old_nregs, &new_nregs);
3662 : }
3663 :
3664 4544586 : if (m_split_insn && NEXT_INSN (m_split_insn) == NULL_RTX)
3665 : {
3666 1517 : rtx m_split_pat = PATTERN (m_split_insn);
3667 1517 : insn_code_number = recog_for_combine (&m_split_pat, i3, &new_i3_notes,
3668 : old_nregs, new_nregs);
3669 1517 : if (insn_code_number >= 0)
3670 198 : newpat = m_split_pat;
3671 : }
3672 8969 : else if (m_split_insn && NEXT_INSN (NEXT_INSN (m_split_insn)) == NULL_RTX
3673 4541552 : && (next_nonnote_nondebug_insn (i2) == i3
3674 6 : || !modified_between_p (PATTERN (m_split_insn), i2, i3)))
3675 : {
3676 8969 : rtx i2set, i3set;
3677 8969 : rtx newi3pat = PATTERN (NEXT_INSN (m_split_insn));
3678 8969 : newi2pat = PATTERN (m_split_insn);
3679 :
3680 8969 : i3set = single_set (NEXT_INSN (m_split_insn));
3681 8969 : i2set = single_set (m_split_insn);
3682 :
3683 8969 : i2_code_number = recog_for_combine (&newi2pat, i2, &new_i2_notes);
3684 :
3685 : /* If I2 or I3 has multiple SETs, we won't know how to track
3686 : register status, so don't use these insns. If I2's destination
3687 : is used between I2 and I3, we also can't use these insns. */
3688 :
3689 8969 : if (i2_code_number >= 0 && i2set && i3set
3690 17938 : && (next_nonnote_nondebug_insn (i2) == i3
3691 6 : || ! reg_used_between_p (SET_DEST (i2set), i2, i3)))
3692 8969 : insn_code_number = recog_for_combine (&newi3pat, i3,
3693 : &new_i3_notes,
3694 : old_nregs, new_nregs);
3695 8969 : if (insn_code_number >= 0)
3696 8969 : newpat = newi3pat;
3697 :
3698 : /* It is possible that both insns now set the destination of I3.
3699 : If so, we must show an extra use of it. */
3700 :
3701 8969 : if (insn_code_number >= 0)
3702 : {
3703 8969 : rtx new_i3_dest = SET_DEST (i3set);
3704 8969 : rtx new_i2_dest = SET_DEST (i2set);
3705 :
3706 8969 : while (GET_CODE (new_i3_dest) == ZERO_EXTRACT
3707 9009 : || GET_CODE (new_i3_dest) == STRICT_LOW_PART
3708 18000 : || GET_CODE (new_i3_dest) == SUBREG)
3709 40 : new_i3_dest = XEXP (new_i3_dest, 0);
3710 :
3711 8969 : while (GET_CODE (new_i2_dest) == ZERO_EXTRACT
3712 8969 : || GET_CODE (new_i2_dest) == STRICT_LOW_PART
3713 17938 : || GET_CODE (new_i2_dest) == SUBREG)
3714 0 : new_i2_dest = XEXP (new_i2_dest, 0);
3715 :
3716 8969 : if (REG_P (new_i3_dest)
3717 5079 : && REG_P (new_i2_dest)
3718 5079 : && REGNO (new_i3_dest) == REGNO (new_i2_dest)
3719 8969 : && REGNO (new_i2_dest) < reg_n_sets_max)
3720 0 : INC_REG_N_SETS (REGNO (new_i2_dest), 1);
3721 : }
3722 : }
3723 :
3724 : /* If we can split it and use I2DEST, go ahead and see if that
3725 : helps things be recognized. Verify that none of the registers
3726 : are set between I2 and I3. */
3727 1319 : if (insn_code_number < 0
3728 4524933 : && (split = find_split_point (&newpat, i3, false)) != 0
3729 : /* We need I2DEST in the proper mode. If it is a hard register
3730 : or the only use of a pseudo, we can change its mode.
3731 : Make sure we don't change a hard register to have a mode that
3732 : isn't valid for it, or change the number of registers. */
3733 4268810 : && (GET_MODE (*split) == GET_MODE (i2dest)
3734 1603015 : || GET_MODE (*split) == VOIDmode
3735 1257470 : || can_change_dest_mode (i2dest, added_sets_2,
3736 : GET_MODE (*split)))
3737 3585404 : && (next_nonnote_nondebug_insn (i2) == i3
3738 570934 : || !modified_between_p (*split, i2, i3))
3739 : /* We can't overwrite I2DEST if its value is still used by
3740 : NEWPAT. */
3741 3557235 : && ! reg_referenced_p (i2dest, newpat)
3742 : /* We should not split a possibly trapping part when we
3743 : care about non-call EH and have REG_EH_REGION notes
3744 : to distribute. */
3745 8020133 : && ! (cfun->can_throw_non_call_exceptions
3746 377754 : && has_non_call_exception
3747 117 : && may_trap_p (*split)))
3748 : {
3749 3487235 : rtx newdest = i2dest;
3750 3487235 : enum rtx_code split_code = GET_CODE (*split);
3751 3487235 : machine_mode split_mode = GET_MODE (*split);
3752 3487235 : bool subst_done = false;
3753 3487235 : newi2pat = NULL_RTX;
3754 :
3755 3487235 : i2scratch = true;
3756 :
3757 : /* *SPLIT may be part of I2SRC, so make sure we have the
3758 : original expression around for later debug processing.
3759 : We should not need I2SRC any more in other cases. */
3760 3487235 : if (MAY_HAVE_DEBUG_BIND_INSNS)
3761 1665195 : i2src = copy_rtx (i2src);
3762 : else
3763 1822040 : i2src = NULL;
3764 :
3765 : /* Get NEWDEST as a register in the proper mode. We have already
3766 : validated that we can do this. */
3767 3487235 : if (GET_MODE (i2dest) != split_mode && split_mode != VOIDmode)
3768 : {
3769 570711 : if (REGNO (i2dest) < FIRST_PSEUDO_REGISTER)
3770 0 : newdest = gen_rtx_REG (split_mode, REGNO (i2dest));
3771 : else
3772 : {
3773 570711 : subst_mode (REGNO (i2dest), split_mode);
3774 570711 : newdest = regno_reg_rtx[REGNO (i2dest)];
3775 : }
3776 : }
3777 :
3778 : /* If *SPLIT is a (mult FOO (const_int pow2)), convert it to
3779 : an ASHIFT. This can occur if it was inside a PLUS and hence
3780 : appeared to be a memory address. This is a kludge. */
3781 3487235 : if (split_code == MULT
3782 200139 : && CONST_INT_P (XEXP (*split, 1))
3783 102496 : && INTVAL (XEXP (*split, 1)) > 0
3784 3586724 : && (i = exact_log2 (UINTVAL (XEXP (*split, 1)))) >= 0)
3785 : {
3786 71011 : rtx i_rtx = gen_int_shift_amount (split_mode, i);
3787 71011 : SUBST (*split, gen_rtx_ASHIFT (split_mode,
3788 : XEXP (*split, 0), i_rtx));
3789 : /* Update split_code because we may not have a multiply
3790 : anymore. */
3791 71011 : split_code = GET_CODE (*split);
3792 : }
3793 :
3794 : /* Similarly for (plus (mult FOO (const_int pow2))). */
3795 3487235 : if (split_code == PLUS
3796 659110 : && GET_CODE (XEXP (*split, 0)) == MULT
3797 108391 : && CONST_INT_P (XEXP (XEXP (*split, 0), 1))
3798 38654 : && INTVAL (XEXP (XEXP (*split, 0), 1)) > 0
3799 3522308 : && (i = exact_log2 (UINTVAL (XEXP (XEXP (*split, 0), 1)))) >= 0)
3800 : {
3801 6676 : rtx nsplit = XEXP (*split, 0);
3802 6676 : rtx i_rtx = gen_int_shift_amount (GET_MODE (nsplit), i);
3803 6676 : SUBST (XEXP (*split, 0), gen_rtx_ASHIFT (GET_MODE (nsplit),
3804 : XEXP (nsplit, 0),
3805 : i_rtx));
3806 : /* Update split_code because we may not have a multiply
3807 : anymore. */
3808 6676 : split_code = GET_CODE (*split);
3809 : }
3810 :
3811 : #ifdef INSN_SCHEDULING
3812 : /* If *SPLIT is a paradoxical SUBREG, when we split it, it should
3813 : be written as a ZERO_EXTEND. */
3814 3487235 : if (split_code == SUBREG && MEM_P (SUBREG_REG (*split)))
3815 : {
3816 : /* Or as a SIGN_EXTEND if LOAD_EXTEND_OP says that that's
3817 : what it really is. */
3818 12279 : if (load_extend_op (GET_MODE (SUBREG_REG (*split)))
3819 : == SIGN_EXTEND)
3820 : SUBST (*split, gen_rtx_SIGN_EXTEND (split_mode,
3821 : SUBREG_REG (*split)));
3822 : else
3823 12279 : SUBST (*split, gen_rtx_ZERO_EXTEND (split_mode,
3824 : SUBREG_REG (*split)));
3825 : }
3826 : #endif
3827 :
3828 : /* Attempt to split binary operators using arithmetic identities. */
3829 3487235 : if (BINARY_P (SET_SRC (newpat))
3830 2923895 : && split_mode == GET_MODE (SET_SRC (newpat))
3831 5472397 : && ! side_effects_p (SET_SRC (newpat)))
3832 : {
3833 1970841 : rtx setsrc = SET_SRC (newpat);
3834 1970841 : machine_mode mode = GET_MODE (setsrc);
3835 1970841 : enum rtx_code code = GET_CODE (setsrc);
3836 1970841 : rtx src_op0 = XEXP (setsrc, 0);
3837 1970841 : rtx src_op1 = XEXP (setsrc, 1);
3838 :
3839 : /* Split "X = Y op Y" as "Z = Y; X = Z op Z". */
3840 1970841 : if (rtx_equal_p (src_op0, src_op1))
3841 : {
3842 1459 : newi2pat = gen_rtx_SET (newdest, src_op0);
3843 1459 : SUBST (XEXP (setsrc, 0), newdest);
3844 1459 : SUBST (XEXP (setsrc, 1), newdest);
3845 1459 : subst_done = true;
3846 : }
3847 : /* Split "((P op Q) op R) op S" where op is PLUS or MULT. */
3848 1969382 : else if ((code == PLUS || code == MULT)
3849 969965 : && GET_CODE (src_op0) == code
3850 414728 : && GET_CODE (XEXP (src_op0, 0)) == code
3851 173717 : && (INTEGRAL_MODE_P (mode)
3852 : || (FLOAT_MODE_P (mode)
3853 100127 : && flag_unsafe_math_optimizations)))
3854 : {
3855 77635 : rtx p = XEXP (XEXP (src_op0, 0), 0);
3856 77635 : rtx q = XEXP (XEXP (src_op0, 0), 1);
3857 77635 : rtx r = XEXP (src_op0, 1);
3858 77635 : rtx s = src_op1;
3859 :
3860 : /* Split both "((X op Y) op X) op Y" and
3861 : "((X op Y) op Y) op X" as "T op T" where T is
3862 : "X op Y". */
3863 77891 : if ((rtx_equal_p (p,r) && rtx_equal_p (q,s))
3864 77792 : || (rtx_equal_p (p,s) && rtx_equal_p (q,r)))
3865 : {
3866 99 : newi2pat = gen_rtx_SET (newdest, XEXP (src_op0, 0));
3867 99 : SUBST (XEXP (setsrc, 0), newdest);
3868 99 : SUBST (XEXP (setsrc, 1), newdest);
3869 99 : subst_done = true;
3870 : }
3871 : /* Split "((X op X) op Y) op Y)" as "T op T" where
3872 : T is "X op Y". */
3873 77536 : else if (rtx_equal_p (p,q) && rtx_equal_p (r,s))
3874 : {
3875 60 : rtx tmp = simplify_gen_binary (code, mode, p, r);
3876 60 : newi2pat = gen_rtx_SET (newdest, tmp);
3877 60 : SUBST (XEXP (setsrc, 0), newdest);
3878 60 : SUBST (XEXP (setsrc, 1), newdest);
3879 60 : subst_done = true;
3880 : }
3881 : }
3882 : }
3883 :
3884 1618 : if (!subst_done)
3885 : {
3886 3485617 : newi2pat = gen_rtx_SET (newdest, *split);
3887 3485617 : SUBST (*split, newdest);
3888 : }
3889 :
3890 3487235 : i2_code_number = recog_for_combine (&newi2pat, i2, &new_i2_notes);
3891 :
3892 : /* recog_for_combine might have added CLOBBERs to newi2pat.
3893 : Make sure NEWPAT does not depend on the clobbered regs. */
3894 3487235 : if (GET_CODE (newi2pat) == PARALLEL)
3895 2370762 : for (i = XVECLEN (newi2pat, 0) - 1; i >= 0; i--)
3896 1591120 : if (GET_CODE (XVECEXP (newi2pat, 0, i)) == CLOBBER)
3897 : {
3898 811478 : rtx reg = XEXP (XVECEXP (newi2pat, 0, i), 0);
3899 811478 : if (reg_overlap_mentioned_p (reg, newpat))
3900 : {
3901 18318 : undo_all ();
3902 18318 : return 0;
3903 : }
3904 : }
3905 :
3906 : /* If the split point was a MULT and we didn't have one before,
3907 : don't use one now. */
3908 3468917 : if (i2_code_number >= 0 && ! (split_code == MULT && ! have_mult))
3909 2102654 : insn_code_number = recog_for_combine (&newpat, i3, &new_i3_notes);
3910 : }
3911 : }
3912 :
3913 : /* Check for a case where we loaded from memory in a narrow mode and
3914 : then sign extended it, but we need both registers. In that case,
3915 : we have a PARALLEL with both loads from the same memory location.
3916 : We can split this into a load from memory followed by a register-register
3917 : copy. This saves at least one insn, more if register allocation can
3918 : eliminate the copy.
3919 :
3920 : We cannot do this if the involved modes have more than one elements,
3921 : like for vector or complex modes.
3922 :
3923 : We cannot do this if the destination of the first assignment is a
3924 : condition code register. We eliminate this case by making sure
3925 : the SET_DEST and SET_SRC have the same mode.
3926 :
3927 : We cannot do this if the destination of the second assignment is
3928 : a register that we have already assumed is zero-extended. Similarly
3929 : for a SUBREG of such a register. */
3930 :
3931 5541451 : else if (i1 && insn_code_number < 0 && asm_noperands (newpat) < 0
3932 5484269 : && GET_CODE (newpat) == PARALLEL
3933 5482528 : && XVECLEN (newpat, 0) == 2
3934 4542095 : && GET_CODE (XVECEXP (newpat, 0, 0)) == SET
3935 4541897 : && GET_CODE (SET_SRC (XVECEXP (newpat, 0, 0))) == SIGN_EXTEND
3936 21690 : && (GET_MODE (SET_DEST (XVECEXP (newpat, 0, 0)))
3937 21690 : == GET_MODE (SET_SRC (XVECEXP (newpat, 0, 0))))
3938 21690 : && ! VECTOR_MODE_P (GET_MODE (SET_DEST (XVECEXP (newpat, 0, 0))))
3939 : && ! COMPLEX_MODE_P (GET_MODE (SET_DEST (XVECEXP (newpat, 0, 0))))
3940 20249 : && GET_CODE (XVECEXP (newpat, 0, 1)) == SET
3941 20249 : && rtx_equal_p (SET_SRC (XVECEXP (newpat, 0, 1)),
3942 20249 : XEXP (SET_SRC (XVECEXP (newpat, 0, 0)), 0))
3943 5062 : && !modified_between_p (SET_SRC (XVECEXP (newpat, 0, 1)), i2, i3)
3944 5062 : && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) != ZERO_EXTRACT
3945 5062 : && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) != STRICT_LOW_PART
3946 5062 : && ! (temp_expr = SET_DEST (XVECEXP (newpat, 0, 1)),
3947 : (REG_P (temp_expr)
3948 5062 : && reg_stat[REGNO (temp_expr)].nonzero_bits != 0
3949 5150 : && known_lt (GET_MODE_PRECISION (GET_MODE (temp_expr)),
3950 : BITS_PER_WORD)
3951 4933 : && known_lt (GET_MODE_PRECISION (GET_MODE (temp_expr)),
3952 : HOST_BITS_PER_INT)
3953 1124 : && (reg_stat[REGNO (temp_expr)].nonzero_bits
3954 1124 : != GET_MODE_MASK (word_mode))))
3955 5041 : && ! (GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) == SUBREG
3956 0 : && (temp_expr = SUBREG_REG (SET_DEST (XVECEXP (newpat, 0, 1))),
3957 0 : (REG_P (temp_expr)
3958 0 : && reg_stat[REGNO (temp_expr)].nonzero_bits != 0
3959 0 : && known_lt (GET_MODE_PRECISION (GET_MODE (temp_expr)),
3960 : BITS_PER_WORD)
3961 0 : && known_lt (GET_MODE_PRECISION (GET_MODE (temp_expr)),
3962 : HOST_BITS_PER_INT)
3963 0 : && (reg_stat[REGNO (temp_expr)].nonzero_bits
3964 0 : != GET_MODE_MASK (word_mode)))))
3965 5041 : && ! reg_overlap_mentioned_p (SET_DEST (XVECEXP (newpat, 0, 1)),
3966 5041 : SET_SRC (XVECEXP (newpat, 0, 1)))
3967 27820649 : && ! find_reg_note (i3, REG_UNUSED,
3968 4992 : SET_DEST (XVECEXP (newpat, 0, 0))))
3969 : {
3970 4992 : rtx ni2dest;
3971 :
3972 4992 : newi2pat = XVECEXP (newpat, 0, 0);
3973 4992 : ni2dest = SET_DEST (XVECEXP (newpat, 0, 0));
3974 4992 : newpat = XVECEXP (newpat, 0, 1);
3975 4992 : SUBST (SET_SRC (newpat),
3976 : gen_lowpart (GET_MODE (SET_SRC (newpat)), ni2dest));
3977 4992 : i2_code_number = recog_for_combine (&newi2pat, i2, &new_i2_notes);
3978 :
3979 4992 : if (i2_code_number >= 0)
3980 0 : insn_code_number = recog_for_combine (&newpat, i3, &new_i3_notes);
3981 :
3982 4992 : if (insn_code_number >= 0)
3983 : swap_i2i3 = 1;
3984 : }
3985 :
3986 : /* Similarly, check for a case where we have a PARALLEL of two independent
3987 : SETs but we started with three insns. In this case, we can do the sets
3988 : as two separate insns. This case occurs when some SET allows two
3989 : other insns to combine, but the destination of that SET is still live.
3990 :
3991 : Also do this if we started with two insns and (at least) one of the
3992 : resulting sets is a noop; this noop will be deleted later.
3993 :
3994 : Also do this if we started with two insns neither of which was a simple
3995 : move. */
3996 :
3997 23800555 : else if (insn_code_number < 0 && asm_noperands (newpat) < 0
3998 23782760 : && GET_CODE (newpat) == PARALLEL
3999 10849252 : && XVECLEN (newpat, 0) == 2
4000 9816863 : && GET_CODE (XVECEXP (newpat, 0, 0)) == SET
4001 9711357 : && GET_CODE (XVECEXP (newpat, 0, 1)) == SET
4002 9654291 : && (i1
4003 5137193 : || set_noop_p (XVECEXP (newpat, 0, 0))
4004 5136730 : || set_noop_p (XVECEXP (newpat, 0, 1))
4005 5136728 : || (!i2_was_move && !i3_was_move))
4006 6371814 : && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 0))) != ZERO_EXTRACT
4007 6371085 : && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 0))) != STRICT_LOW_PART
4008 6370916 : && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) != ZERO_EXTRACT
4009 6370335 : && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) != STRICT_LOW_PART
4010 6370321 : && ! reg_referenced_p (SET_DEST (XVECEXP (newpat, 0, 1)),
4011 : XVECEXP (newpat, 0, 0))
4012 5288948 : && ! reg_referenced_p (SET_DEST (XVECEXP (newpat, 0, 0)),
4013 5288948 : XVECEXP (newpat, 0, 1))
4014 33380371 : && ! (contains_muldiv (SET_SRC (XVECEXP (newpat, 0, 0)))
4015 404955 : && contains_muldiv (SET_SRC (XVECEXP (newpat, 0, 1)))))
4016 : {
4017 4938499 : rtx set0 = XVECEXP (newpat, 0, 0);
4018 4938499 : rtx set1 = XVECEXP (newpat, 0, 1);
4019 :
4020 : /* Normally, it doesn't matter which of the two is done first, but
4021 : one which uses any regs/memory set or used in between i2 and i3
4022 : can't be first. The PARALLEL might also have been pre-existing
4023 : in i3, so we need to make sure that we won't wrongly hoist a SET
4024 : to i2 that would conflict with a death note present in there, or
4025 : would have its dest modified or used between i2 and i3. */
4026 4938499 : if ((set_noop_p (set1)
4027 4938499 : || (!modified_between_p (SET_SRC (set1), i2, i3)
4028 9835399 : && !(REG_P (SET_DEST (set1))
4029 4905179 : && find_reg_note (i2, REG_DEAD, SET_DEST (set1)))
4030 4954966 : && !(GET_CODE (SET_DEST (set1)) == SUBREG
4031 25041 : && find_reg_note (i2, REG_DEAD,
4032 25041 : SUBREG_REG (SET_DEST (set1))))
4033 4929925 : && !modified_between_p (SET_DEST (set1), i2, i3)
4034 4929925 : && !reg_used_between_p (SET_DEST (set1), i2, i3)))
4035 : /* If I3 is a jump, ensure that set0 is a jump so that
4036 : we do not create invalid RTL. */
4037 9868418 : && (!JUMP_P (i3) || SET_DEST (set0) == pc_rtx))
4038 : {
4039 4929919 : newi2pat = set1;
4040 4929919 : newpat = set0;
4041 : }
4042 8580 : else if ((set_noop_p (set0)
4043 8574 : || (!modified_between_p (SET_SRC (set0), i2, i3)
4044 594 : && !(REG_P (SET_DEST (set0))
4045 297 : && find_reg_note (i2, REG_DEAD, SET_DEST (set0)))
4046 297 : && !(GET_CODE (SET_DEST (set0)) == SUBREG
4047 0 : && find_reg_note (i2, REG_DEAD,
4048 0 : SUBREG_REG (SET_DEST (set0))))
4049 297 : && !modified_between_p (SET_DEST (set0), i2, i3)
4050 296 : && !reg_used_between_p (SET_DEST (set0), i2, i3)))
4051 : /* If I3 is a jump, ensure that set1 is a jump so that
4052 : we do not create invalid RTL. */
4053 8876 : && (!JUMP_P (i3) || SET_DEST (set1) == pc_rtx))
4054 : {
4055 302 : newi2pat = set0;
4056 302 : newpat = set1;
4057 : }
4058 : else
4059 : {
4060 8278 : undo_all ();
4061 8278 : return 0;
4062 : }
4063 :
4064 4930221 : i2_code_number = recog_for_combine (&newi2pat, i2, &new_i2_notes);
4065 :
4066 4930221 : if (i2_code_number >= 0)
4067 : {
4068 : /* recog_for_combine might have added CLOBBERs to newi2pat.
4069 : Make sure NEWPAT does not depend on the clobbered regs. */
4070 3647058 : if (GET_CODE (newi2pat) == PARALLEL)
4071 : {
4072 1269026 : for (i = XVECLEN (newi2pat, 0) - 1; i >= 0; i--)
4073 849195 : if (GET_CODE (XVECEXP (newi2pat, 0, i)) == CLOBBER)
4074 : {
4075 429364 : rtx reg = XEXP (XVECEXP (newi2pat, 0, i), 0);
4076 429364 : if (reg_overlap_mentioned_p (reg, newpat))
4077 : {
4078 3212 : undo_all ();
4079 3212 : return 0;
4080 : }
4081 : }
4082 : }
4083 :
4084 3643846 : insn_code_number = recog_for_combine (&newpat, i3, &new_i3_notes);
4085 :
4086 : /* Likewise, recog_for_combine might have added clobbers to NEWPAT.
4087 : Checking that the SET0's SET_DEST and SET1's SET_DEST aren't
4088 : mentioned/clobbered, ensures NEWI2PAT's SET_DEST is live. */
4089 3643846 : if (insn_code_number >= 0 && GET_CODE (newpat) == PARALLEL)
4090 : {
4091 61721 : for (i = XVECLEN (newpat, 0) - 1; i >= 0; i--)
4092 41158 : if (GET_CODE (XVECEXP (newpat, 0, i)) == CLOBBER)
4093 : {
4094 20595 : rtx reg = XEXP (XVECEXP (newpat, 0, i), 0);
4095 20595 : if (reg_overlap_mentioned_p (reg, SET_DEST (set0))
4096 20595 : || reg_overlap_mentioned_p (reg, SET_DEST (set1)))
4097 : {
4098 0 : undo_all ();
4099 0 : return 0;
4100 : }
4101 : }
4102 : }
4103 :
4104 : if (insn_code_number >= 0)
4105 : split_i2i3 = true;
4106 : }
4107 : }
4108 :
4109 : /* If it still isn't recognized, fail and change things back the way they
4110 : were. */
4111 28671111 : if ((insn_code_number < 0
4112 : /* Is the result a reasonable ASM_OPERANDS? */
4113 32166622 : && (! check_asm_operands (newpat) || added_sets_1 || added_sets_2)))
4114 : {
4115 28101724 : undo_all ();
4116 28101724 : return 0;
4117 : }
4118 :
4119 : /* If we had to change another insn, make sure it is valid also. */
4120 4218225 : if (undobuf.other_insn)
4121 : {
4122 201995 : CLEAR_HARD_REG_SET (newpat_used_regs);
4123 :
4124 201995 : other_pat = PATTERN (undobuf.other_insn);
4125 201995 : other_code_number = recog_for_combine (&other_pat, undobuf.other_insn,
4126 : &new_other_notes);
4127 :
4128 201995 : if (other_code_number < 0 && ! check_asm_operands (other_pat))
4129 : {
4130 7252 : undo_all ();
4131 7252 : return 0;
4132 : }
4133 : }
4134 :
4135 : /* Reject this combination if insn_cost reports that the replacement
4136 : instructions are more expensive than the originals. */
4137 4210973 : if (!combine_validate_cost (i0, i1, i2, i3, newpat, newi2pat, other_pat))
4138 : {
4139 210306 : undo_all ();
4140 210306 : return 0;
4141 : }
4142 :
4143 4000667 : if (MAY_HAVE_DEBUG_BIND_INSNS)
4144 : {
4145 2164475 : struct undo *undo;
4146 :
4147 6497752 : for (undo = undobuf.undos; undo; undo = undo->next)
4148 4333277 : if (undo->kind == UNDO_MODE)
4149 : {
4150 2195 : rtx reg = regno_reg_rtx[undo->where.regno];
4151 2195 : machine_mode new_mode = GET_MODE (reg);
4152 2195 : machine_mode old_mode = undo->old_contents.m;
4153 :
4154 : /* Temporarily revert mode back. */
4155 2195 : adjust_reg_mode (reg, old_mode);
4156 :
4157 2195 : if (reg == i2dest && i2scratch)
4158 : {
4159 : /* If we used i2dest as a scratch register with a
4160 : different mode, substitute it for the original
4161 : i2src while its original mode is temporarily
4162 : restored, and then clear i2scratch so that we don't
4163 : do it again later. */
4164 2195 : propagate_for_debug (i2, last_combined_insn, reg, i2src,
4165 : this_basic_block);
4166 2195 : i2scratch = false;
4167 : /* Put back the new mode. */
4168 2195 : adjust_reg_mode (reg, new_mode);
4169 : }
4170 : else
4171 : {
4172 0 : rtx tempreg = gen_raw_REG (old_mode, REGNO (reg));
4173 0 : rtx_insn *first, *last;
4174 :
4175 0 : if (reg == i2dest)
4176 : {
4177 : first = i2;
4178 : last = last_combined_insn;
4179 : }
4180 : else
4181 : {
4182 0 : first = i3;
4183 0 : last = undobuf.other_insn;
4184 0 : gcc_assert (last);
4185 0 : if (DF_INSN_LUID (last)
4186 0 : < DF_INSN_LUID (last_combined_insn))
4187 0 : last = last_combined_insn;
4188 : }
4189 :
4190 : /* We're dealing with a reg that changed mode but not
4191 : meaning, so we want to turn it into a subreg for
4192 : the new mode. However, because of REG sharing and
4193 : because its mode had already changed, we have to do
4194 : it in two steps. First, replace any debug uses of
4195 : reg, with its original mode temporarily restored,
4196 : with this copy we have created; then, replace the
4197 : copy with the SUBREG of the original shared reg,
4198 : once again changed to the new mode. */
4199 0 : propagate_for_debug (first, last, reg, tempreg,
4200 : this_basic_block);
4201 0 : adjust_reg_mode (reg, new_mode);
4202 0 : propagate_for_debug (first, last, tempreg,
4203 : lowpart_subreg (old_mode, reg, new_mode),
4204 : this_basic_block);
4205 : }
4206 : }
4207 : }
4208 :
4209 : /* If we will be able to accept this, we have made a
4210 : change to the destination of I3. This requires us to
4211 : do a few adjustments. */
4212 :
4213 4000667 : if (changed_i3_dest)
4214 : {
4215 18514 : PATTERN (i3) = newpat;
4216 18514 : adjust_for_new_dest (i3);
4217 : }
4218 :
4219 4000667 : bool only_i3_changed = !i0 && !i1 && rtx_equal_p (newi2pat, PATTERN (i2));
4220 :
4221 : /* If only i3 has changed, any split of the combined instruction just
4222 : restored i2 to its original state. No destinations moved from i3
4223 : to i2. */
4224 : if (only_i3_changed)
4225 : split_i2i3 = false;
4226 :
4227 : /* We now know that we can do this combination. Merge the insns and
4228 : update the status of registers and LOG_LINKS. */
4229 :
4230 4000667 : if (undobuf.other_insn)
4231 : {
4232 194604 : rtx note, next;
4233 :
4234 194604 : PATTERN (undobuf.other_insn) = other_pat;
4235 :
4236 : /* If any of the notes in OTHER_INSN were REG_DEAD or REG_UNUSED,
4237 : ensure that they are still valid. Then add any non-duplicate
4238 : notes added by recog_for_combine. */
4239 581030 : for (note = REG_NOTES (undobuf.other_insn); note; note = next)
4240 : {
4241 386426 : next = XEXP (note, 1);
4242 :
4243 386426 : if ((REG_NOTE_KIND (note) == REG_DEAD
4244 197562 : && !reg_referenced_p (XEXP (note, 0),
4245 197562 : PATTERN (undobuf.other_insn)))
4246 382119 : ||(REG_NOTE_KIND (note) == REG_UNUSED
4247 28 : && !reg_set_p (XEXP (note, 0),
4248 28 : PATTERN (undobuf.other_insn)))
4249 : /* Simply drop equal note since it may be no longer valid
4250 : for other_insn. It may be possible to record that CC
4251 : register is changed and only discard those notes, but
4252 : in practice it's unnecessary complication and doesn't
4253 : give any meaningful improvement.
4254 :
4255 : See PR78559. */
4256 382119 : || REG_NOTE_KIND (note) == REG_EQUAL
4257 768404 : || REG_NOTE_KIND (note) == REG_EQUIV)
4258 4448 : remove_note (undobuf.other_insn, note);
4259 : }
4260 :
4261 194604 : distribute_notes (new_other_notes, undobuf.other_insn,
4262 : undobuf.other_insn, NULL, NULL_RTX, NULL_RTX,
4263 : NULL_RTX);
4264 : }
4265 :
4266 4000667 : if (swap_i2i3)
4267 : {
4268 : /* I3 now uses what used to be its destination and which is now
4269 : I2's destination. This requires us to do a few adjustments. */
4270 0 : PATTERN (i3) = newpat;
4271 0 : adjust_for_new_dest (i3);
4272 : }
4273 :
4274 4000667 : if (swap_i2i3 || split_i2i3)
4275 : {
4276 : /* We might need a LOG_LINK from I3 to I2. But then we used to
4277 : have one, so we still will.
4278 :
4279 : However, some later insn might be using I2's dest and have
4280 : a LOG_LINK pointing at I3. We should change it to point at
4281 : I2 instead. */
4282 :
4283 : /* newi2pat is usually a SET here; however, recog_for_combine might
4284 : have added some clobbers. */
4285 25719 : rtx x = newi2pat;
4286 25719 : if (GET_CODE (x) == PARALLEL)
4287 488 : x = XVECEXP (newi2pat, 0, 0);
4288 :
4289 25719 : if (REG_P (SET_DEST (x))
4290 6 : || (GET_CODE (SET_DEST (x)) == SUBREG
4291 0 : && REG_P (SUBREG_REG (SET_DEST (x)))))
4292 : {
4293 25713 : unsigned int regno = reg_or_subregno (SET_DEST (x));
4294 :
4295 25713 : bool done = false;
4296 473243 : for (rtx_insn *insn = NEXT_INSN (i3);
4297 473243 : !done
4298 473243 : && insn
4299 471921 : && INSN_P (insn)
4300 920773 : && BLOCK_FOR_INSN (insn) == this_basic_block;
4301 447530 : insn = NEXT_INSN (insn))
4302 : {
4303 447530 : if (DEBUG_INSN_P (insn))
4304 154331 : continue;
4305 293199 : struct insn_link *link;
4306 554050 : FOR_EACH_LOG_LINK (link, insn)
4307 260861 : if (link->insn == i3 && link->regno == regno)
4308 : {
4309 10 : link->insn = i2;
4310 10 : done = true;
4311 10 : break;
4312 : }
4313 : }
4314 : }
4315 : }
4316 :
4317 4000667 : {
4318 4000667 : rtx i3notes, i2notes, i1notes = 0, i0notes = 0;
4319 4000667 : struct insn_link *i3links, *i2links, *i1links = 0, *i0links = 0;
4320 4000667 : rtx midnotes = 0;
4321 4000667 : int from_luid;
4322 : /* Compute which registers we expect to eliminate. newi2pat may be setting
4323 : either i3dest or i2dest, so we must check it. */
4324 97727 : rtx elim_i2 = ((newi2pat && reg_set_p (i2dest, newi2pat))
4325 3913278 : || i2dest_in_i2src || i2dest_in_i1src || i2dest_in_i0src
4326 3829724 : || !i2dest_killed
4327 7829311 : ? 0 : i2dest);
4328 : /* For i1, we need to compute both local elimination and global
4329 : elimination information with respect to newi2pat because i1dest
4330 : may be the same as i3dest, in which case newi2pat may be setting
4331 : i1dest. Global information is used when distributing REG_DEAD
4332 : note for i2 and i3, in which case it does matter if newi2pat sets
4333 : i1dest or not.
4334 :
4335 : Local information is used when distributing REG_DEAD note for i1,
4336 : in which case it doesn't matter if newi2pat sets i1dest or not.
4337 : See PR62151, if we have four insns combination:
4338 : i0: r0 <- i0src
4339 : i1: r1 <- i1src (using r0)
4340 : REG_DEAD (r0)
4341 : i2: r0 <- i2src (using r1)
4342 : i3: r3 <- i3src (using r0)
4343 : ix: using r0
4344 : From i1's point of view, r0 is eliminated, no matter if it is set
4345 : by newi2pat or not. In other words, REG_DEAD info for r0 in i1
4346 : should be discarded.
4347 :
4348 : Note local information only affects cases in forms like "I1->I2->I3",
4349 : "I0->I1->I2->I3" or "I0&I1->I2, I2->I3". For other cases like
4350 : "I0->I1, I1&I2->I3" or "I1&I2->I3", newi2pat won't set i1dest or
4351 : i0dest anyway. */
4352 99088 : rtx local_elim_i1 = (i1 == 0 || i1dest_in_i1src || i1dest_in_i0src
4353 99018 : || !i1dest_killed
4354 4000667 : ? 0 : i1dest);
4355 99017 : rtx elim_i1 = (local_elim_i1 == 0
4356 99017 : || (newi2pat && reg_set_p (i1dest, newi2pat))
4357 99017 : ? 0 : i1dest);
4358 : /* Same case as i1. */
4359 4053 : rtx local_elim_i0 = (i0 == 0 || i0dest_in_i0src || !i0dest_killed
4360 4000667 : ? 0 : i0dest);
4361 4026 : rtx elim_i0 = (local_elim_i0 == 0
4362 4026 : || (newi2pat && reg_set_p (i0dest, newi2pat))
4363 4026 : ? 0 : i0dest);
4364 :
4365 : /* Get the old REG_NOTES and LOG_LINKS from all our insns and
4366 : clear them. */
4367 4000667 : i3notes = REG_NOTES (i3), i3links = LOG_LINKS (i3);
4368 4000667 : i2notes = REG_NOTES (i2), i2links = LOG_LINKS (i2);
4369 4000667 : if (i1)
4370 99088 : i1notes = REG_NOTES (i1), i1links = LOG_LINKS (i1);
4371 4000667 : if (i0)
4372 4053 : i0notes = REG_NOTES (i0), i0links = LOG_LINKS (i0);
4373 :
4374 : /* Ensure that we do not have something that should not be shared but
4375 : occurs multiple times in the new insns. Check this by first
4376 : resetting all the `used' flags and then copying anything is shared. */
4377 :
4378 4000667 : reset_used_flags (i3notes);
4379 4000667 : reset_used_flags (i2notes);
4380 4000667 : reset_used_flags (i1notes);
4381 4000667 : reset_used_flags (i0notes);
4382 4000667 : reset_used_flags (newpat);
4383 4000667 : reset_used_flags (newi2pat);
4384 4000667 : if (undobuf.other_insn)
4385 194604 : reset_used_flags (PATTERN (undobuf.other_insn));
4386 :
4387 4000667 : i3notes = copy_rtx_if_shared (i3notes);
4388 4000667 : i2notes = copy_rtx_if_shared (i2notes);
4389 4000667 : i1notes = copy_rtx_if_shared (i1notes);
4390 4000667 : i0notes = copy_rtx_if_shared (i0notes);
4391 4000667 : newpat = copy_rtx_if_shared (newpat);
4392 4000667 : newi2pat = copy_rtx_if_shared (newi2pat);
4393 4000667 : if (undobuf.other_insn)
4394 194604 : reset_used_flags (PATTERN (undobuf.other_insn));
4395 :
4396 4000667 : INSN_CODE (i3) = insn_code_number;
4397 4000667 : PATTERN (i3) = newpat;
4398 :
4399 4000667 : if (CALL_P (i3) && CALL_INSN_FUNCTION_USAGE (i3))
4400 : {
4401 347145 : for (rtx link = CALL_INSN_FUNCTION_USAGE (i3); link;
4402 269044 : link = XEXP (link, 1))
4403 : {
4404 269044 : if (substed_i2)
4405 : {
4406 : /* I2SRC must still be meaningful at this point. Some
4407 : splitting operations can invalidate I2SRC, but those
4408 : operations do not apply to calls. */
4409 269044 : gcc_assert (i2src);
4410 269044 : XEXP (link, 0) = simplify_replace_rtx (XEXP (link, 0),
4411 : i2dest, i2src);
4412 : }
4413 269044 : if (substed_i1)
4414 0 : XEXP (link, 0) = simplify_replace_rtx (XEXP (link, 0),
4415 : i1dest, i1src);
4416 269044 : if (substed_i0)
4417 0 : XEXP (link, 0) = simplify_replace_rtx (XEXP (link, 0),
4418 : i0dest, i0src);
4419 : }
4420 : }
4421 :
4422 4000667 : if (undobuf.other_insn)
4423 194604 : INSN_CODE (undobuf.other_insn) = other_code_number;
4424 :
4425 : /* We had one special case above where I2 had more than one set and
4426 : we replaced a destination of one of those sets with the destination
4427 : of I3. In that case, we have to update LOG_LINKS of insns later
4428 : in this basic block. Note that this (expensive) case is rare.
4429 :
4430 : Also, in this case, we must pretend that all REG_NOTEs for I2
4431 : actually came from I3, so that REG_UNUSED notes from I2 will be
4432 : properly handled. */
4433 :
4434 4000667 : if (i3_subst_into_i2)
4435 : {
4436 204813 : for (i = 0; i < XVECLEN (PATTERN (i2), 0); i++)
4437 140562 : if ((GET_CODE (XVECEXP (PATTERN (i2), 0, i)) == SET
4438 65443 : || GET_CODE (XVECEXP (PATTERN (i2), 0, i)) == CLOBBER)
4439 139751 : && REG_P (SET_DEST (XVECEXP (PATTERN (i2), 0, i)))
4440 125431 : && SET_DEST (XVECEXP (PATTERN (i2), 0, i)) != i2dest
4441 265993 : && ! find_reg_note (i2, REG_UNUSED,
4442 125431 : SET_DEST (XVECEXP (PATTERN (i2), 0, i))))
4443 28091285 : for (temp_insn = NEXT_INSN (i2);
4444 : temp_insn
4445 28091285 : && (this_basic_block->next_bb == EXIT_BLOCK_PTR_FOR_FN (cfun)
4446 27994559 : || BB_HEAD (this_basic_block) != temp_insn);
4447 28034315 : temp_insn = NEXT_INSN (temp_insn))
4448 28034315 : if (temp_insn != i3 && NONDEBUG_INSN_P (temp_insn))
4449 18522844 : FOR_EACH_LOG_LINK (link, temp_insn)
4450 6728233 : if (link->insn == i2)
4451 438 : link->insn = i3;
4452 :
4453 64251 : if (i3notes)
4454 : {
4455 : rtx link = i3notes;
4456 71323 : while (XEXP (link, 1))
4457 : link = XEXP (link, 1);
4458 64251 : XEXP (link, 1) = i2notes;
4459 : }
4460 : else
4461 : i3notes = i2notes;
4462 : i2notes = 0;
4463 : }
4464 :
4465 4000667 : LOG_LINKS (i3) = NULL;
4466 4000667 : REG_NOTES (i3) = 0;
4467 4000667 : LOG_LINKS (i2) = NULL;
4468 4000667 : REG_NOTES (i2) = 0;
4469 :
4470 4000667 : if (newi2pat)
4471 : {
4472 97727 : if (MAY_HAVE_DEBUG_BIND_INSNS && i2scratch)
4473 10426 : propagate_for_debug (i2, last_combined_insn, i2dest, i2src,
4474 : this_basic_block);
4475 97727 : INSN_CODE (i2) = i2_code_number;
4476 97727 : PATTERN (i2) = newi2pat;
4477 : }
4478 : else
4479 : {
4480 3902940 : if (MAY_HAVE_DEBUG_BIND_INSNS && i2src)
4481 2102388 : propagate_for_debug (i2, last_combined_insn, i2dest, i2src,
4482 : this_basic_block);
4483 3902940 : SET_INSN_DELETED (i2);
4484 : }
4485 :
4486 4000667 : if (i1)
4487 : {
4488 99088 : LOG_LINKS (i1) = NULL;
4489 99088 : REG_NOTES (i1) = 0;
4490 99088 : if (MAY_HAVE_DEBUG_BIND_INSNS)
4491 50467 : propagate_for_debug (i1, last_combined_insn, i1dest, i1src,
4492 : this_basic_block);
4493 99088 : SET_INSN_DELETED (i1);
4494 : }
4495 :
4496 4000667 : if (i0)
4497 : {
4498 4053 : LOG_LINKS (i0) = NULL;
4499 4053 : REG_NOTES (i0) = 0;
4500 4053 : if (MAY_HAVE_DEBUG_BIND_INSNS)
4501 2617 : propagate_for_debug (i0, last_combined_insn, i0dest, i0src,
4502 : this_basic_block);
4503 4053 : SET_INSN_DELETED (i0);
4504 : }
4505 :
4506 : /* Get death notes for everything that is now used in either I3 or
4507 : I2 and used to die in a previous insn. If we built two new
4508 : patterns, move from I1 to I2 then I2 to I3 so that we get the
4509 : proper movement on registers that I2 modifies. */
4510 :
4511 4000667 : if (i0)
4512 4053 : from_luid = DF_INSN_LUID (i0);
4513 3996614 : else if (i1)
4514 95035 : from_luid = DF_INSN_LUID (i1);
4515 : else
4516 3901579 : from_luid = DF_INSN_LUID (i2);
4517 4000667 : if (newi2pat)
4518 97727 : move_deaths (newi2pat, NULL_RTX, from_luid, i2, &midnotes);
4519 4000667 : move_deaths (newpat, newi2pat, from_luid, i3, &midnotes);
4520 :
4521 : /* Distribute all the LOG_LINKS and REG_NOTES from I1, I2, and I3. */
4522 4000667 : if (i3notes)
4523 7302125 : distribute_notes (i3notes, i3, i3, newi2pat ? i2 : NULL,
4524 : elim_i2, elim_i1, elim_i0);
4525 4000667 : if (i2notes)
4526 5632436 : distribute_notes (i2notes, i2, i3, newi2pat ? i2 : NULL,
4527 : elim_i2, elim_i1, elim_i0);
4528 4000667 : if (i1notes)
4529 58886 : distribute_notes (i1notes, i1, i3, newi2pat ? i2 : NULL,
4530 : elim_i2, local_elim_i1, local_elim_i0);
4531 4000667 : if (i0notes)
4532 3198 : distribute_notes (i0notes, i0, i3, newi2pat ? i2 : NULL,
4533 : elim_i2, elim_i1, local_elim_i0);
4534 4000667 : if (midnotes)
4535 4789254 : distribute_notes (midnotes, NULL, i3, newi2pat ? i2 : NULL,
4536 : elim_i2, elim_i1, elim_i0);
4537 :
4538 : /* Distribute any notes added to I2 or I3 by recog_for_combine. We
4539 : know these are REG_UNUSED and want them to go to the desired insn,
4540 : so we always pass it as i3. */
4541 :
4542 4000667 : if (newi2pat && new_i2_notes)
4543 37992 : distribute_notes (new_i2_notes, i2, i2, NULL, NULL_RTX, NULL_RTX,
4544 : NULL_RTX);
4545 :
4546 4000667 : if (new_i3_notes)
4547 140418 : distribute_notes (new_i3_notes, i3, i3, NULL, NULL_RTX, NULL_RTX,
4548 : NULL_RTX);
4549 :
4550 : /* If I3DEST was used in I3SRC, it really died in I3. We may need to
4551 : put a REG_DEAD note for it somewhere. If NEWI2PAT exists and sets
4552 : I3DEST, the death must be somewhere before I2, not I3. If we passed I3
4553 : in that case, it might delete I2. Similarly for I2 and I1.
4554 : Show an additional death due to the REG_DEAD note we make here. If
4555 : we discard it in distribute_notes, we will decrement it again. */
4556 :
4557 4000667 : if (i3dest_killed)
4558 : {
4559 301289 : rtx new_note = alloc_reg_note (REG_DEAD, i3dest_killed, NULL_RTX);
4560 301289 : if (newi2pat && reg_set_p (i3dest_killed, newi2pat))
4561 743 : distribute_notes (new_note, NULL, i2, NULL, elim_i2,
4562 : elim_i1, elim_i0);
4563 : else
4564 599603 : distribute_notes (new_note, NULL, i3, newi2pat ? i2 : NULL,
4565 : elim_i2, elim_i1, elim_i0);
4566 : }
4567 :
4568 4000667 : if (i2dest_in_i2src)
4569 : {
4570 82730 : rtx new_note = alloc_reg_note (REG_DEAD, i2dest, NULL_RTX);
4571 82730 : if (newi2pat && reg_set_p (i2dest, newi2pat))
4572 718 : distribute_notes (new_note, NULL, i2, NULL, NULL_RTX,
4573 : NULL_RTX, NULL_RTX);
4574 : else
4575 163986 : distribute_notes (new_note, NULL, i3, newi2pat ? i2 : NULL,
4576 : NULL_RTX, NULL_RTX, NULL_RTX);
4577 : }
4578 :
4579 4000667 : if (i1dest_in_i1src)
4580 : {
4581 68 : rtx new_note = alloc_reg_note (REG_DEAD, i1dest, NULL_RTX);
4582 68 : if (newi2pat && reg_set_p (i1dest, newi2pat))
4583 4 : distribute_notes (new_note, NULL, i2, NULL, NULL_RTX,
4584 : NULL_RTX, NULL_RTX);
4585 : else
4586 110 : distribute_notes (new_note, NULL, i3, newi2pat ? i2 : NULL,
4587 : NULL_RTX, NULL_RTX, NULL_RTX);
4588 : }
4589 :
4590 4000667 : if (i0dest_in_i0src)
4591 : {
4592 27 : rtx new_note = alloc_reg_note (REG_DEAD, i0dest, NULL_RTX);
4593 27 : if (newi2pat && reg_set_p (i0dest, newi2pat))
4594 0 : distribute_notes (new_note, NULL, i2, NULL, NULL_RTX,
4595 : NULL_RTX, NULL_RTX);
4596 : else
4597 54 : distribute_notes (new_note, NULL, i3, newi2pat ? i2 : NULL,
4598 : NULL_RTX, NULL_RTX, NULL_RTX);
4599 : }
4600 :
4601 4000667 : if (only_i3_changed)
4602 30740 : distribute_links (i3links, i3, param_max_combine_search_insns);
4603 : else
4604 : {
4605 3969927 : distribute_links (i3links);
4606 3969927 : distribute_links (i2links, i2);
4607 3969927 : distribute_links (i1links);
4608 3969927 : distribute_links (i0links);
4609 : }
4610 :
4611 4000667 : if (REG_P (i2dest))
4612 : {
4613 4000667 : struct insn_link *link;
4614 4000667 : rtx_insn *i2_insn = 0;
4615 4000667 : rtx i2_val = 0, set;
4616 :
4617 : /* The insn that used to set this register doesn't exist, and
4618 : this life of the register may not exist either. See if one of
4619 : I3's links points to an insn that sets I2DEST. If it does,
4620 : that is now the last known value for I2DEST. If we don't update
4621 : this and I2 set the register to a value that depended on its old
4622 : contents, we will get confused. If this insn is used, thing
4623 : will be set correctly in combine_instructions. */
4624 7314172 : FOR_EACH_LOG_LINK (link, i3)
4625 3313505 : if ((set = single_set (link->insn)) != 0
4626 3313505 : && rtx_equal_p (i2dest, SET_DEST (set)))
4627 48243 : i2_insn = link->insn, i2_val = SET_SRC (set);
4628 :
4629 4000667 : record_value_for_reg (i2dest, i2_insn, i2_val);
4630 :
4631 : /* If the reg formerly set in I2 died only once and that was in I3,
4632 : zero its use count so it won't make `reload' do any work. */
4633 4000667 : if (! added_sets_2
4634 3878300 : && (newi2pat == 0 || ! reg_mentioned_p (i2dest, newi2pat))
4635 3840383 : && ! i2dest_in_i2src
4636 7778066 : && REGNO (i2dest) < reg_n_sets_max)
4637 3777397 : INC_REG_N_SETS (REGNO (i2dest), -1);
4638 : }
4639 :
4640 4000667 : if (i1 && REG_P (i1dest))
4641 : {
4642 99088 : struct insn_link *link;
4643 99088 : rtx_insn *i1_insn = 0;
4644 99088 : rtx i1_val = 0, set;
4645 :
4646 173263 : FOR_EACH_LOG_LINK (link, i3)
4647 74175 : if ((set = single_set (link->insn)) != 0
4648 74175 : && rtx_equal_p (i1dest, SET_DEST (set)))
4649 462 : i1_insn = link->insn, i1_val = SET_SRC (set);
4650 :
4651 99088 : record_value_for_reg (i1dest, i1_insn, i1_val);
4652 :
4653 99088 : if (! added_sets_1
4654 : && ! i1dest_in_i1src
4655 99088 : && REGNO (i1dest) < reg_n_sets_max)
4656 93376 : INC_REG_N_SETS (REGNO (i1dest), -1);
4657 : }
4658 :
4659 4000667 : if (i0 && REG_P (i0dest))
4660 : {
4661 4053 : struct insn_link *link;
4662 4053 : rtx_insn *i0_insn = 0;
4663 4053 : rtx i0_val = 0, set;
4664 :
4665 6282 : FOR_EACH_LOG_LINK (link, i3)
4666 2229 : if ((set = single_set (link->insn)) != 0
4667 2229 : && rtx_equal_p (i0dest, SET_DEST (set)))
4668 0 : i0_insn = link->insn, i0_val = SET_SRC (set);
4669 :
4670 4053 : record_value_for_reg (i0dest, i0_insn, i0_val);
4671 :
4672 4053 : if (! added_sets_0
4673 : && ! i0dest_in_i0src
4674 4053 : && REGNO (i0dest) < reg_n_sets_max)
4675 3980 : INC_REG_N_SETS (REGNO (i0dest), -1);
4676 : }
4677 :
4678 : /* Update reg_stat[].nonzero_bits et al for any changes that may have
4679 : been made to this insn. The order is important, because newi2pat
4680 : can affect nonzero_bits of newpat. */
4681 4000667 : if (newi2pat)
4682 97727 : note_pattern_stores (newi2pat, set_nonzero_bits_and_sign_copies, NULL);
4683 4000667 : note_pattern_stores (newpat, set_nonzero_bits_and_sign_copies, NULL);
4684 : }
4685 :
4686 4000667 : if (undobuf.other_insn != NULL_RTX)
4687 : {
4688 194604 : if (dump_file)
4689 : {
4690 12 : fprintf (dump_file, "modifying other_insn ");
4691 12 : dump_insn_slim (dump_file, undobuf.other_insn);
4692 : }
4693 194604 : df_insn_rescan (undobuf.other_insn);
4694 : }
4695 :
4696 4000667 : if (i0 && !(NOTE_P (i0) && (NOTE_KIND (i0) == NOTE_INSN_DELETED)))
4697 : {
4698 0 : if (dump_file)
4699 : {
4700 0 : fprintf (dump_file, "modifying insn i0 ");
4701 0 : dump_insn_slim (dump_file, i0);
4702 : }
4703 0 : df_insn_rescan (i0);
4704 : }
4705 :
4706 4000667 : if (i1 && !(NOTE_P (i1) && (NOTE_KIND (i1) == NOTE_INSN_DELETED)))
4707 : {
4708 0 : if (dump_file)
4709 : {
4710 0 : fprintf (dump_file, "modifying insn i1 ");
4711 0 : dump_insn_slim (dump_file, i1);
4712 : }
4713 0 : df_insn_rescan (i1);
4714 : }
4715 :
4716 4000667 : if (i2 && !(NOTE_P (i2) && (NOTE_KIND (i2) == NOTE_INSN_DELETED)))
4717 : {
4718 97727 : if (dump_file)
4719 : {
4720 15 : fprintf (dump_file, "modifying insn i2 ");
4721 15 : dump_insn_slim (dump_file, i2);
4722 : }
4723 97727 : df_insn_rescan (i2);
4724 : }
4725 :
4726 4000667 : if (i3 && !(NOTE_P (i3) && (NOTE_KIND (i3) == NOTE_INSN_DELETED)))
4727 : {
4728 4000667 : if (dump_file)
4729 : {
4730 240 : fprintf (dump_file, "modifying insn i3 ");
4731 240 : dump_insn_slim (dump_file, i3);
4732 : }
4733 4000667 : df_insn_rescan (i3);
4734 : }
4735 :
4736 : /* Set new_direct_jump_p if a new return or simple jump instruction
4737 : has been created. Adjust the CFG accordingly. */
4738 4000667 : if (returnjump_p (i3) || any_uncondjump_p (i3))
4739 : {
4740 164 : *new_direct_jump_p = 1;
4741 164 : mark_jump_label (PATTERN (i3), i3, 0);
4742 164 : update_cfg_for_uncondjump (i3);
4743 : }
4744 :
4745 4000667 : if (undobuf.other_insn != NULL_RTX
4746 4000667 : && (returnjump_p (undobuf.other_insn)
4747 194604 : || any_uncondjump_p (undobuf.other_insn)))
4748 : {
4749 1929 : *new_direct_jump_p = 1;
4750 1929 : update_cfg_for_uncondjump (undobuf.other_insn);
4751 : }
4752 :
4753 4000667 : if (GET_CODE (PATTERN (i3)) == TRAP_IF
4754 4000667 : && XEXP (PATTERN (i3), 0) == const1_rtx)
4755 : {
4756 0 : basic_block bb = BLOCK_FOR_INSN (i3);
4757 0 : gcc_assert (bb);
4758 0 : remove_edge (split_block (bb, i3));
4759 0 : emit_barrier_after_bb (bb);
4760 0 : *new_direct_jump_p = 1;
4761 : }
4762 :
4763 4000667 : if (undobuf.other_insn
4764 194604 : && GET_CODE (PATTERN (undobuf.other_insn)) == TRAP_IF
4765 4000667 : && XEXP (PATTERN (undobuf.other_insn), 0) == const1_rtx)
4766 : {
4767 0 : basic_block bb = BLOCK_FOR_INSN (undobuf.other_insn);
4768 0 : gcc_assert (bb);
4769 0 : remove_edge (split_block (bb, undobuf.other_insn));
4770 0 : emit_barrier_after_bb (bb);
4771 0 : *new_direct_jump_p = 1;
4772 : }
4773 :
4774 : /* A noop might also need cleaning up of CFG, if it comes from the
4775 : simplification of a jump. */
4776 4000667 : if (JUMP_P (i3)
4777 44520 : && GET_CODE (newpat) == SET
4778 33830 : && SET_SRC (newpat) == pc_rtx
4779 403 : && SET_DEST (newpat) == pc_rtx)
4780 : {
4781 403 : *new_direct_jump_p = 1;
4782 403 : update_cfg_for_uncondjump (i3);
4783 : }
4784 :
4785 4000667 : if (undobuf.other_insn != NULL_RTX
4786 194604 : && JUMP_P (undobuf.other_insn)
4787 188767 : && GET_CODE (PATTERN (undobuf.other_insn)) == SET
4788 188767 : && SET_SRC (PATTERN (undobuf.other_insn)) == pc_rtx
4789 4002565 : && SET_DEST (PATTERN (undobuf.other_insn)) == pc_rtx)
4790 : {
4791 1898 : *new_direct_jump_p = 1;
4792 1898 : update_cfg_for_uncondjump (undobuf.other_insn);
4793 : }
4794 :
4795 4000667 : combine_successes++;
4796 4000667 : undo_commit ();
4797 :
4798 4000667 : if (only_i3_changed)
4799 : return i3;
4800 :
4801 3969927 : rtx_insn *ret = newi2pat ? i2 : i3;
4802 3969927 : if (added_links_insn && DF_INSN_LUID (added_links_insn) < DF_INSN_LUID (ret))
4803 : ret = added_links_insn;
4804 3969927 : if (added_notes_insn && DF_INSN_LUID (added_notes_insn) < DF_INSN_LUID (ret))
4805 : ret = added_notes_insn;
4806 :
4807 : return ret;
4808 : }
4809 :
4810 : /* Get a marker for undoing to the current state. */
4811 :
4812 : static void *
4813 36751860 : get_undo_marker (void)
4814 : {
4815 36751860 : return undobuf.undos;
4816 : }
4817 :
4818 : /* Undo the modifications up to the marker. */
4819 :
4820 : static void
4821 43492104 : undo_to_marker (void *marker)
4822 : {
4823 43492104 : struct undo *undo, *next;
4824 :
4825 136579954 : for (undo = undobuf.undos; undo != marker; undo = next)
4826 : {
4827 93087850 : gcc_assert (undo);
4828 :
4829 93087850 : next = undo->next;
4830 93087850 : switch (undo->kind)
4831 : {
4832 86228694 : case UNDO_RTX:
4833 86228694 : *undo->where.r = undo->old_contents.r;
4834 86228694 : break;
4835 6223232 : case UNDO_INT:
4836 6223232 : *undo->where.i = undo->old_contents.i;
4837 6223232 : break;
4838 566495 : case UNDO_MODE:
4839 566495 : adjust_reg_mode (regno_reg_rtx[undo->where.regno],
4840 : undo->old_contents.m);
4841 566495 : break;
4842 69429 : case UNDO_LINKS:
4843 69429 : *undo->where.l = undo->old_contents.l;
4844 69429 : break;
4845 0 : default:
4846 0 : gcc_unreachable ();
4847 : }
4848 :
4849 93087850 : undo->next = undobuf.frees;
4850 93087850 : undobuf.frees = undo;
4851 : }
4852 :
4853 43492104 : undobuf.undos = (struct undo *) marker;
4854 43492104 : }
4855 :
4856 : /* Undo all the modifications recorded in undobuf. */
4857 :
4858 : static void
4859 42384828 : undo_all (void)
4860 : {
4861 42384828 : undo_to_marker (0);
4862 0 : }
4863 :
4864 : /* We've committed to accepting the changes we made. Move all
4865 : of the undos to the free list. */
4866 :
4867 : static void
4868 4000667 : undo_commit (void)
4869 : {
4870 4000667 : struct undo *undo, *next;
4871 :
4872 11821603 : for (undo = undobuf.undos; undo; undo = next)
4873 : {
4874 7820936 : next = undo->next;
4875 7820936 : undo->next = undobuf.frees;
4876 7820936 : undobuf.frees = undo;
4877 : }
4878 4000667 : undobuf.undos = 0;
4879 4000667 : }
4880 :
4881 : /* Find the innermost point within the rtx at LOC, possibly LOC itself,
4882 : where we have an arithmetic expression and return that point. LOC will
4883 : be inside INSN.
4884 :
4885 : try_combine will call this function to see if an insn can be split into
4886 : two insns. */
4887 :
4888 : static rtx *
4889 30490588 : find_split_point (rtx *loc, rtx_insn *insn, bool set_src)
4890 : {
4891 30490588 : rtx x = *loc;
4892 30490588 : enum rtx_code code = GET_CODE (x);
4893 30490588 : rtx *split;
4894 30490588 : unsigned HOST_WIDE_INT len = 0;
4895 30490588 : HOST_WIDE_INT pos = 0;
4896 30490588 : bool unsignedp = false;
4897 30490588 : rtx inner = NULL_RTX;
4898 30490588 : scalar_int_mode mode, inner_mode;
4899 :
4900 : /* First special-case some codes. */
4901 30490588 : switch (code)
4902 : {
4903 973005 : case SUBREG:
4904 : #ifdef INSN_SCHEDULING
4905 : /* If we are making a paradoxical SUBREG invalid, it becomes a split
4906 : point. */
4907 973005 : if (MEM_P (SUBREG_REG (x)))
4908 : return loc;
4909 : #endif
4910 958295 : return find_split_point (&SUBREG_REG (x), insn, false);
4911 :
4912 1494043 : case MEM:
4913 : /* If we have (mem (const ..)) or (mem (symbol_ref ...)), split it
4914 : using LO_SUM and HIGH. */
4915 1494043 : if (HAVE_lo_sum && (GET_CODE (XEXP (x, 0)) == CONST
4916 : || GET_CODE (XEXP (x, 0)) == SYMBOL_REF))
4917 : {
4918 : machine_mode address_mode = get_address_mode (x);
4919 :
4920 : SUBST (XEXP (x, 0),
4921 : gen_rtx_LO_SUM (address_mode,
4922 : gen_rtx_HIGH (address_mode, XEXP (x, 0)),
4923 : XEXP (x, 0)));
4924 : return &XEXP (XEXP (x, 0), 0);
4925 : }
4926 :
4927 : /* If we have a PLUS whose second operand is a constant and the
4928 : address is not valid, perhaps we can split it up using
4929 : the machine-specific way to split large constants. We use
4930 : the first pseudo-reg (one of the virtual regs) as a placeholder;
4931 : it will not remain in the result. */
4932 1494043 : if (GET_CODE (XEXP (x, 0)) == PLUS
4933 980158 : && CONST_INT_P (XEXP (XEXP (x, 0), 1))
4934 3166271 : && ! memory_address_addr_space_p (GET_MODE (x), XEXP (x, 0),
4935 692070 : MEM_ADDR_SPACE (x)))
4936 : {
4937 129071 : rtx reg = regno_reg_rtx[FIRST_PSEUDO_REGISTER];
4938 129071 : unsigned int old_nregs, new_nregs;
4939 129071 : rtx_insn *seq = combine_split_insns (gen_rtx_SET (reg, XEXP (x, 0)),
4940 : subst_insn, &old_nregs, &new_nregs);
4941 :
4942 : /* This should have produced two insns, each of which sets our
4943 : placeholder. If the source of the second is a valid address,
4944 : we can put both sources together and make a split point
4945 : in the middle. */
4946 :
4947 129071 : if (seq
4948 54 : && NEXT_INSN (seq) != NULL_RTX
4949 0 : && NEXT_INSN (NEXT_INSN (seq)) == NULL_RTX
4950 0 : && NONJUMP_INSN_P (seq)
4951 0 : && GET_CODE (PATTERN (seq)) == SET
4952 0 : && SET_DEST (PATTERN (seq)) == reg
4953 0 : && ! reg_mentioned_p (reg,
4954 0 : SET_SRC (PATTERN (seq)))
4955 0 : && NONJUMP_INSN_P (NEXT_INSN (seq))
4956 0 : && GET_CODE (PATTERN (NEXT_INSN (seq))) == SET
4957 0 : && SET_DEST (PATTERN (NEXT_INSN (seq))) == reg
4958 129071 : && memory_address_addr_space_p
4959 129071 : (GET_MODE (x), SET_SRC (PATTERN (NEXT_INSN (seq))),
4960 0 : MEM_ADDR_SPACE (x)))
4961 : {
4962 0 : rtx src1 = SET_SRC (PATTERN (seq));
4963 0 : rtx src2 = SET_SRC (PATTERN (NEXT_INSN (seq)));
4964 :
4965 : /* Replace the placeholder in SRC2 with SRC1. If we can
4966 : find where in SRC2 it was placed, that can become our
4967 : split point and we can replace this address with SRC2.
4968 : Just try two obvious places. */
4969 :
4970 0 : src2 = replace_rtx (src2, reg, src1);
4971 0 : split = 0;
4972 0 : if (XEXP (src2, 0) == src1)
4973 0 : split = &XEXP (src2, 0);
4974 0 : else if (GET_RTX_FORMAT (GET_CODE (XEXP (src2, 0)))[0] == 'e'
4975 0 : && XEXP (XEXP (src2, 0), 0) == src1)
4976 0 : split = &XEXP (XEXP (src2, 0), 0);
4977 :
4978 0 : if (split)
4979 : {
4980 0 : SUBST (XEXP (x, 0), src2);
4981 105136 : return split;
4982 : }
4983 : }
4984 :
4985 : /* If that didn't work and we have a nested plus, like:
4986 : ((REG1 * CONST1) + REG2) + CONST2 and (REG1 + REG2) + CONST2
4987 : is valid address, try to split (REG1 * CONST1). */
4988 129071 : if (GET_CODE (XEXP (XEXP (x, 0), 0)) == PLUS
4989 94336 : && !OBJECT_P (XEXP (XEXP (XEXP (x, 0), 0), 0))
4990 81422 : && OBJECT_P (XEXP (XEXP (XEXP (x, 0), 0), 1))
4991 81418 : && ! (GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 0)) == SUBREG
4992 10 : && OBJECT_P (SUBREG_REG (XEXP (XEXP (XEXP (x, 0),
4993 : 0), 0)))))
4994 : {
4995 81418 : rtx tem = XEXP (XEXP (XEXP (x, 0), 0), 0);
4996 81418 : XEXP (XEXP (XEXP (x, 0), 0), 0) = reg;
4997 162836 : if (memory_address_addr_space_p (GET_MODE (x), XEXP (x, 0),
4998 81418 : MEM_ADDR_SPACE (x)))
4999 : {
5000 70418 : XEXP (XEXP (XEXP (x, 0), 0), 0) = tem;
5001 70418 : return &XEXP (XEXP (XEXP (x, 0), 0), 0);
5002 : }
5003 11000 : XEXP (XEXP (XEXP (x, 0), 0), 0) = tem;
5004 11000 : }
5005 47653 : else if (GET_CODE (XEXP (XEXP (x, 0), 0)) == PLUS
5006 12918 : && OBJECT_P (XEXP (XEXP (XEXP (x, 0), 0), 0))
5007 12914 : && !OBJECT_P (XEXP (XEXP (XEXP (x, 0), 0), 1))
5008 266 : && ! (GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 1)) == SUBREG
5009 266 : && OBJECT_P (SUBREG_REG (XEXP (XEXP (XEXP (x, 0),
5010 : 0), 1)))))
5011 : {
5012 0 : rtx tem = XEXP (XEXP (XEXP (x, 0), 0), 1);
5013 0 : XEXP (XEXP (XEXP (x, 0), 0), 1) = reg;
5014 0 : if (memory_address_addr_space_p (GET_MODE (x), XEXP (x, 0),
5015 0 : MEM_ADDR_SPACE (x)))
5016 : {
5017 0 : XEXP (XEXP (XEXP (x, 0), 0), 1) = tem;
5018 0 : return &XEXP (XEXP (XEXP (x, 0), 0), 1);
5019 : }
5020 0 : XEXP (XEXP (XEXP (x, 0), 0), 1) = tem;
5021 : }
5022 :
5023 : /* If that didn't work, perhaps the first operand is complex and
5024 : needs to be computed separately, so make a split point there.
5025 : This will occur on machines that just support REG + CONST
5026 : and have a constant moved through some previous computation. */
5027 58653 : if (!OBJECT_P (XEXP (XEXP (x, 0), 0))
5028 34718 : && ! (GET_CODE (XEXP (XEXP (x, 0), 0)) == SUBREG
5029 0 : && OBJECT_P (SUBREG_REG (XEXP (XEXP (x, 0), 0)))))
5030 34718 : return &XEXP (XEXP (x, 0), 0);
5031 : }
5032 :
5033 : /* If we have a PLUS whose first operand is complex, try computing it
5034 : separately by making a split there. */
5035 1388907 : if (GET_CODE (XEXP (x, 0)) == PLUS
5036 2433430 : && ! memory_address_addr_space_p (GET_MODE (x), XEXP (x, 0),
5037 875022 : MEM_ADDR_SPACE (x))
5038 169501 : && ! OBJECT_P (XEXP (XEXP (x, 0), 0))
5039 1508275 : && ! (GET_CODE (XEXP (XEXP (x, 0), 0)) == SUBREG
5040 558 : && OBJECT_P (SUBREG_REG (XEXP (XEXP (x, 0), 0)))))
5041 119364 : return &XEXP (XEXP (x, 0), 0);
5042 : break;
5043 :
5044 4524933 : case SET:
5045 : /* See if we can split SET_SRC as it stands. */
5046 4524933 : split = find_split_point (&SET_SRC (x), insn, true);
5047 4524933 : if (split && split != &SET_SRC (x))
5048 : return split;
5049 :
5050 : /* See if we can split SET_DEST as it stands. */
5051 501618 : split = find_split_point (&SET_DEST (x), insn, false);
5052 501618 : if (split && split != &SET_DEST (x))
5053 : return split;
5054 :
5055 : /* See if this is a bitfield assignment with everything constant. If
5056 : so, this is an IOR of an AND, so split it into that. */
5057 469966 : if (GET_CODE (SET_DEST (x)) == ZERO_EXTRACT
5058 4313 : && is_a <scalar_int_mode> (GET_MODE (XEXP (SET_DEST (x), 0)),
5059 : &inner_mode)
5060 4313 : && HWI_COMPUTABLE_MODE_P (inner_mode)
5061 4313 : && CONST_INT_P (XEXP (SET_DEST (x), 1))
5062 4313 : && CONST_INT_P (XEXP (SET_DEST (x), 2))
5063 4138 : && CONST_INT_P (SET_SRC (x))
5064 419 : && ((INTVAL (XEXP (SET_DEST (x), 1))
5065 419 : + INTVAL (XEXP (SET_DEST (x), 2)))
5066 419 : <= GET_MODE_PRECISION (inner_mode))
5067 470385 : && ! side_effects_p (XEXP (SET_DEST (x), 0)))
5068 : {
5069 402 : HOST_WIDE_INT pos = INTVAL (XEXP (SET_DEST (x), 2));
5070 402 : unsigned HOST_WIDE_INT len = INTVAL (XEXP (SET_DEST (x), 1));
5071 402 : rtx dest = XEXP (SET_DEST (x), 0);
5072 402 : unsigned HOST_WIDE_INT mask = (HOST_WIDE_INT_1U << len) - 1;
5073 402 : unsigned HOST_WIDE_INT src = INTVAL (SET_SRC (x)) & mask;
5074 402 : rtx or_mask;
5075 :
5076 402 : if (BITS_BIG_ENDIAN)
5077 : pos = GET_MODE_PRECISION (inner_mode) - len - pos;
5078 :
5079 402 : or_mask = gen_int_mode (src << pos, inner_mode);
5080 402 : if (src == mask)
5081 0 : SUBST (SET_SRC (x),
5082 : simplify_gen_binary (IOR, inner_mode, dest, or_mask));
5083 : else
5084 : {
5085 402 : rtx negmask = gen_int_mode (~(mask << pos), inner_mode);
5086 402 : SUBST (SET_SRC (x),
5087 : simplify_gen_binary (IOR, inner_mode,
5088 : simplify_gen_binary (AND, inner_mode,
5089 : dest, negmask),
5090 : or_mask));
5091 : }
5092 :
5093 402 : SUBST (SET_DEST (x), dest);
5094 :
5095 402 : split = find_split_point (&SET_SRC (x), insn, true);
5096 402 : if (split && split != &SET_SRC (x))
5097 : return split;
5098 : }
5099 :
5100 : /* Otherwise, see if this is an operation that we can split into two.
5101 : If so, try to split that. */
5102 469564 : code = GET_CODE (SET_SRC (x));
5103 :
5104 469564 : switch (code)
5105 : {
5106 16261 : case AND:
5107 : /* If we are AND'ing with a large constant that is only a single
5108 : bit and the result is only being used in a context where we
5109 : need to know if it is zero or nonzero, replace it with a bit
5110 : extraction. This will avoid the large constant, which might
5111 : have taken more than one insn to make. If the constant were
5112 : not a valid argument to the AND but took only one insn to make,
5113 : this is no worse, but if it took more than one insn, it will
5114 : be better. */
5115 :
5116 16261 : if (CONST_INT_P (XEXP (SET_SRC (x), 1))
5117 10906 : && REG_P (XEXP (SET_SRC (x), 0))
5118 442 : && (pos = exact_log2 (UINTVAL (XEXP (SET_SRC (x), 1)))) >= 7
5119 1 : && REG_P (SET_DEST (x))
5120 0 : && (split = find_single_use (SET_DEST (x), insn, NULL)) != 0
5121 0 : && (GET_CODE (*split) == EQ || GET_CODE (*split) == NE)
5122 0 : && XEXP (*split, 0) == SET_DEST (x)
5123 16261 : && XEXP (*split, 1) == const0_rtx)
5124 : {
5125 0 : rtx extraction = make_extraction (GET_MODE (SET_DEST (x)),
5126 0 : XEXP (SET_SRC (x), 0),
5127 : pos, NULL_RTX, 1,
5128 : true, false, false);
5129 0 : if (extraction != 0)
5130 : {
5131 0 : SUBST (SET_SRC (x), extraction);
5132 0 : return find_split_point (loc, insn, false);
5133 : }
5134 : }
5135 : break;
5136 :
5137 : case NE:
5138 : /* If STORE_FLAG_VALUE is -1, this is (NE X 0) and only one bit of X
5139 : is known to be on, this can be converted into a NEG of a shift. */
5140 : if (STORE_FLAG_VALUE == -1 && XEXP (SET_SRC (x), 1) == const0_rtx
5141 : && GET_MODE (SET_SRC (x)) == GET_MODE (XEXP (SET_SRC (x), 0))
5142 : && ((pos = exact_log2 (nonzero_bits (XEXP (SET_SRC (x), 0),
5143 : GET_MODE (XEXP (SET_SRC (x),
5144 : 0))))) >= 1))
5145 : {
5146 : machine_mode mode = GET_MODE (XEXP (SET_SRC (x), 0));
5147 : rtx pos_rtx = gen_int_shift_amount (mode, pos);
5148 : SUBST (SET_SRC (x),
5149 : gen_rtx_NEG (mode,
5150 : gen_rtx_LSHIFTRT (mode,
5151 : XEXP (SET_SRC (x), 0),
5152 : pos_rtx)));
5153 :
5154 : split = find_split_point (&SET_SRC (x), insn, true);
5155 : if (split && split != &SET_SRC (x))
5156 : return split;
5157 : }
5158 : break;
5159 :
5160 512 : case SIGN_EXTEND:
5161 512 : inner = XEXP (SET_SRC (x), 0);
5162 :
5163 : /* We can't optimize if either mode is a partial integer
5164 : mode as we don't know how many bits are significant
5165 : in those modes. */
5166 512 : if (!is_int_mode (GET_MODE (inner), &inner_mode)
5167 506 : || GET_MODE_CLASS (GET_MODE (SET_SRC (x))) == MODE_PARTIAL_INT)
5168 : break;
5169 :
5170 506 : pos = 0;
5171 506 : len = GET_MODE_PRECISION (inner_mode);
5172 506 : unsignedp = false;
5173 506 : break;
5174 :
5175 12617 : case SIGN_EXTRACT:
5176 12617 : case ZERO_EXTRACT:
5177 12617 : if (is_a <scalar_int_mode> (GET_MODE (XEXP (SET_SRC (x), 0)),
5178 : &inner_mode)
5179 12334 : && CONST_INT_P (XEXP (SET_SRC (x), 1))
5180 12334 : && CONST_INT_P (XEXP (SET_SRC (x), 2)))
5181 : {
5182 11818 : inner = XEXP (SET_SRC (x), 0);
5183 11818 : len = INTVAL (XEXP (SET_SRC (x), 1));
5184 11818 : pos = INTVAL (XEXP (SET_SRC (x), 2));
5185 :
5186 11818 : if (BITS_BIG_ENDIAN)
5187 : pos = GET_MODE_PRECISION (inner_mode) - len - pos;
5188 11818 : unsignedp = (code == ZERO_EXTRACT);
5189 : }
5190 : break;
5191 :
5192 : default:
5193 : break;
5194 : }
5195 :
5196 469564 : if (len
5197 12324 : && known_subrange_p (pos, len,
5198 12324 : 0, GET_MODE_PRECISION (GET_MODE (inner)))
5199 481888 : && is_a <scalar_int_mode> (GET_MODE (SET_SRC (x)), &mode))
5200 : {
5201 : /* For unsigned, we have a choice of a shift followed by an
5202 : AND or two shifts. Use two shifts for field sizes where the
5203 : constant might be too large. We assume here that we can
5204 : always at least get 8-bit constants in an AND insn, which is
5205 : true for every current RISC. */
5206 :
5207 12324 : if (unsignedp && len <= 8)
5208 : {
5209 5205 : unsigned HOST_WIDE_INT mask
5210 5205 : = (HOST_WIDE_INT_1U << len) - 1;
5211 5205 : rtx pos_rtx = gen_int_shift_amount (mode, pos);
5212 5205 : SUBST (SET_SRC (x),
5213 : gen_rtx_AND (mode,
5214 : gen_rtx_LSHIFTRT
5215 : (mode, gen_lowpart (mode, inner), pos_rtx),
5216 : gen_int_mode (mask, mode)));
5217 :
5218 5205 : split = find_split_point (&SET_SRC (x), insn, true);
5219 5205 : if (split && split != &SET_SRC (x))
5220 30490588 : return split;
5221 : }
5222 : else
5223 : {
5224 7119 : int left_bits = GET_MODE_PRECISION (mode) - len - pos;
5225 7119 : int right_bits = GET_MODE_PRECISION (mode) - len;
5226 14238 : SUBST (SET_SRC (x),
5227 : gen_rtx_fmt_ee
5228 : (unsignedp ? LSHIFTRT : ASHIFTRT, mode,
5229 : gen_rtx_ASHIFT (mode,
5230 : gen_lowpart (mode, inner),
5231 : gen_int_shift_amount (mode, left_bits)),
5232 : gen_int_shift_amount (mode, right_bits)));
5233 :
5234 7119 : split = find_split_point (&SET_SRC (x), insn, true);
5235 7119 : if (split && split != &SET_SRC (x))
5236 30490588 : return split;
5237 : }
5238 : }
5239 :
5240 : /* See if this is a simple operation with a constant as the second
5241 : operand. It might be that this constant is out of range and hence
5242 : could be used as a split point. */
5243 457240 : if (BINARY_P (SET_SRC (x))
5244 206718 : && CONSTANT_P (XEXP (SET_SRC (x), 1))
5245 113149 : && (OBJECT_P (XEXP (SET_SRC (x), 0))
5246 35298 : || (GET_CODE (XEXP (SET_SRC (x), 0)) == SUBREG
5247 11235 : && OBJECT_P (SUBREG_REG (XEXP (SET_SRC (x), 0))))))
5248 79665 : return &XEXP (SET_SRC (x), 1);
5249 :
5250 : /* Finally, see if this is a simple operation with its first operand
5251 : not in a register. The operation might require this operand in a
5252 : register, so return it as a split point. We can always do this
5253 : because if the first operand were another operation, we would have
5254 : already found it as a split point. */
5255 377575 : if ((BINARY_P (SET_SRC (x)) || UNARY_P (SET_SRC (x)))
5256 377575 : && ! register_operand (XEXP (SET_SRC (x), 0), VOIDmode))
5257 121452 : return &XEXP (SET_SRC (x), 0);
5258 :
5259 : return 0;
5260 :
5261 1150126 : case AND:
5262 1150126 : case IOR:
5263 : /* We write NOR as (and (not A) (not B)), but if we don't have a NOR,
5264 : it is better to write this as (not (ior A B)) so we can split it.
5265 : Similarly for IOR. */
5266 1150126 : if (GET_CODE (XEXP (x, 0)) == NOT && GET_CODE (XEXP (x, 1)) == NOT)
5267 : {
5268 1758 : SUBST (*loc,
5269 : gen_rtx_NOT (GET_MODE (x),
5270 : gen_rtx_fmt_ee (code == IOR ? AND : IOR,
5271 : GET_MODE (x),
5272 : XEXP (XEXP (x, 0), 0),
5273 : XEXP (XEXP (x, 1), 0))));
5274 879 : return find_split_point (loc, insn, set_src);
5275 : }
5276 :
5277 : /* Many RISC machines have a large set of logical insns. If the
5278 : second operand is a NOT, put it first so we will try to split the
5279 : other operand first. */
5280 1149247 : if (GET_CODE (XEXP (x, 1)) == NOT)
5281 : {
5282 5263 : rtx tem = XEXP (x, 0);
5283 5263 : SUBST (XEXP (x, 0), XEXP (x, 1));
5284 5263 : SUBST (XEXP (x, 1), tem);
5285 : }
5286 : /* Many targets have a `(and (not X) Y)` and/or `(ior (not X) Y)` instructions.
5287 : Split at that insns. However if this is
5288 : the SET_SRC, we likely do not have such an instruction and it's
5289 : worthless to try this split. */
5290 1149247 : if (!set_src && GET_CODE (XEXP (x, 0)) == NOT)
5291 : return loc;
5292 : break;
5293 :
5294 3068125 : case PLUS:
5295 3068125 : case MINUS:
5296 : /* Canonicalization can produce (minus A (mult B C)), where C is a
5297 : constant. It may be better to try splitting (plus (mult B -C) A)
5298 : instead if this isn't a multiply by a power of two. */
5299 185872 : if (set_src && code == MINUS && GET_CODE (XEXP (x, 1)) == MULT
5300 21129 : && GET_CODE (XEXP (XEXP (x, 1), 1)) == CONST_INT
5301 3073640 : && !pow2p_hwi (INTVAL (XEXP (XEXP (x, 1), 1))))
5302 : {
5303 5515 : machine_mode mode = GET_MODE (x);
5304 5515 : unsigned HOST_WIDE_INT this_int = INTVAL (XEXP (XEXP (x, 1), 1));
5305 5515 : HOST_WIDE_INT other_int = trunc_int_for_mode (-this_int, mode);
5306 5515 : SUBST (*loc, gen_rtx_PLUS (mode,
5307 : gen_rtx_MULT (mode,
5308 : XEXP (XEXP (x, 1), 0),
5309 : gen_int_mode (other_int,
5310 : mode)),
5311 : XEXP (x, 0)));
5312 5515 : return find_split_point (loc, insn, set_src);
5313 : }
5314 :
5315 : /* Split at a multiply-accumulate instruction. However if this is
5316 : the SET_SRC, we likely do not have such an instruction and it's
5317 : worthless to try this split. */
5318 3062610 : if (!set_src
5319 1821867 : && (GET_CODE (XEXP (x, 0)) == MULT
5320 1712076 : || (GET_CODE (XEXP (x, 0)) == ASHIFT
5321 111207 : && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT)))
5322 : return loc;
5323 :
5324 : default:
5325 : break;
5326 : }
5327 :
5328 : /* Otherwise, select our actions depending on our rtx class. */
5329 24535429 : switch (GET_RTX_CLASS (code))
5330 : {
5331 1360548 : case RTX_BITFIELD_OPS: /* This is ZERO_EXTRACT and SIGN_EXTRACT. */
5332 1360548 : case RTX_TERNARY:
5333 1360548 : split = find_split_point (&XEXP (x, 2), insn, false);
5334 1360548 : if (split)
5335 : return split;
5336 : /* fall through */
5337 9681667 : case RTX_BIN_ARITH:
5338 9681667 : case RTX_COMM_ARITH:
5339 9681667 : case RTX_COMPARE:
5340 9681667 : case RTX_COMM_COMPARE:
5341 9681667 : split = find_split_point (&XEXP (x, 1), insn, false);
5342 9681667 : if (split)
5343 : return split;
5344 : /* fall through */
5345 9267609 : case RTX_UNARY:
5346 : /* Some machines have (and (shift ...) ...) insns. If X is not
5347 : an AND, but XEXP (X, 0) is, use it as our split point. */
5348 9267609 : if (GET_CODE (x) != AND && GET_CODE (XEXP (x, 0)) == AND)
5349 348135 : return &XEXP (x, 0);
5350 :
5351 8919474 : split = find_split_point (&XEXP (x, 0), insn, false);
5352 8919474 : if (split)
5353 : return split;
5354 : return loc;
5355 :
5356 : default:
5357 : /* Otherwise, we don't have a split point. */
5358 : return 0;
5359 : }
5360 : }
5361 :
5362 : /* Throughout X, replace FROM with TO, and return the result.
5363 : The result is TO if X is FROM;
5364 : otherwise the result is X, but its contents may have been modified.
5365 : If they were modified, a record was made in undobuf so that
5366 : undo_all will (among other things) return X to its original state.
5367 :
5368 : If the number of changes necessary is too much to record to undo,
5369 : the excess changes are not made, so the result is invalid.
5370 : The changes already made can still be undone.
5371 : undobuf.num_undo is incremented for such changes, so by testing that
5372 : the caller can tell whether the result is valid.
5373 :
5374 : `n_occurrences' is incremented each time FROM is replaced.
5375 :
5376 : IN_DEST is true if we are processing the SET_DEST of a SET.
5377 :
5378 : IN_COND is true if we are at the top level of a condition.
5379 :
5380 : UNIQUE_COPY is true if each substitution must be unique. We do this
5381 : by copying if `n_occurrences' is nonzero. */
5382 :
5383 : static rtx
5384 401724702 : subst (rtx x, rtx from, rtx to, bool in_dest, bool in_cond, bool unique_copy)
5385 : {
5386 401724702 : enum rtx_code code = GET_CODE (x);
5387 401724702 : machine_mode op0_mode = VOIDmode;
5388 401724702 : const char *fmt;
5389 401724702 : int len, i;
5390 401724702 : rtx new_rtx;
5391 :
5392 : /* Two expressions are equal if they are identical copies of a shared
5393 : RTX or if they are both registers with the same register number
5394 : and mode. */
5395 :
5396 : #define COMBINE_RTX_EQUAL_P(X,Y) \
5397 : ((X) == (Y) \
5398 : || (REG_P (X) && REG_P (Y) \
5399 : && REGNO (X) == REGNO (Y) && GET_MODE (X) == GET_MODE (Y)))
5400 :
5401 : /* Do not substitute into clobbers of regs -- this will never result in
5402 : valid RTL. */
5403 401724702 : if (GET_CODE (x) == CLOBBER && REG_P (XEXP (x, 0)))
5404 : return x;
5405 :
5406 391770061 : if (! in_dest && COMBINE_RTX_EQUAL_P (x, from))
5407 : {
5408 0 : n_occurrences++;
5409 0 : return (unique_copy && n_occurrences > 1 ? copy_rtx (to) : to);
5410 : }
5411 :
5412 : /* If X and FROM are the same register but different modes, they
5413 : will not have been seen as equal above. However, the log links code
5414 : will make a LOG_LINKS entry for that case. If we do nothing, we
5415 : will try to rerecognize our original insn and, when it succeeds,
5416 : we will delete the feeding insn, which is incorrect.
5417 :
5418 : So force this insn not to match in this (rare) case. */
5419 86930547 : if (! in_dest && code == REG && REG_P (from)
5420 423219309 : && reg_overlap_mentioned_p (x, from))
5421 4214 : return gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);
5422 :
5423 : /* If this is an object, we are done unless it is a MEM or LO_SUM, both
5424 : of which may contain things that can be combined. */
5425 391765847 : if (code != MEM && code != LO_SUM && OBJECT_P (x))
5426 : return x;
5427 :
5428 : /* It is possible to have a subexpression appear twice in the insn.
5429 : Suppose that FROM is a register that appears within TO.
5430 : Then, after that subexpression has been scanned once by `subst',
5431 : the second time it is scanned, TO may be found. If we were
5432 : to scan TO here, we would find FROM within it and create a
5433 : self-referent rtl structure which is completely wrong. */
5434 209732372 : if (COMBINE_RTX_EQUAL_P (x, to))
5435 : return to;
5436 :
5437 : /* Parallel asm_operands need special attention because all of the
5438 : inputs are shared across the arms. Furthermore, unsharing the
5439 : rtl results in recognition failures. Failure to handle this case
5440 : specially can result in circular rtl.
5441 :
5442 : Solve this by doing a normal pass across the first entry of the
5443 : parallel, and only processing the SET_DESTs of the subsequent
5444 : entries. Ug. */
5445 :
5446 209592945 : if (code == PARALLEL
5447 12324573 : && GET_CODE (XVECEXP (x, 0, 0)) == SET
5448 10518175 : && GET_CODE (SET_SRC (XVECEXP (x, 0, 0))) == ASM_OPERANDS)
5449 : {
5450 20406 : new_rtx = subst (XVECEXP (x, 0, 0), from, to, false, false, unique_copy);
5451 :
5452 : /* If this substitution failed, this whole thing fails. */
5453 20406 : if (GET_CODE (new_rtx) == CLOBBER
5454 0 : && XEXP (new_rtx, 0) == const0_rtx)
5455 : return new_rtx;
5456 :
5457 20406 : SUBST (XVECEXP (x, 0, 0), new_rtx);
5458 :
5459 101105 : for (i = XVECLEN (x, 0) - 1; i >= 1; i--)
5460 : {
5461 80699 : rtx dest = SET_DEST (XVECEXP (x, 0, i));
5462 :
5463 80699 : if (!REG_P (dest) && GET_CODE (dest) != PC)
5464 : {
5465 2537 : new_rtx = subst (dest, from, to, false, false, unique_copy);
5466 :
5467 : /* If this substitution failed, this whole thing fails. */
5468 2537 : if (GET_CODE (new_rtx) == CLOBBER
5469 0 : && XEXP (new_rtx, 0) == const0_rtx)
5470 : return new_rtx;
5471 :
5472 2537 : SUBST (SET_DEST (XVECEXP (x, 0, i)), new_rtx);
5473 : }
5474 : }
5475 : }
5476 : else
5477 : {
5478 209572539 : len = GET_RTX_LENGTH (code);
5479 209572539 : fmt = GET_RTX_FORMAT (code);
5480 :
5481 : /* We don't need to process a SET_DEST that is a register or PC, so
5482 : set up to skip this common case. All other cases where we want
5483 : to suppress replacing something inside a SET_SRC are handled via
5484 : the IN_DEST operand. */
5485 209572539 : if (code == SET
5486 46354732 : && (REG_P (SET_DEST (x))
5487 46354732 : || GET_CODE (SET_DEST (x)) == PC))
5488 209572539 : fmt = "ie";
5489 :
5490 : /* Trying to simplify the operands of a widening MULT is not likely
5491 : to create RTL matching a machine insn. */
5492 209572539 : if (code == MULT
5493 4678294 : && (GET_CODE (XEXP (x, 0)) == ZERO_EXTEND
5494 4678294 : || GET_CODE (XEXP (x, 0)) == SIGN_EXTEND)
5495 275110 : && (GET_CODE (XEXP (x, 1)) == ZERO_EXTEND
5496 275110 : || GET_CODE (XEXP (x, 1)) == SIGN_EXTEND)
5497 201999 : && REG_P (XEXP (XEXP (x, 0), 0))
5498 87338 : && REG_P (XEXP (XEXP (x, 1), 0))
5499 72872 : && from == to)
5500 : return x;
5501 :
5502 :
5503 : /* Get the mode of operand 0 in case X is now a SIGN_EXTEND of a
5504 : constant. */
5505 209531564 : if (fmt[0] == 'e')
5506 154435508 : op0_mode = GET_MODE (XEXP (x, 0));
5507 :
5508 621337928 : for (i = 0; i < len; i++)
5509 : {
5510 412632325 : if (fmt[i] == 'E')
5511 : {
5512 14672495 : int j;
5513 46309634 : for (j = XVECLEN (x, i) - 1; j >= 0; j--)
5514 : {
5515 31736809 : if (COMBINE_RTX_EQUAL_P (XVECEXP (x, i, j), from))
5516 : {
5517 1539 : new_rtx = (unique_copy && n_occurrences
5518 296443 : ? copy_rtx (to) : to);
5519 296419 : n_occurrences++;
5520 : }
5521 : else
5522 : {
5523 31440390 : new_rtx = subst (XVECEXP (x, i, j), from, to,
5524 : false, false, unique_copy);
5525 :
5526 : /* If this substitution failed, this whole thing
5527 : fails. */
5528 31440390 : if (GET_CODE (new_rtx) == CLOBBER
5529 10355863 : && XEXP (new_rtx, 0) == const0_rtx)
5530 : return new_rtx;
5531 : }
5532 :
5533 31637139 : SUBST (XVECEXP (x, i, j), new_rtx);
5534 : }
5535 : }
5536 397959830 : else if (fmt[i] == 'e')
5537 : {
5538 : /* If this is a register being set, ignore it. */
5539 324927947 : new_rtx = XEXP (x, i);
5540 324927947 : if (in_dest
5541 324927947 : && i == 0
5542 5966446 : && (((code == SUBREG || code == ZERO_EXTRACT)
5543 350187 : && REG_P (new_rtx))
5544 5618753 : || code == STRICT_LOW_PART))
5545 : ;
5546 :
5547 324569654 : else if (COMBINE_RTX_EQUAL_P (XEXP (x, i), from))
5548 : {
5549 : /* In general, don't install a subreg involving two
5550 : modes not tieable. It can worsen register
5551 : allocation, and can even make invalid reload
5552 : insns, since the reg inside may need to be copied
5553 : from in the outside mode, and that may be invalid
5554 : if it is an fp reg copied in integer mode.
5555 :
5556 : We allow an exception to this: It is valid if
5557 : it is inside another SUBREG and the mode of that
5558 : SUBREG and the mode of the inside of TO is
5559 : tieable. */
5560 :
5561 46615468 : if (GET_CODE (to) == SUBREG
5562 534569 : && !targetm.modes_tieable_p (GET_MODE (to),
5563 534569 : GET_MODE (SUBREG_REG (to)))
5564 46896855 : && ! (code == SUBREG
5565 22108 : && (targetm.modes_tieable_p
5566 22108 : (GET_MODE (x), GET_MODE (SUBREG_REG (to))))))
5567 257092 : return gen_rtx_CLOBBER (VOIDmode, const0_rtx);
5568 :
5569 46358376 : if (code == SUBREG
5570 2405217 : && REG_P (to)
5571 96742 : && REGNO (to) < FIRST_PSEUDO_REGISTER
5572 46358381 : && simplify_subreg_regno (REGNO (to), GET_MODE (to),
5573 5 : SUBREG_BYTE (x),
5574 5 : GET_MODE (x)) < 0)
5575 0 : return gen_rtx_CLOBBER (VOIDmode, const0_rtx);
5576 :
5577 46358376 : new_rtx = (unique_copy && n_occurrences ? copy_rtx (to) : to);
5578 46358376 : n_occurrences++;
5579 : }
5580 : else
5581 : /* If we are in a SET_DEST, suppress most cases unless we
5582 : have gone inside a MEM, in which case we want to
5583 : simplify the address. We assume here that things that
5584 : are actually part of the destination have their inner
5585 : parts in the first expression. This is true for SUBREG,
5586 : STRICT_LOW_PART, and ZERO_EXTRACT, which are the only
5587 : things aside from REG and MEM that should appear in a
5588 : SET_DEST. */
5589 277954186 : new_rtx = subst (XEXP (x, i), from, to,
5590 : (((in_dest
5591 5335264 : && (code == SUBREG || code == STRICT_LOW_PART
5592 5335264 : || code == ZERO_EXTRACT))
5593 277946169 : || code == SET)
5594 47909231 : && i == 0),
5595 277954186 : code == IF_THEN_ELSE && i == 0,
5596 : unique_copy);
5597 :
5598 : /* If we found that we will have to reject this combination,
5599 : indicate that by returning the CLOBBER ourselves, rather than
5600 : an expression containing it. This will speed things up as
5601 : well as prevent accidents where two CLOBBERs are considered
5602 : to be equal, thus producing an incorrect simplification. */
5603 :
5604 324670855 : if (GET_CODE (new_rtx) == CLOBBER && XEXP (new_rtx, 0) == const0_rtx)
5605 : return new_rtx;
5606 :
5607 324201908 : if (GET_CODE (x) == SUBREG && CONST_SCALAR_INT_P (new_rtx))
5608 : {
5609 30509 : machine_mode mode = GET_MODE (x);
5610 :
5611 61018 : x = simplify_subreg (GET_MODE (x), new_rtx,
5612 30509 : GET_MODE (SUBREG_REG (x)),
5613 30509 : SUBREG_BYTE (x));
5614 30509 : if (! x)
5615 2 : x = gen_rtx_CLOBBER (mode, const0_rtx);
5616 : }
5617 324171399 : else if (CONST_SCALAR_INT_P (new_rtx)
5618 : && (GET_CODE (x) == ZERO_EXTEND
5619 58768491 : || GET_CODE (x) == SIGN_EXTEND
5620 : || GET_CODE (x) == FLOAT
5621 : || GET_CODE (x) == UNSIGNED_FLOAT))
5622 : {
5623 135784 : x = simplify_unary_operation (GET_CODE (x), GET_MODE (x),
5624 : new_rtx,
5625 67892 : GET_MODE (XEXP (x, 0)));
5626 67892 : if (!x)
5627 252 : return gen_rtx_CLOBBER (VOIDmode, const0_rtx);
5628 : }
5629 : /* CONST_INTs shouldn't be substituted into PRE_DEC, PRE_MODIFY
5630 : etc. arguments, otherwise we can ICE before trying to recog
5631 : it. See PR104446. */
5632 324103507 : else if (CONST_SCALAR_INT_P (new_rtx)
5633 58700599 : && GET_RTX_CLASS (GET_CODE (x)) == RTX_AUTOINC)
5634 0 : return gen_rtx_CLOBBER (VOIDmode, const0_rtx);
5635 : else
5636 324103507 : SUBST (XEXP (x, i), new_rtx);
5637 : }
5638 : }
5639 : }
5640 :
5641 : /* Check if we are loading something from the constant pool via float
5642 : extension; in this case we would undo compress_float_constant
5643 : optimization and degenerate constant load to an immediate value. */
5644 208726009 : if (GET_CODE (x) == FLOAT_EXTEND
5645 314985 : && MEM_P (XEXP (x, 0))
5646 208789048 : && MEM_READONLY_P (XEXP (x, 0)))
5647 : {
5648 35777 : rtx tmp = avoid_constant_pool_reference (x);
5649 35777 : if (x != tmp)
5650 : return x;
5651 : }
5652 :
5653 : /* Try to simplify X. If the simplification changed the code, it is likely
5654 : that further simplification will help, so loop, but limit the number
5655 : of repetitions that will be performed. */
5656 :
5657 216472907 : for (i = 0; i < 4; i++)
5658 : {
5659 : /* If X is sufficiently simple, don't bother trying to do anything
5660 : with it. */
5661 216463196 : if (code != CONST_INT && code != REG && code != CLOBBER)
5662 215820029 : x = combine_simplify_rtx (x, op0_mode, in_dest, in_cond);
5663 :
5664 216463196 : if (GET_CODE (x) == code)
5665 : break;
5666 :
5667 7782545 : code = GET_CODE (x);
5668 :
5669 : /* We no longer know the original mode of operand 0 since we
5670 : have changed the form of X) */
5671 7782545 : op0_mode = VOIDmode;
5672 : }
5673 :
5674 : return x;
5675 : }
5676 :
5677 : /* If X is a commutative operation whose operands are not in the canonical
5678 : order, use substitutions to swap them. */
5679 :
5680 : static void
5681 630876675 : maybe_swap_commutative_operands (rtx x)
5682 : {
5683 630876675 : if (COMMUTATIVE_ARITH_P (x)
5684 630876675 : && swap_commutative_operands_p (XEXP (x, 0), XEXP (x, 1)))
5685 : {
5686 3544398 : rtx temp = XEXP (x, 0);
5687 3544398 : SUBST (XEXP (x, 0), XEXP (x, 1));
5688 3544398 : SUBST (XEXP (x, 1), temp);
5689 : }
5690 :
5691 : /* Canonicalize (vec_merge (fma op2 op1 op3) op1 mask) to
5692 : (vec_merge (fma op1 op2 op3) op1 mask). */
5693 630876675 : if (GET_CODE (x) == VEC_MERGE
5694 813622 : && GET_CODE (XEXP (x, 0)) == FMA)
5695 : {
5696 25224 : rtx fma_op1 = XEXP (XEXP (x, 0), 0);
5697 25224 : rtx fma_op2 = XEXP (XEXP (x, 0), 1);
5698 25224 : rtx masked_op = XEXP (x, 1);
5699 25224 : if (rtx_equal_p (masked_op, fma_op2))
5700 : {
5701 218 : if (GET_CODE (fma_op1) == NEG)
5702 : {
5703 : /* Keep the negate canonicalized to the first operand. */
5704 150 : fma_op1 = XEXP (fma_op1, 0);
5705 150 : SUBST (XEXP (XEXP (XEXP (x, 0), 0), 0), fma_op2);
5706 150 : SUBST (XEXP (XEXP (x, 0), 1), fma_op1);
5707 : }
5708 : else
5709 : {
5710 68 : SUBST (XEXP (XEXP (x, 0), 0), fma_op2);
5711 68 : SUBST (XEXP (XEXP (x, 0), 1), fma_op1);
5712 : }
5713 : }
5714 : }
5715 :
5716 630876675 : unsigned n_elts = 0;
5717 630876675 : if (GET_CODE (x) == VEC_MERGE
5718 813622 : && CONST_INT_P (XEXP (x, 2))
5719 874422 : && GET_MODE_NUNITS (GET_MODE (x)).is_constant (&n_elts)
5720 631313886 : && (swap_commutative_operands_p (XEXP (x, 0), XEXP (x, 1))
5721 : /* Two operands have same precedence, then
5722 : first bit of mask select first operand. */
5723 404064 : || (!swap_commutative_operands_p (XEXP (x, 1), XEXP (x, 0))
5724 105375 : && !(UINTVAL (XEXP (x, 2)) & 1))))
5725 : {
5726 49092 : rtx temp = XEXP (x, 0);
5727 49092 : unsigned HOST_WIDE_INT sel = UINTVAL (XEXP (x, 2));
5728 49092 : unsigned HOST_WIDE_INT mask = HOST_WIDE_INT_1U;
5729 49092 : if (n_elts == HOST_BITS_PER_WIDE_INT)
5730 : mask = -1;
5731 : else
5732 48951 : mask = (HOST_WIDE_INT_1U << n_elts) - 1;
5733 49092 : SUBST (XEXP (x, 0), XEXP (x, 1));
5734 49092 : SUBST (XEXP (x, 1), temp);
5735 49092 : SUBST (XEXP (x, 2), GEN_INT (~sel & mask));
5736 : }
5737 630876675 : }
5738 :
5739 : /* Simplify X, a piece of RTL. We just operate on the expression at the
5740 : outer level; call `subst' to simplify recursively. Return the new
5741 : expression.
5742 :
5743 : OP0_MODE is the original mode of XEXP (x, 0). IN_DEST is true
5744 : if we are inside a SET_DEST. IN_COND is true if we are at the top level
5745 : of a condition. */
5746 :
5747 : static rtx
5748 216122648 : combine_simplify_rtx (rtx x, machine_mode op0_mode, bool in_dest, bool in_cond)
5749 : {
5750 216122648 : enum rtx_code code = GET_CODE (x);
5751 216122648 : machine_mode mode = GET_MODE (x);
5752 216122648 : scalar_int_mode int_mode;
5753 216122648 : rtx temp;
5754 216122648 : int i;
5755 :
5756 : /* If this is a commutative operation, put a constant last and a complex
5757 : expression first. We don't need to do this for comparisons here. */
5758 216122648 : maybe_swap_commutative_operands (x);
5759 :
5760 : /* Try to fold this expression in case we have constants that weren't
5761 : present before. */
5762 216122648 : temp = 0;
5763 216122648 : switch (GET_RTX_CLASS (code))
5764 : {
5765 6849461 : case RTX_UNARY:
5766 6849461 : if (op0_mode == VOIDmode)
5767 157153 : op0_mode = GET_MODE (XEXP (x, 0));
5768 6849461 : temp = simplify_unary_operation (code, mode, XEXP (x, 0), op0_mode);
5769 6849461 : break;
5770 17569742 : case RTX_COMPARE:
5771 17569742 : case RTX_COMM_COMPARE:
5772 17569742 : {
5773 17569742 : machine_mode cmp_mode = GET_MODE (XEXP (x, 0));
5774 17569742 : if (cmp_mode == VOIDmode)
5775 : {
5776 50390 : cmp_mode = GET_MODE (XEXP (x, 1));
5777 50390 : if (cmp_mode == VOIDmode)
5778 7876 : cmp_mode = op0_mode;
5779 : }
5780 17569742 : temp = simplify_relational_operation (code, mode, cmp_mode,
5781 : XEXP (x, 0), XEXP (x, 1));
5782 : }
5783 17569742 : break;
5784 85399658 : case RTX_COMM_ARITH:
5785 85399658 : case RTX_BIN_ARITH:
5786 85399658 : temp = simplify_binary_operation (code, mode, XEXP (x, 0), XEXP (x, 1));
5787 85399658 : break;
5788 14001452 : case RTX_BITFIELD_OPS:
5789 14001452 : case RTX_TERNARY:
5790 14001452 : temp = simplify_ternary_operation (code, mode, op0_mode, XEXP (x, 0),
5791 : XEXP (x, 1), XEXP (x, 2));
5792 14001452 : break;
5793 : default:
5794 : break;
5795 : }
5796 :
5797 123820313 : if (temp)
5798 : {
5799 16547218 : x = temp;
5800 16547218 : code = GET_CODE (temp);
5801 16547218 : op0_mode = VOIDmode;
5802 16547218 : mode = GET_MODE (temp);
5803 : }
5804 :
5805 : /* If this is a simple operation applied to an IF_THEN_ELSE, try
5806 : applying it to the arms of the IF_THEN_ELSE. This often simplifies
5807 : things. Check for cases where both arms are testing the same
5808 : condition.
5809 :
5810 : Don't do anything if all operands are very simple. */
5811 :
5812 216122648 : if ((BINARY_P (x)
5813 102717042 : && ((!OBJECT_P (XEXP (x, 0))
5814 39906936 : && ! (GET_CODE (XEXP (x, 0)) == SUBREG
5815 4592744 : && OBJECT_P (SUBREG_REG (XEXP (x, 0)))))
5816 65421911 : || (!OBJECT_P (XEXP (x, 1))
5817 4432237 : && ! (GET_CODE (XEXP (x, 1)) == SUBREG
5818 1524315 : && OBJECT_P (SUBREG_REG (XEXP (x, 1)))))))
5819 175666700 : || (UNARY_P (x)
5820 6727723 : && (!OBJECT_P (XEXP (x, 0))
5821 2857923 : && ! (GET_CODE (XEXP (x, 0)) == SUBREG
5822 611378 : && OBJECT_P (SUBREG_REG (XEXP (x, 0)))))))
5823 : {
5824 42772797 : rtx cond, true_rtx, false_rtx;
5825 :
5826 42772797 : cond = if_then_else_cond (x, &true_rtx, &false_rtx);
5827 42772797 : if (cond != 0
5828 : /* If everything is a comparison, what we have is highly unlikely
5829 : to be simpler, so don't use it. */
5830 4047746 : && ! (COMPARISON_P (x)
5831 1232859 : && (COMPARISON_P (true_rtx) || COMPARISON_P (false_rtx)))
5832 : /* Similarly, if we end up with one of the expressions the same
5833 : as the original, it is certainly not simpler. */
5834 3877358 : && ! rtx_equal_p (x, true_rtx)
5835 46650155 : && ! rtx_equal_p (x, false_rtx))
5836 : {
5837 3877358 : rtx cop1 = const0_rtx;
5838 3877358 : enum rtx_code cond_code = simplify_comparison (NE, &cond, &cop1);
5839 :
5840 3877358 : if (cond_code == NE && COMPARISON_P (cond))
5841 583865 : return x;
5842 :
5843 : /* Simplify the alternative arms; this may collapse the true and
5844 : false arms to store-flag values. Be careful to use copy_rtx
5845 : here since true_rtx or false_rtx might share RTL with x as a
5846 : result of the if_then_else_cond call above. */
5847 3293493 : true_rtx = subst (copy_rtx (true_rtx), pc_rtx, pc_rtx,
5848 : false, false, false);
5849 3293493 : false_rtx = subst (copy_rtx (false_rtx), pc_rtx, pc_rtx,
5850 : false, false, false);
5851 :
5852 : /* If true_rtx and false_rtx are not general_operands, an if_then_else
5853 : is unlikely to be simpler. */
5854 3293493 : if (general_operand (true_rtx, VOIDmode)
5855 3293493 : && general_operand (false_rtx, VOIDmode))
5856 : {
5857 1328264 : enum rtx_code reversed;
5858 :
5859 : /* Restarting if we generate a store-flag expression will cause
5860 : us to loop. Just drop through in this case. */
5861 :
5862 : /* If the result values are STORE_FLAG_VALUE and zero, we can
5863 : just make the comparison operation. */
5864 1328264 : if (true_rtx == const_true_rtx && false_rtx == const0_rtx)
5865 487936 : x = simplify_gen_relational (cond_code, mode, VOIDmode,
5866 : cond, cop1);
5867 616560 : else if (true_rtx == const0_rtx && false_rtx == const_true_rtx
5868 840328 : && ((reversed = reversed_comparison_code_parts
5869 560865 : (cond_code, cond, cop1, NULL))
5870 : != UNKNOWN))
5871 560865 : x = simplify_gen_relational (reversed, mode, VOIDmode,
5872 : cond, cop1);
5873 :
5874 : /* Likewise, we can make the negate of a comparison operation
5875 : if the result values are - STORE_FLAG_VALUE and zero. */
5876 279463 : else if (CONST_INT_P (true_rtx)
5877 199939 : && INTVAL (true_rtx) == - STORE_FLAG_VALUE
5878 41890 : && false_rtx == const0_rtx)
5879 39742 : x = simplify_gen_unary (NEG, mode,
5880 : simplify_gen_relational (cond_code,
5881 : mode, VOIDmode,
5882 : cond, cop1),
5883 : mode);
5884 239721 : else if (CONST_INT_P (false_rtx)
5885 190548 : && INTVAL (false_rtx) == - STORE_FLAG_VALUE
5886 22790 : && true_rtx == const0_rtx
5887 239721 : && ((reversed = reversed_comparison_code_parts
5888 20299 : (cond_code, cond, cop1, NULL))
5889 : != UNKNOWN))
5890 20296 : x = simplify_gen_unary (NEG, mode,
5891 : simplify_gen_relational (reversed,
5892 : mode, VOIDmode,
5893 : cond, cop1),
5894 : mode);
5895 :
5896 1328264 : code = GET_CODE (x);
5897 1328264 : op0_mode = VOIDmode;
5898 : }
5899 : }
5900 : }
5901 :
5902 : /* First see if we can apply the inverse distributive law. */
5903 215538783 : if (code == PLUS || code == MINUS
5904 215538783 : || code == AND || code == IOR || code == XOR)
5905 : {
5906 48459724 : x = apply_distributive_law (x);
5907 48459724 : code = GET_CODE (x);
5908 48459724 : op0_mode = VOIDmode;
5909 : }
5910 :
5911 : /* If CODE is an associative operation not otherwise handled, see if we
5912 : can associate some operands. This can win if they are constants or
5913 : if they are logically related (i.e. (a & b) & a). */
5914 215538783 : if ((code == PLUS || code == MINUS || code == MULT || code == DIV
5915 : || code == AND || code == IOR || code == XOR
5916 : || code == SMAX || code == SMIN || code == UMAX || code == UMIN)
5917 52400282 : && ((INTEGRAL_MODE_P (mode) && code != DIV)
5918 4660301 : || (flag_associative_math && FLOAT_MODE_P (mode))))
5919 : {
5920 48374532 : if (GET_CODE (XEXP (x, 0)) == code)
5921 : {
5922 4273411 : rtx other = XEXP (XEXP (x, 0), 0);
5923 4273411 : rtx inner_op0 = XEXP (XEXP (x, 0), 1);
5924 4273411 : rtx inner_op1 = XEXP (x, 1);
5925 4273411 : rtx inner;
5926 :
5927 : /* Make sure we pass the constant operand if any as the second
5928 : one if this is a commutative operation. */
5929 4273411 : if (CONSTANT_P (inner_op0) && COMMUTATIVE_ARITH_P (x))
5930 : std::swap (inner_op0, inner_op1);
5931 4273411 : inner = simplify_binary_operation (code == MINUS ? PLUS
5932 4179863 : : code == DIV ? MULT
5933 : : code,
5934 : mode, inner_op0, inner_op1);
5935 :
5936 : /* For commutative operations, try the other pair if that one
5937 : didn't simplify. */
5938 4273411 : if (inner == 0 && COMMUTATIVE_ARITH_P (x))
5939 : {
5940 4147777 : other = XEXP (XEXP (x, 0), 1);
5941 4147777 : inner = simplify_binary_operation (code, mode,
5942 : XEXP (XEXP (x, 0), 0),
5943 : XEXP (x, 1));
5944 : }
5945 :
5946 4238196 : if (inner)
5947 235068 : return simplify_gen_binary (code, mode, other, inner);
5948 : }
5949 : }
5950 :
5951 : /* A little bit of algebraic simplification here. */
5952 215303715 : switch (code)
5953 : {
5954 22368348 : case MEM:
5955 : /* Ensure that our address has any ASHIFTs converted to MULT in case
5956 : address-recognizing predicates are called later. */
5957 22368348 : temp = make_compound_operation (XEXP (x, 0), MEM);
5958 22368348 : SUBST (XEXP (x, 0), temp);
5959 22368348 : break;
5960 :
5961 8013967 : case SUBREG:
5962 8013967 : if (op0_mode == VOIDmode)
5963 141303 : op0_mode = GET_MODE (SUBREG_REG (x));
5964 :
5965 : /* See if this can be moved to simplify_subreg. */
5966 8013967 : if (CONSTANT_P (SUBREG_REG (x))
5967 20249 : && known_eq (subreg_lowpart_offset (mode, op0_mode), SUBREG_BYTE (x))
5968 : /* Don't call gen_lowpart if the inner mode
5969 : is VOIDmode and we cannot simplify it, as SUBREG without
5970 : inner mode is invalid. */
5971 8034216 : && (GET_MODE (SUBREG_REG (x)) != VOIDmode
5972 0 : || gen_lowpart_common (mode, SUBREG_REG (x))))
5973 20249 : return gen_lowpart (mode, SUBREG_REG (x));
5974 :
5975 7993718 : if (GET_MODE_CLASS (GET_MODE (SUBREG_REG (x))) == MODE_CC)
5976 : break;
5977 7993718 : {
5978 7993718 : rtx temp;
5979 15987436 : temp = simplify_subreg (mode, SUBREG_REG (x), op0_mode,
5980 7993718 : SUBREG_BYTE (x));
5981 7993718 : if (temp)
5982 216122648 : return temp;
5983 :
5984 : /* If op is known to have all lower bits zero, the result is zero. */
5985 7401984 : scalar_int_mode int_mode, int_op0_mode;
5986 7401984 : if (!in_dest
5987 3967309 : && is_a <scalar_int_mode> (mode, &int_mode)
5988 3887577 : && is_a <scalar_int_mode> (op0_mode, &int_op0_mode)
5989 3887577 : && (GET_MODE_PRECISION (int_mode)
5990 3887577 : < GET_MODE_PRECISION (int_op0_mode))
5991 3364940 : && known_eq (subreg_lowpart_offset (int_mode, int_op0_mode),
5992 : SUBREG_BYTE (x))
5993 2913426 : && HWI_COMPUTABLE_MODE_P (int_op0_mode)
5994 2722385 : && ((nonzero_bits (SUBREG_REG (x), int_op0_mode)
5995 2722385 : & GET_MODE_MASK (int_mode)) == 0)
5996 7402799 : && !side_effects_p (SUBREG_REG (x)))
5997 815 : return CONST0_RTX (int_mode);
5998 : }
5999 :
6000 : /* Don't change the mode of the MEM if that would change the meaning
6001 : of the address. */
6002 7401169 : if (MEM_P (SUBREG_REG (x))
6003 7401169 : && (MEM_VOLATILE_P (SUBREG_REG (x))
6004 84290 : || mode_dependent_address_p (XEXP (SUBREG_REG (x), 0),
6005 84365 : MEM_ADDR_SPACE (SUBREG_REG (x)))))
6006 44119 : return gen_rtx_CLOBBER (mode, const0_rtx);
6007 :
6008 : /* Note that we cannot do any narrowing for non-constants since
6009 : we might have been counting on using the fact that some bits were
6010 : zero. We now do this in the SET. */
6011 :
6012 : break;
6013 :
6014 368331 : case NEG:
6015 368331 : temp = expand_compound_operation (XEXP (x, 0));
6016 :
6017 : /* For C equal to the width of MODE minus 1, (neg (ashiftrt X C)) can be
6018 : replaced by (lshiftrt X C). This will convert
6019 : (neg (sign_extract X 1 Y)) to (zero_extract X 1 Y). */
6020 :
6021 368331 : if (GET_CODE (temp) == ASHIFTRT
6022 14274 : && CONST_INT_P (XEXP (temp, 1))
6023 396817 : && INTVAL (XEXP (temp, 1)) == GET_MODE_UNIT_PRECISION (mode) - 1)
6024 0 : return simplify_shift_const (NULL_RTX, LSHIFTRT, mode, XEXP (temp, 0),
6025 0 : INTVAL (XEXP (temp, 1)));
6026 :
6027 : /* If X has only a single bit that might be nonzero, say, bit I, convert
6028 : (neg X) to (ashiftrt (ashift X C-I) C-I) where C is the bitsize of
6029 : MODE minus 1. This will convert (neg (zero_extract X 1 Y)) to
6030 : (sign_extract X 1 Y). But only do this if TEMP isn't a register
6031 : or a SUBREG of one since we'd be making the expression more
6032 : complex if it was just a register. */
6033 :
6034 368331 : if (!REG_P (temp)
6035 174758 : && ! (GET_CODE (temp) == SUBREG
6036 19135 : && REG_P (SUBREG_REG (temp)))
6037 216254941 : && is_a <scalar_int_mode> (mode, &int_mode)
6038 500624 : && (i = exact_log2 (nonzero_bits (temp, int_mode))) >= 0)
6039 : {
6040 60929 : rtx temp1 = simplify_shift_const
6041 60929 : (NULL_RTX, ASHIFTRT, int_mode,
6042 : simplify_shift_const (NULL_RTX, ASHIFT, int_mode, temp,
6043 60929 : GET_MODE_PRECISION (int_mode) - 1 - i),
6044 60929 : GET_MODE_PRECISION (int_mode) - 1 - i);
6045 :
6046 : /* If all we did was surround TEMP with the two shifts, we
6047 : haven't improved anything, so don't use it. Otherwise,
6048 : we are better off with TEMP1. */
6049 60929 : if (GET_CODE (temp1) != ASHIFTRT
6050 60516 : || GET_CODE (XEXP (temp1, 0)) != ASHIFT
6051 60478 : || XEXP (XEXP (temp1, 0), 0) != temp)
6052 : return temp1;
6053 : }
6054 : break;
6055 :
6056 9886 : case TRUNCATE:
6057 : /* We can't handle truncation to a partial integer mode here
6058 : because we don't know the real bitsize of the partial
6059 : integer mode. */
6060 9886 : if (GET_MODE_CLASS (mode) == MODE_PARTIAL_INT)
6061 : break;
6062 :
6063 9886 : if (HWI_COMPUTABLE_MODE_P (mode))
6064 0 : SUBST (XEXP (x, 0),
6065 : force_to_mode (XEXP (x, 0), GET_MODE (XEXP (x, 0)),
6066 : GET_MODE_MASK (mode), false));
6067 :
6068 : /* We can truncate a constant value and return it. */
6069 9886 : {
6070 9886 : poly_int64 c;
6071 9886 : if (poly_int_rtx_p (XEXP (x, 0), &c))
6072 0 : return gen_int_mode (c, mode);
6073 : }
6074 :
6075 : /* Similarly to what we do in simplify-rtx.cc, a truncate of a register
6076 : whose value is a comparison can be replaced with a subreg if
6077 : STORE_FLAG_VALUE permits. */
6078 9886 : if (HWI_COMPUTABLE_MODE_P (mode)
6079 0 : && (STORE_FLAG_VALUE & ~GET_MODE_MASK (mode)) == 0
6080 0 : && (temp = get_last_value (XEXP (x, 0)))
6081 0 : && COMPARISON_P (temp)
6082 9886 : && TRULY_NOOP_TRUNCATION_MODES_P (mode, GET_MODE (XEXP (x, 0))))
6083 0 : return gen_lowpart (mode, XEXP (x, 0));
6084 : break;
6085 :
6086 5520 : case CONST:
6087 : /* (const (const X)) can become (const X). Do it this way rather than
6088 : returning the inner CONST since CONST can be shared with a
6089 : REG_EQUAL note. */
6090 5520 : if (GET_CODE (XEXP (x, 0)) == CONST)
6091 0 : SUBST (XEXP (x, 0), XEXP (XEXP (x, 0), 0));
6092 : break;
6093 :
6094 : case LO_SUM:
6095 : /* Convert (lo_sum (high FOO) FOO) to FOO. This is necessary so we
6096 : can add in an offset. find_split_point will split this address up
6097 : again if it doesn't match. */
6098 : if (HAVE_lo_sum && GET_CODE (XEXP (x, 0)) == HIGH
6099 : && rtx_equal_p (XEXP (XEXP (x, 0), 0), XEXP (x, 1)))
6100 : return XEXP (x, 1);
6101 : break;
6102 :
6103 33290066 : case PLUS:
6104 : /* (plus (xor (and <foo> (const_int pow2 - 1)) <c>) <-c>)
6105 : when c is (const_int (pow2 + 1) / 2) is a sign extension of a
6106 : bit-field and can be replaced by either a sign_extend or a
6107 : sign_extract. The `and' may be a zero_extend and the two
6108 : <c>, -<c> constants may be reversed. */
6109 33290066 : if (GET_CODE (XEXP (x, 0)) == XOR
6110 33290066 : && is_a <scalar_int_mode> (mode, &int_mode)
6111 13403 : && CONST_INT_P (XEXP (x, 1))
6112 4523 : && CONST_INT_P (XEXP (XEXP (x, 0), 1))
6113 3993 : && INTVAL (XEXP (x, 1)) == -INTVAL (XEXP (XEXP (x, 0), 1))
6114 77 : && ((i = exact_log2 (UINTVAL (XEXP (XEXP (x, 0), 1)))) >= 0
6115 2 : || (i = exact_log2 (UINTVAL (XEXP (x, 1)))) >= 0)
6116 39 : && HWI_COMPUTABLE_MODE_P (int_mode)
6117 33290105 : && ((GET_CODE (XEXP (XEXP (x, 0), 0)) == AND
6118 0 : && CONST_INT_P (XEXP (XEXP (XEXP (x, 0), 0), 1))
6119 0 : && (UINTVAL (XEXP (XEXP (XEXP (x, 0), 0), 1))
6120 0 : == (HOST_WIDE_INT_1U << (i + 1)) - 1))
6121 39 : || (GET_CODE (XEXP (XEXP (x, 0), 0)) == ZERO_EXTEND
6122 0 : && known_eq ((GET_MODE_PRECISION
6123 : (GET_MODE (XEXP (XEXP (XEXP (x, 0), 0), 0)))),
6124 : (unsigned int) i + 1))))
6125 0 : return simplify_shift_const
6126 0 : (NULL_RTX, ASHIFTRT, int_mode,
6127 : simplify_shift_const (NULL_RTX, ASHIFT, int_mode,
6128 : XEXP (XEXP (XEXP (x, 0), 0), 0),
6129 0 : GET_MODE_PRECISION (int_mode) - (i + 1)),
6130 0 : GET_MODE_PRECISION (int_mode) - (i + 1));
6131 :
6132 : /* If only the low-order bit of X is possibly nonzero, (plus x -1)
6133 : can become (ashiftrt (ashift (xor x 1) C) C) where C is
6134 : the bitsize of the mode - 1. This allows simplification of
6135 : "a = (b & 8) == 0;" */
6136 33290066 : if (XEXP (x, 1) == constm1_rtx
6137 711911 : && !REG_P (XEXP (x, 0))
6138 304374 : && ! (GET_CODE (XEXP (x, 0)) == SUBREG
6139 39214 : && REG_P (SUBREG_REG (XEXP (x, 0))))
6140 33547081 : && is_a <scalar_int_mode> (mode, &int_mode)
6141 33556668 : && nonzero_bits (XEXP (x, 0), int_mode) == 1)
6142 9587 : return simplify_shift_const
6143 9587 : (NULL_RTX, ASHIFTRT, int_mode,
6144 : simplify_shift_const (NULL_RTX, ASHIFT, int_mode,
6145 : gen_rtx_XOR (int_mode, XEXP (x, 0),
6146 : const1_rtx),
6147 9587 : GET_MODE_PRECISION (int_mode) - 1),
6148 19174 : GET_MODE_PRECISION (int_mode) - 1);
6149 :
6150 : /* If we are adding two things that have no bits in common, convert
6151 : the addition into an IOR. This will often be further simplified,
6152 : for example in cases like ((a & 1) + (a & 2)), which can
6153 : become a & 3. */
6154 :
6155 33280479 : if (HWI_COMPUTABLE_MODE_P (mode)
6156 29413006 : && (nonzero_bits (XEXP (x, 0), mode)
6157 29413006 : & nonzero_bits (XEXP (x, 1), mode)) == 0)
6158 : {
6159 : /* Try to simplify the expression further. */
6160 302619 : rtx tor = simplify_gen_binary (IOR, mode, XEXP (x, 0), XEXP (x, 1));
6161 302619 : temp = combine_simplify_rtx (tor, VOIDmode, in_dest, false);
6162 :
6163 : /* If we could, great. If not, do not go ahead with the IOR
6164 : replacement, since PLUS appears in many special purpose
6165 : address arithmetic instructions. */
6166 302619 : if (GET_CODE (temp) != CLOBBER
6167 302619 : && (GET_CODE (temp) != IOR
6168 298001 : || ((XEXP (temp, 0) != XEXP (x, 0)
6169 296894 : || XEXP (temp, 1) != XEXP (x, 1))
6170 1107 : && (XEXP (temp, 0) != XEXP (x, 1)
6171 0 : || XEXP (temp, 1) != XEXP (x, 0)))))
6172 : return temp;
6173 : }
6174 :
6175 : /* Canonicalize x + x into x << 1. */
6176 33274754 : if (GET_MODE_CLASS (mode) == MODE_INT
6177 29720969 : && rtx_equal_p (XEXP (x, 0), XEXP (x, 1))
6178 33277916 : && !side_effects_p (XEXP (x, 0)))
6179 3157 : return simplify_gen_binary (ASHIFT, mode, XEXP (x, 0), const1_rtx);
6180 :
6181 : break;
6182 :
6183 3562483 : case MINUS:
6184 : /* (minus <foo> (and <foo> (const_int -pow2))) becomes
6185 : (and <foo> (const_int pow2-1)) */
6186 3562483 : if (is_a <scalar_int_mode> (mode, &int_mode)
6187 2983883 : && GET_CODE (XEXP (x, 1)) == AND
6188 105214 : && CONST_INT_P (XEXP (XEXP (x, 1), 1))
6189 102465 : && pow2p_hwi (-UINTVAL (XEXP (XEXP (x, 1), 1)))
6190 47849 : && rtx_equal_p (XEXP (XEXP (x, 1), 0), XEXP (x, 0)))
6191 0 : return simplify_and_const_int (NULL_RTX, int_mode, XEXP (x, 0),
6192 0 : -INTVAL (XEXP (XEXP (x, 1), 1)) - 1);
6193 : break;
6194 :
6195 2946628 : case MULT:
6196 : /* If we have (mult (plus A B) C), apply the distributive law and then
6197 : the inverse distributive law to see if things simplify. This
6198 : occurs mostly in addresses, often when unrolling loops. */
6199 :
6200 2946628 : if (GET_CODE (XEXP (x, 0)) == PLUS)
6201 : {
6202 267424 : rtx result = distribute_and_simplify_rtx (x, 0);
6203 267424 : if (result)
6204 : return result;
6205 : }
6206 :
6207 : /* Try simplify a*(b/c) as (a*b)/c. */
6208 2946067 : if (FLOAT_MODE_P (mode) && flag_associative_math
6209 201580 : && GET_CODE (XEXP (x, 0)) == DIV)
6210 : {
6211 243 : rtx tem = simplify_binary_operation (MULT, mode,
6212 : XEXP (XEXP (x, 0), 0),
6213 : XEXP (x, 1));
6214 243 : if (tem)
6215 31 : return simplify_gen_binary (DIV, mode, tem, XEXP (XEXP (x, 0), 1));
6216 : }
6217 : break;
6218 :
6219 122625 : case UDIV:
6220 : /* If this is a divide by a power of two, treat it as a shift if
6221 : its first operand is a shift. */
6222 122625 : if (is_a <scalar_int_mode> (mode, &int_mode)
6223 122625 : && CONST_INT_P (XEXP (x, 1))
6224 2024 : && (i = exact_log2 (UINTVAL (XEXP (x, 1)))) >= 0
6225 0 : && (GET_CODE (XEXP (x, 0)) == ASHIFT
6226 0 : || GET_CODE (XEXP (x, 0)) == LSHIFTRT
6227 0 : || GET_CODE (XEXP (x, 0)) == ASHIFTRT
6228 0 : || GET_CODE (XEXP (x, 0)) == ROTATE
6229 0 : || GET_CODE (XEXP (x, 0)) == ROTATERT))
6230 0 : return simplify_shift_const (NULL_RTX, LSHIFTRT, int_mode,
6231 0 : XEXP (x, 0), i);
6232 : break;
6233 :
6234 17499483 : case EQ: case NE:
6235 17499483 : case GT: case GTU: case GE: case GEU:
6236 17499483 : case LT: case LTU: case LE: case LEU:
6237 17499483 : case UNEQ: case LTGT:
6238 17499483 : case UNGT: case UNGE:
6239 17499483 : case UNLT: case UNLE:
6240 17499483 : case UNORDERED: case ORDERED:
6241 : /* If the first operand is a condition code, we can't do anything
6242 : with it. */
6243 17499483 : if (GET_CODE (XEXP (x, 0)) == COMPARE
6244 17499483 : || GET_MODE_CLASS (GET_MODE (XEXP (x, 0))) != MODE_CC)
6245 : {
6246 13217026 : rtx op0 = XEXP (x, 0);
6247 13217026 : rtx op1 = XEXP (x, 1);
6248 13217026 : enum rtx_code new_code;
6249 :
6250 13217026 : if (GET_CODE (op0) == COMPARE)
6251 0 : op1 = XEXP (op0, 1), op0 = XEXP (op0, 0);
6252 :
6253 : /* Simplify our comparison, if possible. */
6254 13217026 : new_code = simplify_comparison (code, &op0, &op1);
6255 :
6256 : /* If STORE_FLAG_VALUE is 1, we can convert (ne x 0) to simply X
6257 : if only the low-order bit is possibly nonzero in X (such as when
6258 : X is a ZERO_EXTRACT of one bit). Similarly, we can convert EQ to
6259 : (xor X 1) or (minus 1 X); we use the former. Finally, if X is
6260 : known to be either 0 or -1, NE becomes a NEG and EQ becomes
6261 : (plus X 1).
6262 :
6263 : Remove any ZERO_EXTRACT we made when thinking this was a
6264 : comparison. It may now be simpler to use, e.g., an AND. If a
6265 : ZERO_EXTRACT is indeed appropriate, it will be placed back by
6266 : the call to make_compound_operation in the SET case.
6267 :
6268 : Don't apply these optimizations if the caller would
6269 : prefer a comparison rather than a value.
6270 : E.g., for the condition in an IF_THEN_ELSE most targets need
6271 : an explicit comparison. */
6272 :
6273 13217026 : if (in_cond)
6274 : ;
6275 :
6276 1994719 : else if (STORE_FLAG_VALUE == 1
6277 : && new_code == NE
6278 2401057 : && is_int_mode (mode, &int_mode)
6279 406560 : && op1 == const0_rtx
6280 216703 : && int_mode == GET_MODE (op0)
6281 2081792 : && nonzero_bits (op0, int_mode) == 1)
6282 222 : return gen_lowpart (int_mode,
6283 447206 : expand_compound_operation (op0));
6284 :
6285 1994497 : else if (STORE_FLAG_VALUE == 1
6286 : && new_code == NE
6287 2400028 : && is_int_mode (mode, &int_mode)
6288 406338 : && op1 == const0_rtx
6289 216481 : && int_mode == GET_MODE (op0)
6290 2081348 : && (num_sign_bit_copies (op0, int_mode)
6291 86851 : == GET_MODE_PRECISION (int_mode)))
6292 : {
6293 807 : op0 = expand_compound_operation (op0);
6294 807 : return simplify_gen_unary (NEG, int_mode,
6295 807 : gen_lowpart (int_mode, op0),
6296 807 : int_mode);
6297 : }
6298 :
6299 1993690 : else if (STORE_FLAG_VALUE == 1
6300 : && new_code == EQ
6301 2272504 : && is_int_mode (mode, &int_mode)
6302 280568 : && op1 == const0_rtx
6303 134039 : && int_mode == GET_MODE (op0)
6304 2039495 : && nonzero_bits (op0, int_mode) == 1)
6305 : {
6306 1754 : op0 = expand_compound_operation (op0);
6307 1754 : return simplify_gen_binary (XOR, int_mode,
6308 1754 : gen_lowpart (int_mode, op0),
6309 1754 : const1_rtx);
6310 : }
6311 :
6312 1991936 : else if (STORE_FLAG_VALUE == 1
6313 : && new_code == EQ
6314 13492492 : && is_int_mode (mode, &int_mode)
6315 278814 : && op1 == const0_rtx
6316 132285 : && int_mode == GET_MODE (op0)
6317 2035987 : && (num_sign_bit_copies (op0, int_mode)
6318 44051 : == GET_MODE_PRECISION (int_mode)))
6319 : {
6320 565 : op0 = expand_compound_operation (op0);
6321 565 : return plus_constant (int_mode, gen_lowpart (int_mode, op0), 1);
6322 : }
6323 :
6324 : /* If STORE_FLAG_VALUE is -1, we have cases similar to
6325 : those above. */
6326 13213678 : if (in_cond)
6327 : ;
6328 :
6329 13213678 : else if (STORE_FLAG_VALUE == -1
6330 : && new_code == NE
6331 : && is_int_mode (mode, &int_mode)
6332 : && op1 == const0_rtx
6333 : && int_mode == GET_MODE (op0)
6334 : && (num_sign_bit_copies (op0, int_mode)
6335 : == GET_MODE_PRECISION (int_mode)))
6336 : return gen_lowpart (int_mode, expand_compound_operation (op0));
6337 :
6338 13213678 : else if (STORE_FLAG_VALUE == -1
6339 : && new_code == NE
6340 : && is_int_mode (mode, &int_mode)
6341 : && op1 == const0_rtx
6342 : && int_mode == GET_MODE (op0)
6343 : && nonzero_bits (op0, int_mode) == 1)
6344 : {
6345 : op0 = expand_compound_operation (op0);
6346 : return simplify_gen_unary (NEG, int_mode,
6347 : gen_lowpart (int_mode, op0),
6348 : int_mode);
6349 : }
6350 :
6351 13213678 : else if (STORE_FLAG_VALUE == -1
6352 : && new_code == EQ
6353 : && is_int_mode (mode, &int_mode)
6354 : && op1 == const0_rtx
6355 : && int_mode == GET_MODE (op0)
6356 : && (num_sign_bit_copies (op0, int_mode)
6357 : == GET_MODE_PRECISION (int_mode)))
6358 : {
6359 : op0 = expand_compound_operation (op0);
6360 : return simplify_gen_unary (NOT, int_mode,
6361 : gen_lowpart (int_mode, op0),
6362 : int_mode);
6363 : }
6364 :
6365 : /* If X is 0/1, (eq X 0) is X-1. */
6366 13213678 : else if (STORE_FLAG_VALUE == -1
6367 : && new_code == EQ
6368 : && is_int_mode (mode, &int_mode)
6369 : && op1 == const0_rtx
6370 : && int_mode == GET_MODE (op0)
6371 : && nonzero_bits (op0, int_mode) == 1)
6372 : {
6373 : op0 = expand_compound_operation (op0);
6374 : return plus_constant (int_mode, gen_lowpart (int_mode, op0), -1);
6375 : }
6376 :
6377 : /* If STORE_FLAG_VALUE says to just test the sign bit and X has just
6378 : one bit that might be nonzero, we can convert (ne x 0) to
6379 : (ashift x c) where C puts the bit in the sign bit. Remove any
6380 : AND with STORE_FLAG_VALUE when we are done, since we are only
6381 : going to test the sign bit. */
6382 13213678 : if (new_code == NE
6383 13615284 : && is_int_mode (mode, &int_mode)
6384 405611 : && HWI_COMPUTABLE_MODE_P (int_mode)
6385 401606 : && val_signbit_p (int_mode, STORE_FLAG_VALUE)
6386 0 : && op1 == const0_rtx
6387 0 : && int_mode == GET_MODE (op0)
6388 13213678 : && (i = exact_log2 (nonzero_bits (op0, int_mode))) >= 0)
6389 : {
6390 0 : x = simplify_shift_const (NULL_RTX, ASHIFT, int_mode,
6391 : expand_compound_operation (op0),
6392 0 : GET_MODE_PRECISION (int_mode) - 1 - i);
6393 0 : if (GET_CODE (x) == AND && XEXP (x, 1) == const_true_rtx)
6394 0 : return XEXP (x, 0);
6395 : else
6396 : return x;
6397 : }
6398 :
6399 : /* If the code changed, return a whole new comparison.
6400 : We also need to avoid using SUBST in cases where
6401 : simplify_comparison has widened a comparison with a CONST_INT,
6402 : since in that case the wider CONST_INT may fail the sanity
6403 : checks in do_SUBST. */
6404 13213678 : if (new_code != code
6405 12778731 : || (CONST_INT_P (op1)
6406 7212932 : && GET_MODE (op0) != GET_MODE (XEXP (x, 0))
6407 10112 : && GET_MODE (op0) != GET_MODE (XEXP (x, 1))))
6408 443858 : return gen_rtx_fmt_ee (new_code, mode, op0, op1);
6409 :
6410 : /* Otherwise, keep this operation, but maybe change its operands.
6411 : This also converts (ne (compare FOO BAR) 0) to (ne FOO BAR). */
6412 12769820 : SUBST (XEXP (x, 0), op0);
6413 12769820 : SUBST (XEXP (x, 1), op1);
6414 : }
6415 : break;
6416 :
6417 12859608 : case IF_THEN_ELSE:
6418 12859608 : return simplify_if_then_else (x);
6419 :
6420 4674302 : case ZERO_EXTRACT:
6421 4674302 : case SIGN_EXTRACT:
6422 4674302 : case ZERO_EXTEND:
6423 4674302 : case SIGN_EXTEND:
6424 : /* If we are processing SET_DEST, we are done. */
6425 4674302 : if (in_dest)
6426 : return x;
6427 :
6428 4671467 : return expand_compound_operation (x);
6429 :
6430 46042429 : case SET:
6431 46042429 : return simplify_set (x);
6432 :
6433 10805438 : case AND:
6434 10805438 : case IOR:
6435 10805438 : return simplify_logical (x);
6436 :
6437 13231616 : case ASHIFT:
6438 13231616 : case LSHIFTRT:
6439 13231616 : case ASHIFTRT:
6440 13231616 : case ROTATE:
6441 13231616 : case ROTATERT:
6442 : /* If this is a shift by a constant amount, simplify it. */
6443 13231616 : if (CONST_INT_P (XEXP (x, 1)))
6444 12748090 : return simplify_shift_const (x, code, mode, XEXP (x, 0),
6445 12748090 : INTVAL (XEXP (x, 1)));
6446 :
6447 : else if (SHIFT_COUNT_TRUNCATED && !REG_P (XEXP (x, 1)))
6448 : SUBST (XEXP (x, 1),
6449 : force_to_mode (XEXP (x, 1), GET_MODE (XEXP (x, 1)),
6450 : (HOST_WIDE_INT_1U
6451 : << exact_log2 (GET_MODE_UNIT_BITSIZE
6452 : (GET_MODE (x)))) - 1, false));
6453 : break;
6454 1824615 : case VEC_SELECT:
6455 1824615 : {
6456 1824615 : rtx trueop0 = XEXP (x, 0);
6457 1824615 : mode = GET_MODE (trueop0);
6458 1824615 : rtx trueop1 = XEXP (x, 1);
6459 : /* If we select a low-part subreg, return that. */
6460 1824615 : if (vec_series_lowpart_p (GET_MODE (x), mode, trueop1))
6461 : {
6462 1091 : rtx new_rtx = lowpart_subreg (GET_MODE (x), trueop0, mode);
6463 1091 : if (new_rtx != NULL_RTX)
6464 : return new_rtx;
6465 : }
6466 : }
6467 :
6468 : default:
6469 : break;
6470 : }
6471 :
6472 : return x;
6473 : }
6474 :
6475 : /* Simplify X, an IF_THEN_ELSE expression. Return the new expression. */
6476 :
6477 : static rtx
6478 12859608 : simplify_if_then_else (rtx x)
6479 : {
6480 12859608 : machine_mode mode = GET_MODE (x);
6481 12859608 : rtx cond = XEXP (x, 0);
6482 12859608 : rtx true_rtx = XEXP (x, 1);
6483 12859608 : rtx false_rtx = XEXP (x, 2);
6484 12859608 : enum rtx_code true_code = GET_CODE (cond);
6485 12859608 : bool comparison_p = COMPARISON_P (cond);
6486 12859608 : rtx temp;
6487 12859608 : int i;
6488 12859608 : enum rtx_code false_code;
6489 12859608 : rtx reversed;
6490 12859608 : scalar_int_mode int_mode, inner_mode;
6491 :
6492 : /* Simplify storing of the truth value. */
6493 12859608 : if (comparison_p && true_rtx == const_true_rtx && false_rtx == const0_rtx)
6494 0 : return simplify_gen_relational (true_code, mode, VOIDmode,
6495 0 : XEXP (cond, 0), XEXP (cond, 1));
6496 :
6497 : /* Also when the truth value has to be reversed. */
6498 12859063 : if (comparison_p
6499 12859063 : && true_rtx == const0_rtx && false_rtx == const_true_rtx
6500 0 : && (reversed = reversed_comparison (cond, mode)))
6501 : return reversed;
6502 :
6503 : /* Sometimes we can simplify the arm of an IF_THEN_ELSE if a register used
6504 : in it is being compared against certain values. Get the true and false
6505 : comparisons and see if that says anything about the value of each arm. */
6506 :
6507 12859608 : if (comparison_p
6508 12859063 : && ((false_code = reversed_comparison_code (cond, NULL))
6509 : != UNKNOWN)
6510 25572723 : && REG_P (XEXP (cond, 0)))
6511 : {
6512 7909013 : HOST_WIDE_INT nzb;
6513 7909013 : rtx from = XEXP (cond, 0);
6514 7909013 : rtx true_val = XEXP (cond, 1);
6515 7909013 : rtx false_val = true_val;
6516 7909013 : bool swapped = false;
6517 :
6518 : /* If FALSE_CODE is EQ, swap the codes and arms. */
6519 :
6520 7909013 : if (false_code == EQ)
6521 : {
6522 2856653 : swapped = true, true_code = EQ, false_code = NE;
6523 2856653 : std::swap (true_rtx, false_rtx);
6524 : }
6525 :
6526 7909013 : scalar_int_mode from_mode;
6527 7909013 : if (is_a <scalar_int_mode> (GET_MODE (from), &from_mode))
6528 : {
6529 : /* If we are comparing against zero and the expression being
6530 : tested has only a single bit that might be nonzero, that is
6531 : its value when it is not equal to zero. Similarly if it is
6532 : known to be -1 or 0. */
6533 6658781 : if (true_code == EQ
6534 4828905 : && true_val == const0_rtx
6535 8660205 : && pow2p_hwi (nzb = nonzero_bits (from, from_mode)))
6536 : {
6537 205956 : false_code = EQ;
6538 205956 : false_val = gen_int_mode (nzb, from_mode);
6539 : }
6540 6452825 : else if (true_code == EQ
6541 4622949 : && true_val == const0_rtx
6542 8248293 : && (num_sign_bit_copies (from, from_mode)
6543 1795468 : == GET_MODE_PRECISION (from_mode)))
6544 : {
6545 675 : false_code = EQ;
6546 675 : false_val = constm1_rtx;
6547 : }
6548 : }
6549 :
6550 : /* Now simplify an arm if we know the value of the register in the
6551 : branch and it is used in the arm. Be careful due to the potential
6552 : of locally-shared RTL. */
6553 :
6554 7909013 : if (reg_mentioned_p (from, true_rtx))
6555 300556 : true_rtx = subst (known_cond (copy_rtx (true_rtx), true_code,
6556 : from, true_val),
6557 : pc_rtx, pc_rtx, false, false, false);
6558 7909013 : if (reg_mentioned_p (from, false_rtx))
6559 99919 : false_rtx = subst (known_cond (copy_rtx (false_rtx), false_code,
6560 : from, false_val),
6561 : pc_rtx, pc_rtx, false, false, false);
6562 :
6563 12961373 : SUBST (XEXP (x, 1), swapped ? false_rtx : true_rtx);
6564 12961373 : SUBST (XEXP (x, 2), swapped ? true_rtx : false_rtx);
6565 :
6566 7909013 : true_rtx = XEXP (x, 1);
6567 7909013 : false_rtx = XEXP (x, 2);
6568 7909013 : true_code = GET_CODE (cond);
6569 : }
6570 :
6571 : /* If we have (if_then_else FOO (pc) (label_ref BAR)) and FOO can be
6572 : reversed, do so to avoid needing two sets of patterns for
6573 : subtract-and-branch insns. Similarly if we have a constant in the true
6574 : arm, the false arm is the same as the first operand of the comparison, or
6575 : the false arm is more complicated than the true arm. */
6576 :
6577 12859608 : if (comparison_p
6578 12859063 : && reversed_comparison_code (cond, NULL) != UNKNOWN
6579 25572723 : && (true_rtx == pc_rtx
6580 12713115 : || (CONSTANT_P (true_rtx)
6581 10668238 : && !CONST_INT_P (false_rtx) && false_rtx != pc_rtx)
6582 12684256 : || true_rtx == const0_rtx
6583 12684024 : || (OBJECT_P (true_rtx) && !OBJECT_P (false_rtx))
6584 12647901 : || (GET_CODE (true_rtx) == SUBREG && OBJECT_P (SUBREG_REG (true_rtx))
6585 12439 : && !OBJECT_P (false_rtx))
6586 12645624 : || reg_mentioned_p (true_rtx, false_rtx)
6587 12645494 : || rtx_equal_p (false_rtx, XEXP (cond, 0))))
6588 : {
6589 95329 : SUBST (XEXP (x, 0), reversed_comparison (cond, GET_MODE (cond)));
6590 95329 : SUBST (XEXP (x, 1), false_rtx);
6591 95329 : SUBST (XEXP (x, 2), true_rtx);
6592 :
6593 95329 : std::swap (true_rtx, false_rtx);
6594 95329 : cond = XEXP (x, 0);
6595 :
6596 : /* It is possible that the conditional has been simplified out. */
6597 95329 : true_code = GET_CODE (cond);
6598 95329 : comparison_p = COMPARISON_P (cond);
6599 : }
6600 :
6601 : /* If the two arms are identical, we don't need the comparison. */
6602 :
6603 12859608 : if (rtx_equal_p (true_rtx, false_rtx) && ! side_effects_p (cond))
6604 : return true_rtx;
6605 :
6606 : /* Convert a == b ? b : a to "a". */
6607 3919501 : if (true_code == EQ && ! side_effects_p (cond)
6608 3893422 : && !HONOR_NANS (mode)
6609 3823200 : && rtx_equal_p (XEXP (cond, 0), false_rtx)
6610 12859971 : && rtx_equal_p (XEXP (cond, 1), true_rtx))
6611 : return false_rtx;
6612 4569552 : else if (true_code == NE && ! side_effects_p (cond)
6613 4528148 : && !HONOR_NANS (mode)
6614 4522593 : && rtx_equal_p (XEXP (cond, 0), true_rtx)
6615 12920045 : && rtx_equal_p (XEXP (cond, 1), false_rtx))
6616 : return true_rtx;
6617 :
6618 : /* Look for cases where we have (abs x) or (neg (abs X)). */
6619 :
6620 12859602 : if (GET_MODE_CLASS (mode) == MODE_INT
6621 1914195 : && comparison_p
6622 1914175 : && XEXP (cond, 1) == const0_rtx
6623 1486580 : && GET_CODE (false_rtx) == NEG
6624 136 : && rtx_equal_p (true_rtx, XEXP (false_rtx, 0))
6625 15 : && rtx_equal_p (true_rtx, XEXP (cond, 0))
6626 12859617 : && ! side_effects_p (true_rtx))
6627 15 : switch (true_code)
6628 : {
6629 15 : case GT:
6630 15 : case GE:
6631 15 : return simplify_gen_unary (ABS, mode, true_rtx, mode);
6632 0 : case LT:
6633 0 : case LE:
6634 0 : return
6635 0 : simplify_gen_unary (NEG, mode,
6636 : simplify_gen_unary (ABS, mode, true_rtx, mode),
6637 0 : mode);
6638 : default:
6639 : break;
6640 : }
6641 :
6642 : /* Look for MIN or MAX. */
6643 :
6644 12859587 : if ((! FLOAT_MODE_P (mode)
6645 84738 : || (flag_unsafe_math_optimizations
6646 370 : && !HONOR_NANS (mode)
6647 370 : && !HONOR_SIGNED_ZEROS (mode)))
6648 12775219 : && comparison_p
6649 12774842 : && rtx_equal_p (XEXP (cond, 0), true_rtx)
6650 103144 : && rtx_equal_p (XEXP (cond, 1), false_rtx)
6651 13149 : && ! side_effects_p (cond))
6652 13145 : switch (true_code)
6653 : {
6654 5018 : case GE:
6655 5018 : case GT:
6656 5018 : return simplify_gen_binary (SMAX, mode, true_rtx, false_rtx);
6657 4536 : case LE:
6658 4536 : case LT:
6659 4536 : return simplify_gen_binary (SMIN, mode, true_rtx, false_rtx);
6660 2697 : case GEU:
6661 2697 : case GTU:
6662 2697 : return simplify_gen_binary (UMAX, mode, true_rtx, false_rtx);
6663 894 : case LEU:
6664 894 : case LTU:
6665 894 : return simplify_gen_binary (UMIN, mode, true_rtx, false_rtx);
6666 : default:
6667 : break;
6668 : }
6669 :
6670 : /* If we have (if_then_else COND (OP Z C1) Z) and OP is an identity when its
6671 : second operand is zero, this can be done as (OP Z (mult COND C2)) where
6672 : C2 = C1 * STORE_FLAG_VALUE. Similarly if OP has an outer ZERO_EXTEND or
6673 : SIGN_EXTEND as long as Z is already extended (so we don't destroy it).
6674 : We can do this kind of thing in some cases when STORE_FLAG_VALUE is
6675 : neither 1 or -1, but it isn't worth checking for. */
6676 :
6677 12846442 : if ((STORE_FLAG_VALUE == 1 || STORE_FLAG_VALUE == -1)
6678 : && comparison_p
6679 14666375 : && is_int_mode (mode, &int_mode)
6680 14747479 : && ! side_effects_p (x))
6681 : {
6682 1897063 : rtx t = make_compound_operation (true_rtx, SET);
6683 1897063 : rtx f = make_compound_operation (false_rtx, SET);
6684 1897063 : rtx cond_op0 = XEXP (cond, 0);
6685 1897063 : rtx cond_op1 = XEXP (cond, 1);
6686 1897063 : enum rtx_code op = UNKNOWN, extend_op = UNKNOWN;
6687 1897063 : scalar_int_mode m = int_mode;
6688 1897063 : rtx z = 0, c1 = NULL_RTX;
6689 :
6690 1897063 : if ((GET_CODE (t) == PLUS || GET_CODE (t) == MINUS
6691 : || GET_CODE (t) == IOR || GET_CODE (t) == XOR
6692 : || GET_CODE (t) == ASHIFT
6693 : || GET_CODE (t) == LSHIFTRT || GET_CODE (t) == ASHIFTRT)
6694 194266 : && rtx_equal_p (XEXP (t, 0), f))
6695 72638 : c1 = XEXP (t, 1), op = GET_CODE (t), z = f;
6696 :
6697 : /* If an identity-zero op is commutative, check whether there
6698 : would be a match if we swapped the operands. */
6699 1760976 : else if ((GET_CODE (t) == PLUS || GET_CODE (t) == IOR
6700 1750164 : || GET_CODE (t) == XOR)
6701 1835900 : && rtx_equal_p (XEXP (t, 1), f))
6702 8466 : c1 = XEXP (t, 0), op = GET_CODE (t), z = f;
6703 1815959 : else if (GET_CODE (t) == SIGN_EXTEND
6704 1848 : && is_a <scalar_int_mode> (GET_MODE (XEXP (t, 0)), &inner_mode)
6705 1848 : && (GET_CODE (XEXP (t, 0)) == PLUS
6706 1848 : || GET_CODE (XEXP (t, 0)) == MINUS
6707 : || GET_CODE (XEXP (t, 0)) == IOR
6708 : || GET_CODE (XEXP (t, 0)) == XOR
6709 : || GET_CODE (XEXP (t, 0)) == ASHIFT
6710 : || GET_CODE (XEXP (t, 0)) == LSHIFTRT
6711 : || GET_CODE (XEXP (t, 0)) == ASHIFTRT)
6712 80 : && GET_CODE (XEXP (XEXP (t, 0), 0)) == SUBREG
6713 54 : && subreg_lowpart_p (XEXP (XEXP (t, 0), 0))
6714 54 : && rtx_equal_p (SUBREG_REG (XEXP (XEXP (t, 0), 0)), f)
6715 1815959 : && (num_sign_bit_copies (f, GET_MODE (f))
6716 0 : > (unsigned int)
6717 0 : (GET_MODE_PRECISION (int_mode)
6718 0 : - GET_MODE_PRECISION (inner_mode))))
6719 : {
6720 0 : c1 = XEXP (XEXP (t, 0), 1); z = f; op = GET_CODE (XEXP (t, 0));
6721 0 : extend_op = SIGN_EXTEND;
6722 0 : m = inner_mode;
6723 : }
6724 1815959 : else if (GET_CODE (t) == SIGN_EXTEND
6725 1848 : && is_a <scalar_int_mode> (GET_MODE (XEXP (t, 0)), &inner_mode)
6726 1848 : && (GET_CODE (XEXP (t, 0)) == PLUS
6727 1774 : || GET_CODE (XEXP (t, 0)) == IOR
6728 1770 : || GET_CODE (XEXP (t, 0)) == XOR)
6729 78 : && GET_CODE (XEXP (XEXP (t, 0), 1)) == SUBREG
6730 4 : && subreg_lowpart_p (XEXP (XEXP (t, 0), 1))
6731 4 : && rtx_equal_p (SUBREG_REG (XEXP (XEXP (t, 0), 1)), f)
6732 1815963 : && (num_sign_bit_copies (f, GET_MODE (f))
6733 4 : > (unsigned int)
6734 4 : (GET_MODE_PRECISION (int_mode)
6735 4 : - GET_MODE_PRECISION (inner_mode))))
6736 : {
6737 0 : c1 = XEXP (XEXP (t, 0), 0); z = f; op = GET_CODE (XEXP (t, 0));
6738 0 : extend_op = SIGN_EXTEND;
6739 0 : m = inner_mode;
6740 : }
6741 1815959 : else if (GET_CODE (t) == ZERO_EXTEND
6742 4775 : && is_a <scalar_int_mode> (GET_MODE (XEXP (t, 0)), &inner_mode)
6743 4775 : && (GET_CODE (XEXP (t, 0)) == PLUS
6744 4775 : || GET_CODE (XEXP (t, 0)) == MINUS
6745 : || GET_CODE (XEXP (t, 0)) == IOR
6746 : || GET_CODE (XEXP (t, 0)) == XOR
6747 : || GET_CODE (XEXP (t, 0)) == ASHIFT
6748 : || GET_CODE (XEXP (t, 0)) == LSHIFTRT
6749 : || GET_CODE (XEXP (t, 0)) == ASHIFTRT)
6750 1357 : && GET_CODE (XEXP (XEXP (t, 0), 0)) == SUBREG
6751 103 : && HWI_COMPUTABLE_MODE_P (int_mode)
6752 103 : && subreg_lowpart_p (XEXP (XEXP (t, 0), 0))
6753 103 : && rtx_equal_p (SUBREG_REG (XEXP (XEXP (t, 0), 0)), f)
6754 1815959 : && ((nonzero_bits (f, GET_MODE (f))
6755 0 : & ~GET_MODE_MASK (inner_mode))
6756 : == 0))
6757 : {
6758 0 : c1 = XEXP (XEXP (t, 0), 1); z = f; op = GET_CODE (XEXP (t, 0));
6759 0 : extend_op = ZERO_EXTEND;
6760 0 : m = inner_mode;
6761 : }
6762 1815959 : else if (GET_CODE (t) == ZERO_EXTEND
6763 4775 : && is_a <scalar_int_mode> (GET_MODE (XEXP (t, 0)), &inner_mode)
6764 4775 : && (GET_CODE (XEXP (t, 0)) == PLUS
6765 3990 : || GET_CODE (XEXP (t, 0)) == IOR
6766 3990 : || GET_CODE (XEXP (t, 0)) == XOR)
6767 785 : && GET_CODE (XEXP (XEXP (t, 0), 1)) == SUBREG
6768 16 : && HWI_COMPUTABLE_MODE_P (int_mode)
6769 16 : && subreg_lowpart_p (XEXP (XEXP (t, 0), 1))
6770 16 : && rtx_equal_p (SUBREG_REG (XEXP (XEXP (t, 0), 1)), f)
6771 1815959 : && ((nonzero_bits (f, GET_MODE (f))
6772 0 : & ~GET_MODE_MASK (inner_mode))
6773 : == 0))
6774 : {
6775 0 : c1 = XEXP (XEXP (t, 0), 0); z = f; op = GET_CODE (XEXP (t, 0));
6776 0 : extend_op = ZERO_EXTEND;
6777 0 : m = inner_mode;
6778 : }
6779 :
6780 81104 : if (z)
6781 : {
6782 81104 : machine_mode cm = m;
6783 81104 : if ((op == ASHIFT || op == LSHIFTRT || op == ASHIFTRT)
6784 2085 : && GET_MODE (c1) != VOIDmode)
6785 1605 : cm = GET_MODE (c1);
6786 81104 : temp = subst (simplify_gen_relational (true_code, cm, VOIDmode,
6787 : cond_op0, cond_op1),
6788 : pc_rtx, pc_rtx, false, false, false);
6789 81104 : temp = simplify_gen_binary (MULT, cm, temp,
6790 : simplify_gen_binary (MULT, cm, c1,
6791 : const_true_rtx));
6792 81104 : temp = subst (temp, pc_rtx, pc_rtx, false, false, false);
6793 81104 : temp = simplify_gen_binary (op, m, gen_lowpart (m, z), temp);
6794 :
6795 81104 : if (extend_op != UNKNOWN)
6796 0 : temp = simplify_gen_unary (extend_op, int_mode, temp, m);
6797 :
6798 81104 : return temp;
6799 : }
6800 : }
6801 :
6802 : /* If we have (if_then_else (ne A 0) C1 0) and either A is known to be 0 or
6803 : 1 and C1 is a single bit or A is known to be 0 or -1 and C1 is the
6804 : negation of a single bit, we can convert this operation to a shift. We
6805 : can actually do this more generally, but it doesn't seem worth it. */
6806 :
6807 12765338 : if (true_code == NE
6808 12765337 : && is_a <scalar_int_mode> (mode, &int_mode)
6809 389105 : && XEXP (cond, 1) == const0_rtx
6810 283279 : && false_rtx == const0_rtx
6811 44816 : && CONST_INT_P (true_rtx)
6812 12765748 : && ((nonzero_bits (XEXP (cond, 0), int_mode) == 1
6813 2 : && (i = exact_log2 (UINTVAL (true_rtx))) >= 0)
6814 409 : || ((num_sign_bit_copies (XEXP (cond, 0), int_mode)
6815 409 : == GET_MODE_PRECISION (int_mode))
6816 0 : && (i = exact_log2 (-UINTVAL (true_rtx))) >= 0)))
6817 1 : return
6818 1 : simplify_shift_const (NULL_RTX, ASHIFT, int_mode,
6819 2 : gen_lowpart (int_mode, XEXP (cond, 0)), i);
6820 :
6821 : /* (IF_THEN_ELSE (NE A 0) C1 0) is A or a zero-extend of A if the only
6822 : non-zero bit in A is C1. */
6823 4551620 : if (true_code == NE && XEXP (cond, 1) == const0_rtx
6824 2036832 : && false_rtx == const0_rtx && CONST_INT_P (true_rtx)
6825 12860017 : && is_a <scalar_int_mode> (mode, &int_mode)
6826 409 : && is_a <scalar_int_mode> (GET_MODE (XEXP (cond, 0)), &inner_mode)
6827 41 : && (UINTVAL (true_rtx) & GET_MODE_MASK (int_mode))
6828 41 : == nonzero_bits (XEXP (cond, 0), inner_mode)
6829 12765337 : && (i = exact_log2 (UINTVAL (true_rtx) & GET_MODE_MASK (int_mode))) >= 0)
6830 : {
6831 0 : rtx val = XEXP (cond, 0);
6832 0 : if (inner_mode == int_mode)
6833 : return val;
6834 0 : else if (GET_MODE_PRECISION (inner_mode) < GET_MODE_PRECISION (int_mode))
6835 0 : return simplify_gen_unary (ZERO_EXTEND, int_mode, val, inner_mode);
6836 : }
6837 :
6838 : return x;
6839 : }
6840 :
6841 : /* Simplify X, a SET expression. Return the new expression. */
6842 :
6843 : static rtx
6844 46042429 : simplify_set (rtx x)
6845 : {
6846 46042429 : rtx src = SET_SRC (x);
6847 46042429 : rtx dest = SET_DEST (x);
6848 103230491 : machine_mode mode
6849 46042429 : = GET_MODE (src) != VOIDmode ? GET_MODE (src) : GET_MODE (dest);
6850 46042429 : rtx_insn *other_insn;
6851 46042429 : rtx *cc_use;
6852 46042429 : scalar_int_mode int_mode;
6853 :
6854 : /* (set (pc) (return)) gets written as (return). */
6855 46042429 : if (GET_CODE (dest) == PC && ANY_RETURN_P (src))
6856 : return src;
6857 :
6858 : /* Now that we know for sure which bits of SRC we are using, see if we can
6859 : simplify the expression for the object knowing that we only need the
6860 : low-order bits. */
6861 :
6862 46042429 : if (GET_MODE_CLASS (mode) == MODE_INT && HWI_COMPUTABLE_MODE_P (mode))
6863 : {
6864 20164849 : src = force_to_mode (src, mode, HOST_WIDE_INT_M1U, false);
6865 20164849 : SUBST (SET_SRC (x), src);
6866 : }
6867 :
6868 : /* If the source is a COMPARE, look for the use of the comparison result
6869 : and try to simplify it unless we already have used undobuf.other_insn. */
6870 39528852 : if ((GET_MODE_CLASS (mode) == MODE_CC || GET_CODE (src) == COMPARE)
6871 6513577 : && (cc_use = find_single_use (dest, subst_insn, &other_insn)) != 0
6872 5931804 : && (undobuf.other_insn == 0 || other_insn == undobuf.other_insn)
6873 5931804 : && COMPARISON_P (*cc_use)
6874 51973749 : && rtx_equal_p (XEXP (*cc_use, 0), dest))
6875 : {
6876 5929784 : enum rtx_code old_code = GET_CODE (*cc_use);
6877 5929784 : enum rtx_code new_code;
6878 5929784 : rtx op0, op1, tmp;
6879 5929784 : bool other_changed = false;
6880 5929784 : rtx inner_compare = NULL_RTX;
6881 5929784 : machine_mode compare_mode = GET_MODE (dest);
6882 :
6883 5929784 : if (GET_CODE (src) == COMPARE)
6884 : {
6885 5502740 : op0 = XEXP (src, 0), op1 = XEXP (src, 1);
6886 5502740 : if (GET_CODE (op0) == COMPARE && op1 == const0_rtx)
6887 : {
6888 0 : inner_compare = op0;
6889 0 : op0 = XEXP (inner_compare, 0), op1 = XEXP (inner_compare, 1);
6890 : }
6891 : }
6892 : else
6893 427044 : op0 = src, op1 = CONST0_RTX (GET_MODE (src));
6894 :
6895 5929784 : tmp = simplify_relational_operation (old_code, compare_mode, VOIDmode,
6896 : op0, op1);
6897 5929784 : if (!tmp)
6898 : new_code = old_code;
6899 451064 : else if (!CONSTANT_P (tmp))
6900 : {
6901 446170 : new_code = GET_CODE (tmp);
6902 446170 : op0 = XEXP (tmp, 0);
6903 446170 : op1 = XEXP (tmp, 1);
6904 : }
6905 : else
6906 : {
6907 4894 : rtx pat = PATTERN (other_insn);
6908 4894 : undobuf.other_insn = other_insn;
6909 4894 : SUBST (*cc_use, tmp);
6910 :
6911 : /* Attempt to simplify CC user. */
6912 4894 : if (GET_CODE (pat) == SET)
6913 : {
6914 4396 : rtx new_rtx = simplify_rtx (SET_SRC (pat));
6915 4396 : if (new_rtx != NULL_RTX)
6916 3940 : SUBST (SET_SRC (pat), new_rtx);
6917 : }
6918 :
6919 : /* Convert X into a no-op move. */
6920 4894 : SUBST (SET_DEST (x), pc_rtx);
6921 4894 : SUBST (SET_SRC (x), pc_rtx);
6922 4894 : return x;
6923 : }
6924 :
6925 : /* Simplify our comparison, if possible. */
6926 5924890 : new_code = simplify_comparison (new_code, &op0, &op1);
6927 :
6928 : #ifdef SELECT_CC_MODE
6929 : /* If this machine has CC modes other than CCmode, check to see if we
6930 : need to use a different CC mode here. */
6931 5924890 : if (GET_MODE_CLASS (GET_MODE (op0)) == MODE_CC)
6932 630893 : compare_mode = GET_MODE (op0);
6933 5293997 : else if (inner_compare
6934 0 : && GET_MODE_CLASS (GET_MODE (inner_compare)) == MODE_CC
6935 0 : && new_code == old_code
6936 0 : && op0 == XEXP (inner_compare, 0)
6937 0 : && op1 == XEXP (inner_compare, 1))
6938 0 : compare_mode = GET_MODE (inner_compare);
6939 : else
6940 5293997 : compare_mode = SELECT_CC_MODE (new_code, op0, op1);
6941 :
6942 : /* If the mode changed, we have to change SET_DEST, the mode in the
6943 : compare, and the mode in the place SET_DEST is used. If SET_DEST is
6944 : a hard register, just build new versions with the proper mode. If it
6945 : is a pseudo, we lose unless it is only time we set the pseudo, in
6946 : which case we can safely change its mode. */
6947 5924890 : if (compare_mode != GET_MODE (dest))
6948 : {
6949 207264 : if (can_change_dest_mode (dest, 0, compare_mode))
6950 : {
6951 207264 : unsigned int regno = REGNO (dest);
6952 207264 : rtx new_dest;
6953 :
6954 207264 : if (regno < FIRST_PSEUDO_REGISTER)
6955 207264 : new_dest = gen_rtx_REG (compare_mode, regno);
6956 : else
6957 : {
6958 0 : subst_mode (regno, compare_mode);
6959 0 : new_dest = regno_reg_rtx[regno];
6960 : }
6961 :
6962 207264 : SUBST (SET_DEST (x), new_dest);
6963 207264 : SUBST (XEXP (*cc_use, 0), new_dest);
6964 207264 : other_changed = true;
6965 :
6966 207264 : dest = new_dest;
6967 : }
6968 : }
6969 : #endif /* SELECT_CC_MODE */
6970 :
6971 : /* If the code changed, we have to build a new comparison in
6972 : undobuf.other_insn. */
6973 5924890 : if (new_code != old_code)
6974 : {
6975 583797 : bool other_changed_previously = other_changed;
6976 583797 : unsigned HOST_WIDE_INT mask;
6977 583797 : rtx old_cc_use = *cc_use;
6978 :
6979 583797 : SUBST (*cc_use, gen_rtx_fmt_ee (new_code, GET_MODE (*cc_use),
6980 : dest, const0_rtx));
6981 583797 : other_changed = true;
6982 :
6983 : /* If the only change we made was to change an EQ into an NE or
6984 : vice versa, OP0 has only one bit that might be nonzero, and OP1
6985 : is zero, check if changing the user of the condition code will
6986 : produce a valid insn. If it won't, we can keep the original code
6987 : in that insn by surrounding our operation with an XOR. */
6988 :
6989 583797 : if (((old_code == NE && new_code == EQ)
6990 554811 : || (old_code == EQ && new_code == NE))
6991 62931 : && ! other_changed_previously && op1 == const0_rtx
6992 60281 : && HWI_COMPUTABLE_MODE_P (GET_MODE (op0))
6993 591913 : && pow2p_hwi (mask = nonzero_bits (op0, GET_MODE (op0))))
6994 : {
6995 8105 : rtx pat = PATTERN (other_insn), note = 0;
6996 :
6997 8105 : if ((recog_for_combine (&pat, other_insn, ¬e) < 0
6998 8105 : && ! check_asm_operands (pat)))
6999 : {
7000 4 : *cc_use = old_cc_use;
7001 4 : other_changed = false;
7002 :
7003 4 : op0 = simplify_gen_binary (XOR, GET_MODE (op0), op0,
7004 4 : gen_int_mode (mask,
7005 4 : GET_MODE (op0)));
7006 : }
7007 : }
7008 : }
7009 :
7010 5349198 : if (other_changed)
7011 603066 : undobuf.other_insn = other_insn;
7012 :
7013 : /* Don't generate a compare of a CC with 0, just use that CC. */
7014 5924890 : if (GET_MODE (op0) == compare_mode && op1 == const0_rtx)
7015 : {
7016 630893 : SUBST (SET_SRC (x), op0);
7017 630893 : src = SET_SRC (x);
7018 : }
7019 : /* Otherwise, if we didn't previously have the same COMPARE we
7020 : want, create it from scratch. */
7021 5293997 : else if (GET_CODE (src) != COMPARE || GET_MODE (src) != compare_mode
7022 5175196 : || XEXP (src, 0) != op0 || XEXP (src, 1) != op1)
7023 : {
7024 1384247 : SUBST (SET_SRC (x), gen_rtx_COMPARE (compare_mode, op0, op1));
7025 1384247 : src = SET_SRC (x);
7026 : }
7027 : }
7028 : else
7029 : {
7030 : /* Get SET_SRC in a form where we have placed back any
7031 : compound expressions. Then do the checks below. */
7032 40112645 : src = make_compound_operation (src, SET);
7033 40112645 : SUBST (SET_SRC (x), src);
7034 : }
7035 :
7036 : /* If we have (set x (subreg:m1 (op:m2 ...) 0)) with OP being some operation,
7037 : and X being a REG or (subreg (reg)), we may be able to convert this to
7038 : (set (subreg:m2 x) (op)).
7039 :
7040 : We can always do this if M1 is narrower than M2 because that means that
7041 : we only care about the low bits of the result.
7042 :
7043 : However, on machines without WORD_REGISTER_OPERATIONS defined, we cannot
7044 : perform a narrower operation than requested since the high-order bits will
7045 : be undefined. On machine where it is defined, this transformation is safe
7046 : as long as M1 and M2 have the same number of words. */
7047 :
7048 403008 : if (GET_CODE (src) == SUBREG && subreg_lowpart_p (src)
7049 387164 : && !OBJECT_P (SUBREG_REG (src))
7050 : && (known_equal_after_align_up
7051 245874 : (GET_MODE_SIZE (GET_MODE (src)),
7052 491748 : GET_MODE_SIZE (GET_MODE (SUBREG_REG (src))),
7053 245874 : UNITS_PER_WORD))
7054 217030 : && (WORD_REGISTER_OPERATIONS || !paradoxical_subreg_p (src))
7055 212696 : && ! (REG_P (dest) && REGNO (dest) < FIRST_PSEUDO_REGISTER
7056 227 : && !REG_CAN_CHANGE_MODE_P (REGNO (dest),
7057 : GET_MODE (SUBREG_REG (src)),
7058 : GET_MODE (src)))
7059 46250004 : && (REG_P (dest)
7060 101231 : || (GET_CODE (dest) == SUBREG
7061 275 : && REG_P (SUBREG_REG (dest)))))
7062 : {
7063 111513 : SUBST (SET_DEST (x),
7064 : gen_lowpart (GET_MODE (SUBREG_REG (src)),
7065 : dest));
7066 111513 : SUBST (SET_SRC (x), SUBREG_REG (src));
7067 :
7068 111513 : src = SET_SRC (x), dest = SET_DEST (x);
7069 : }
7070 :
7071 : /* If we have (set FOO (subreg:M (mem:N BAR) 0)) with M wider than N, this
7072 : would require a paradoxical subreg. Replace the subreg with a
7073 : zero_extend to avoid the reload that would otherwise be required.
7074 : Don't do this unless we have a scalar integer mode, otherwise the
7075 : transformation is incorrect. */
7076 :
7077 46037535 : enum rtx_code extend_op;
7078 46037535 : if (paradoxical_subreg_p (src)
7079 : && MEM_P (SUBREG_REG (src))
7080 : && SCALAR_INT_MODE_P (GET_MODE (src))
7081 : && (extend_op = load_extend_op (GET_MODE (SUBREG_REG (src)))) != UNKNOWN)
7082 : {
7083 : SUBST (SET_SRC (x),
7084 : gen_rtx_fmt_e (extend_op, GET_MODE (src), SUBREG_REG (src)));
7085 :
7086 : src = SET_SRC (x);
7087 : }
7088 :
7089 : /* If we don't have a conditional move, SET_SRC is an IF_THEN_ELSE, and we
7090 : are comparing an item known to be 0 or -1 against 0, use a logical
7091 : operation instead. Check for one of the arms being an IOR of the other
7092 : arm with some value. We compute three terms to be IOR'ed together. In
7093 : practice, at most two will be nonzero. Then we do the IOR's. */
7094 :
7095 46037535 : if (GET_CODE (dest) != PC
7096 35295331 : && GET_CODE (src) == IF_THEN_ELSE
7097 1116516 : && is_int_mode (GET_MODE (src), &int_mode)
7098 1021782 : && (GET_CODE (XEXP (src, 0)) == EQ || GET_CODE (XEXP (src, 0)) == NE)
7099 413998 : && XEXP (XEXP (src, 0), 1) == const0_rtx
7100 297276 : && int_mode == GET_MODE (XEXP (XEXP (src, 0), 0))
7101 96795 : && (!HAVE_conditional_move
7102 96795 : || ! can_conditionally_move_p (int_mode))
7103 0 : && (num_sign_bit_copies (XEXP (XEXP (src, 0), 0), int_mode)
7104 0 : == GET_MODE_PRECISION (int_mode))
7105 46037535 : && ! side_effects_p (src))
7106 : {
7107 0 : rtx true_rtx = (GET_CODE (XEXP (src, 0)) == NE
7108 0 : ? XEXP (src, 1) : XEXP (src, 2));
7109 0 : rtx false_rtx = (GET_CODE (XEXP (src, 0)) == NE
7110 0 : ? XEXP (src, 2) : XEXP (src, 1));
7111 0 : rtx term1 = const0_rtx, term2, term3;
7112 :
7113 0 : if (GET_CODE (true_rtx) == IOR
7114 0 : && rtx_equal_p (XEXP (true_rtx, 0), false_rtx))
7115 0 : term1 = false_rtx, true_rtx = XEXP (true_rtx, 1), false_rtx = const0_rtx;
7116 0 : else if (GET_CODE (true_rtx) == IOR
7117 0 : && rtx_equal_p (XEXP (true_rtx, 1), false_rtx))
7118 0 : term1 = false_rtx, true_rtx = XEXP (true_rtx, 0), false_rtx = const0_rtx;
7119 0 : else if (GET_CODE (false_rtx) == IOR
7120 0 : && rtx_equal_p (XEXP (false_rtx, 0), true_rtx))
7121 0 : term1 = true_rtx, false_rtx = XEXP (false_rtx, 1), true_rtx = const0_rtx;
7122 0 : else if (GET_CODE (false_rtx) == IOR
7123 0 : && rtx_equal_p (XEXP (false_rtx, 1), true_rtx))
7124 0 : term1 = true_rtx, false_rtx = XEXP (false_rtx, 0), true_rtx = const0_rtx;
7125 :
7126 0 : term2 = simplify_gen_binary (AND, int_mode,
7127 0 : XEXP (XEXP (src, 0), 0), true_rtx);
7128 0 : term3 = simplify_gen_binary (AND, int_mode,
7129 : simplify_gen_unary (NOT, int_mode,
7130 0 : XEXP (XEXP (src, 0), 0),
7131 : int_mode),
7132 : false_rtx);
7133 :
7134 0 : SUBST (SET_SRC (x),
7135 : simplify_gen_binary (IOR, int_mode,
7136 : simplify_gen_binary (IOR, int_mode,
7137 : term1, term2),
7138 : term3));
7139 :
7140 0 : src = SET_SRC (x);
7141 : }
7142 :
7143 : /* If either SRC or DEST is a CLOBBER of (const_int 0), make this
7144 : whole thing fail. */
7145 46037535 : if (GET_CODE (src) == CLOBBER && XEXP (src, 0) == const0_rtx)
7146 : return src;
7147 46037515 : else if (GET_CODE (dest) == CLOBBER && XEXP (dest, 0) == const0_rtx)
7148 : return dest;
7149 : else
7150 : /* Convert this into a field assignment operation, if possible. */
7151 46037515 : return make_field_assignment (x);
7152 : }
7153 :
7154 : /* Simplify, X, and AND, IOR, or XOR operation, and return the simplified
7155 : result. */
7156 :
7157 : static rtx
7158 10805438 : simplify_logical (rtx x)
7159 : {
7160 10805438 : rtx op0 = XEXP (x, 0);
7161 10805438 : rtx op1 = XEXP (x, 1);
7162 10805438 : scalar_int_mode mode;
7163 :
7164 10805438 : switch (GET_CODE (x))
7165 : {
7166 6701627 : case AND:
7167 : /* We can call simplify_and_const_int only if we don't lose
7168 : any (sign) bits when converting INTVAL (op1) to
7169 : "unsigned HOST_WIDE_INT". */
7170 6701627 : if (is_a <scalar_int_mode> (GET_MODE (x), &mode)
7171 6187691 : && CONST_INT_P (op1)
7172 4851899 : && (HWI_COMPUTABLE_MODE_P (mode)
7173 6629 : || INTVAL (op1) > 0))
7174 : {
7175 4848633 : x = simplify_and_const_int (x, mode, op0, INTVAL (op1));
7176 4848633 : if (GET_CODE (x) != AND)
7177 : return x;
7178 :
7179 4820977 : op0 = XEXP (x, 0);
7180 4820977 : op1 = XEXP (x, 1);
7181 : }
7182 :
7183 : /* If we have any of (and (ior A B) C) or (and (xor A B) C),
7184 : apply the distributive law and then the inverse distributive
7185 : law to see if things simplify. */
7186 6673971 : if (GET_CODE (op0) == IOR || GET_CODE (op0) == XOR)
7187 : {
7188 122446 : rtx result = distribute_and_simplify_rtx (x, 0);
7189 122446 : if (result)
7190 : return result;
7191 : }
7192 6660720 : if (GET_CODE (op1) == IOR || GET_CODE (op1) == XOR)
7193 : {
7194 1809 : rtx result = distribute_and_simplify_rtx (x, 1);
7195 1809 : if (result)
7196 : return result;
7197 : }
7198 : break;
7199 :
7200 4103811 : case IOR:
7201 : /* If we have (ior (and A B) C), apply the distributive law and then
7202 : the inverse distributive law to see if things simplify. */
7203 :
7204 4103811 : if (GET_CODE (op0) == AND)
7205 : {
7206 1163508 : rtx result = distribute_and_simplify_rtx (x, 0);
7207 1163508 : if (result)
7208 : return result;
7209 : }
7210 :
7211 4101124 : if (GET_CODE (op1) == AND)
7212 : {
7213 60882 : rtx result = distribute_and_simplify_rtx (x, 1);
7214 60882 : if (result)
7215 : return result;
7216 : }
7217 : break;
7218 :
7219 0 : default:
7220 0 : gcc_unreachable ();
7221 : }
7222 :
7223 : return x;
7224 : }
7225 :
7226 : /* We consider ZERO_EXTRACT, SIGN_EXTRACT, and SIGN_EXTEND as "compound
7227 : operations" because they can be replaced with two more basic operations.
7228 : ZERO_EXTEND is also considered "compound" because it can be replaced with
7229 : an AND operation, which is simpler, though only one operation.
7230 :
7231 : The function expand_compound_operation is called with an rtx expression
7232 : and will convert it to the appropriate shifts and AND operations,
7233 : simplifying at each stage.
7234 :
7235 : The function make_compound_operation is called to convert an expression
7236 : consisting of shifts and ANDs into the equivalent compound expression.
7237 : It is the inverse of this function, loosely speaking. */
7238 :
7239 : static rtx
7240 15363814 : expand_compound_operation (rtx x)
7241 : {
7242 15363814 : unsigned HOST_WIDE_INT pos = 0, len;
7243 15363814 : bool unsignedp = false;
7244 15363814 : unsigned int modewidth;
7245 15363814 : rtx tem;
7246 15363814 : scalar_int_mode inner_mode;
7247 :
7248 15363814 : switch (GET_CODE (x))
7249 : {
7250 4464697 : case ZERO_EXTEND:
7251 4464697 : unsignedp = true;
7252 : /* FALLTHRU */
7253 5792092 : case SIGN_EXTEND:
7254 : /* We can't necessarily use a const_int for a multiword mode;
7255 : it depends on implicitly extending the value.
7256 : Since we don't know the right way to extend it,
7257 : we can't tell whether the implicit way is right.
7258 :
7259 : Even for a mode that is no wider than a const_int,
7260 : we can't win, because we need to sign extend one of its bits through
7261 : the rest of it, and we don't know which bit. */
7262 5792092 : if (CONST_INT_P (XEXP (x, 0)))
7263 : return x;
7264 :
7265 : /* Reject modes that aren't scalar integers because turning vector
7266 : or complex modes into shifts causes problems. */
7267 5792092 : if (!is_a <scalar_int_mode> (GET_MODE (XEXP (x, 0)), &inner_mode))
7268 : return x;
7269 :
7270 : /* Return if (subreg:MODE FROM 0) is not a safe replacement for
7271 : (zero_extend:MODE FROM) or (sign_extend:MODE FROM). It is for any MEM
7272 : because (SUBREG (MEM...)) is guaranteed to cause the MEM to be
7273 : reloaded. If not for that, MEM's would very rarely be safe.
7274 :
7275 : Reject modes bigger than a word, because we might not be able
7276 : to reference a two-register group starting with an arbitrary register
7277 : (and currently gen_lowpart might crash for a SUBREG). */
7278 :
7279 11700009 : if (GET_MODE_SIZE (inner_mode) > UNITS_PER_WORD)
7280 : return x;
7281 :
7282 5445972 : len = GET_MODE_PRECISION (inner_mode);
7283 : /* If the inner object has VOIDmode (the only way this can happen
7284 : is if it is an ASM_OPERANDS), we can't do anything since we don't
7285 : know how much masking to do. */
7286 5445972 : if (len == 0)
7287 : return x;
7288 :
7289 : break;
7290 :
7291 928126 : case ZERO_EXTRACT:
7292 928126 : unsignedp = true;
7293 :
7294 : /* fall through */
7295 :
7296 953821 : case SIGN_EXTRACT:
7297 : /* If the operand is a CLOBBER, just return it. */
7298 953821 : if (GET_CODE (XEXP (x, 0)) == CLOBBER)
7299 : return XEXP (x, 0);
7300 :
7301 953821 : if (!CONST_INT_P (XEXP (x, 1))
7302 953686 : || !CONST_INT_P (XEXP (x, 2)))
7303 : return x;
7304 :
7305 : /* Reject modes that aren't scalar integers because turning vector
7306 : or complex modes into shifts causes problems. */
7307 12840543 : if (!is_a <scalar_int_mode> (GET_MODE (XEXP (x, 0)), &inner_mode))
7308 : return x;
7309 :
7310 878352 : len = INTVAL (XEXP (x, 1));
7311 878352 : pos = INTVAL (XEXP (x, 2));
7312 :
7313 : /* This should stay within the object being extracted, fail otherwise. */
7314 878352 : if (len + pos > GET_MODE_PRECISION (inner_mode))
7315 : return x;
7316 :
7317 : if (BITS_BIG_ENDIAN)
7318 : pos = GET_MODE_PRECISION (inner_mode) - len - pos;
7319 :
7320 : break;
7321 :
7322 : default:
7323 : return x;
7324 : }
7325 :
7326 : /* We've rejected non-scalar operations by now. */
7327 6324275 : scalar_int_mode mode = as_a <scalar_int_mode> (GET_MODE (x));
7328 :
7329 : /* Convert sign extension to zero extension, if we know that the high
7330 : bit is not set, as this is easier to optimize. It will be converted
7331 : back to cheaper alternative in make_extraction. */
7332 6324275 : if (GET_CODE (x) == SIGN_EXTEND
7333 1173568 : && HWI_COMPUTABLE_MODE_P (mode)
7334 7382815 : && ((nonzero_bits (XEXP (x, 0), inner_mode)
7335 1058540 : & ~(((unsigned HOST_WIDE_INT) GET_MODE_MASK (inner_mode)) >> 1))
7336 : == 0))
7337 : {
7338 572 : rtx temp = gen_rtx_ZERO_EXTEND (mode, XEXP (x, 0));
7339 572 : rtx temp2 = expand_compound_operation (temp);
7340 :
7341 : /* Make sure this is a profitable operation. */
7342 572 : if (set_src_cost (x, mode, optimize_this_for_speed_p)
7343 572 : > set_src_cost (temp2, mode, optimize_this_for_speed_p))
7344 : return temp2;
7345 558 : else if (set_src_cost (x, mode, optimize_this_for_speed_p)
7346 558 : > set_src_cost (temp, mode, optimize_this_for_speed_p))
7347 : return temp;
7348 : else
7349 : return x;
7350 : }
7351 :
7352 : /* We can optimize some special cases of ZERO_EXTEND. */
7353 6323703 : if (GET_CODE (x) == ZERO_EXTEND)
7354 : {
7355 : /* (zero_extend:DI (truncate:SI foo:DI)) is just foo:DI if we
7356 : know that the last value didn't have any inappropriate bits
7357 : set. */
7358 4272404 : if (GET_CODE (XEXP (x, 0)) == TRUNCATE
7359 191 : && GET_MODE (XEXP (XEXP (x, 0), 0)) == mode
7360 191 : && HWI_COMPUTABLE_MODE_P (mode)
7361 4272595 : && (nonzero_bits (XEXP (XEXP (x, 0), 0), mode)
7362 191 : & ~GET_MODE_MASK (inner_mode)) == 0)
7363 36 : return XEXP (XEXP (x, 0), 0);
7364 :
7365 : /* Likewise for (zero_extend:DI (subreg:SI foo:DI 0)). */
7366 4272368 : if (GET_CODE (XEXP (x, 0)) == SUBREG
7367 630351 : && GET_MODE (SUBREG_REG (XEXP (x, 0))) == mode
7368 588218 : && subreg_lowpart_p (XEXP (x, 0))
7369 273268 : && HWI_COMPUTABLE_MODE_P (mode)
7370 4523798 : && (nonzero_bits (SUBREG_REG (XEXP (x, 0)), mode)
7371 251430 : & ~GET_MODE_MASK (inner_mode)) == 0)
7372 88 : return SUBREG_REG (XEXP (x, 0));
7373 :
7374 : /* (zero_extend:DI (truncate:SI foo:DI)) is just foo:DI when foo
7375 : is a comparison and STORE_FLAG_VALUE permits. This is like
7376 : the first case, but it works even when MODE is larger
7377 : than HOST_WIDE_INT. */
7378 4272280 : if (GET_CODE (XEXP (x, 0)) == TRUNCATE
7379 155 : && GET_MODE (XEXP (XEXP (x, 0), 0)) == mode
7380 155 : && COMPARISON_P (XEXP (XEXP (x, 0), 0))
7381 0 : && GET_MODE_PRECISION (inner_mode) <= HOST_BITS_PER_WIDE_INT
7382 4272280 : && (STORE_FLAG_VALUE & ~GET_MODE_MASK (inner_mode)) == 0)
7383 : return XEXP (XEXP (x, 0), 0);
7384 :
7385 : /* Likewise for (zero_extend:DI (subreg:SI foo:DI 0)). */
7386 4272280 : if (GET_CODE (XEXP (x, 0)) == SUBREG
7387 630263 : && GET_MODE (SUBREG_REG (XEXP (x, 0))) == mode
7388 588130 : && subreg_lowpart_p (XEXP (x, 0))
7389 273180 : && COMPARISON_P (SUBREG_REG (XEXP (x, 0)))
7390 0 : && GET_MODE_PRECISION (inner_mode) <= HOST_BITS_PER_WIDE_INT
7391 4272280 : && (STORE_FLAG_VALUE & ~GET_MODE_MASK (inner_mode)) == 0)
7392 : return SUBREG_REG (XEXP (x, 0));
7393 :
7394 : }
7395 :
7396 : /* If we reach here, we want to return a pair of shifts. The inner
7397 : shift is a left shift of BITSIZE - POS - LEN bits. The outer
7398 : shift is a right shift of BITSIZE - LEN bits. It is arithmetic or
7399 : logical depending on the value of UNSIGNEDP.
7400 :
7401 : If this was a ZERO_EXTEND or ZERO_EXTRACT, this pair of shifts will be
7402 : converted into an AND of a shift.
7403 :
7404 : We must check for the case where the left shift would have a negative
7405 : count. This can happen in a case like (x >> 31) & 255 on machines
7406 : that can't shift by a constant. On those machines, we would first
7407 : combine the shift with the AND to produce a variable-position
7408 : extraction. Then the constant of 31 would be substituted in
7409 : to produce such a position. */
7410 :
7411 6323579 : modewidth = GET_MODE_PRECISION (mode);
7412 6323579 : if (modewidth >= pos + len)
7413 : {
7414 6323578 : tem = gen_lowpart (mode, XEXP (x, 0));
7415 6323578 : if (!tem || GET_CODE (tem) == CLOBBER)
7416 : return x;
7417 6801960 : tem = simplify_shift_const (NULL_RTX, ASHIFT, mode,
7418 3400980 : tem, modewidth - pos - len);
7419 3400980 : tem = simplify_shift_const (NULL_RTX, unsignedp ? LSHIFTRT : ASHIFTRT,
7420 3400980 : mode, tem, modewidth - len);
7421 : }
7422 1 : else if (unsignedp && len < HOST_BITS_PER_WIDE_INT)
7423 : {
7424 0 : tem = simplify_shift_const (NULL_RTX, LSHIFTRT, inner_mode,
7425 : XEXP (x, 0), pos);
7426 0 : tem = gen_lowpart (mode, tem);
7427 0 : if (!tem || GET_CODE (tem) == CLOBBER)
7428 : return x;
7429 0 : tem = simplify_and_const_int (NULL_RTX, mode, tem,
7430 0 : (HOST_WIDE_INT_1U << len) - 1);
7431 : }
7432 : else
7433 : /* Any other cases we can't handle. */
7434 : return x;
7435 :
7436 : /* If we couldn't do this for some reason, return the original
7437 : expression. */
7438 3400980 : if (GET_CODE (tem) == CLOBBER)
7439 : return x;
7440 :
7441 : return tem;
7442 : }
7443 :
7444 : /* X is a SET which contains an assignment of one object into
7445 : a part of another (such as a bit-field assignment, STRICT_LOW_PART,
7446 : or certain SUBREGS). If possible, convert it into a series of
7447 : logical operations.
7448 :
7449 : We half-heartedly support variable positions, but do not at all
7450 : support variable lengths. */
7451 :
7452 : static const_rtx
7453 82618691 : expand_field_assignment (const_rtx x)
7454 : {
7455 82618691 : rtx inner;
7456 82618691 : rtx pos; /* Always counts from low bit. */
7457 82618691 : int len, inner_len;
7458 82618691 : rtx mask, cleared, masked;
7459 82618691 : scalar_int_mode compute_mode;
7460 :
7461 : /* Loop until we find something we can't simplify. */
7462 82882162 : while (1)
7463 : {
7464 82882162 : if (GET_CODE (SET_DEST (x)) == STRICT_LOW_PART
7465 15653 : && GET_CODE (XEXP (SET_DEST (x), 0)) == SUBREG)
7466 : {
7467 15653 : rtx x0 = XEXP (SET_DEST (x), 0);
7468 15653 : if (!GET_MODE_PRECISION (GET_MODE (x0)).is_constant (&len))
7469 : break;
7470 15653 : inner = SUBREG_REG (XEXP (SET_DEST (x), 0));
7471 15653 : pos = gen_int_mode (subreg_lsb (XEXP (SET_DEST (x), 0)),
7472 : MAX_MODE_INT);
7473 15653 : }
7474 82866509 : else if (GET_CODE (SET_DEST (x)) == ZERO_EXTRACT
7475 4387 : && CONST_INT_P (XEXP (SET_DEST (x), 1)))
7476 : {
7477 4387 : inner = XEXP (SET_DEST (x), 0);
7478 4387 : if (!GET_MODE_PRECISION (GET_MODE (inner)).is_constant (&inner_len))
7479 : break;
7480 :
7481 4387 : len = INTVAL (XEXP (SET_DEST (x), 1));
7482 4387 : pos = XEXP (SET_DEST (x), 2);
7483 :
7484 : /* A constant position should stay within the width of INNER. */
7485 4387 : if (CONST_INT_P (pos) && INTVAL (pos) + len > inner_len)
7486 : break;
7487 :
7488 : if (BITS_BIG_ENDIAN)
7489 : {
7490 : if (CONST_INT_P (pos))
7491 : pos = GEN_INT (inner_len - len - INTVAL (pos));
7492 : else if (GET_CODE (pos) == MINUS
7493 : && CONST_INT_P (XEXP (pos, 1))
7494 : && INTVAL (XEXP (pos, 1)) == inner_len - len)
7495 : /* If position is ADJUST - X, new position is X. */
7496 : pos = XEXP (pos, 0);
7497 : else
7498 : pos = simplify_gen_binary (MINUS, GET_MODE (pos),
7499 : gen_int_mode (inner_len - len,
7500 : GET_MODE (pos)),
7501 : pos);
7502 : }
7503 : }
7504 :
7505 : /* If the destination is a subreg that overwrites the whole of the inner
7506 : register, we can move the subreg to the source. */
7507 83111741 : else if (GET_CODE (SET_DEST (x)) == SUBREG
7508 : /* We need SUBREGs to compute nonzero_bits properly. */
7509 858042 : && nonzero_sign_valid
7510 83634671 : && !read_modify_subreg_p (SET_DEST (x)))
7511 : {
7512 249619 : x = gen_rtx_SET (SUBREG_REG (SET_DEST (x)),
7513 : gen_lowpart
7514 : (GET_MODE (SUBREG_REG (SET_DEST (x))),
7515 : SET_SRC (x)));
7516 249619 : continue;
7517 : }
7518 : else
7519 : break;
7520 :
7521 22017 : while (GET_CODE (inner) == SUBREG && subreg_lowpart_p (inner))
7522 1977 : inner = SUBREG_REG (inner);
7523 :
7524 : /* Don't attempt bitwise arithmetic on non scalar integer modes. */
7525 20040 : if (!is_a <scalar_int_mode> (GET_MODE (inner), &compute_mode))
7526 : {
7527 : /* Don't do anything for vector or complex integral types. */
7528 4275 : if (! FLOAT_MODE_P (GET_MODE (inner)))
7529 : break;
7530 :
7531 : /* Try to find an integral mode to pun with. */
7532 38 : if (!int_mode_for_size (GET_MODE_BITSIZE (GET_MODE (inner)), 0)
7533 0 : .exists (&compute_mode))
7534 : break;
7535 :
7536 19 : inner = gen_lowpart (compute_mode, inner);
7537 : }
7538 :
7539 : /* Compute a mask of LEN bits, if we can do this on the host machine. */
7540 15784 : if (len >= HOST_BITS_PER_WIDE_INT)
7541 : break;
7542 :
7543 : /* Don't try to compute in too wide unsupported modes. */
7544 15784 : if (!targetm.scalar_mode_supported_p (compute_mode))
7545 : break;
7546 :
7547 : /* gen_lowpart_for_combine returns CLOBBER on failure. */
7548 15784 : rtx lowpart = gen_lowpart (compute_mode, SET_SRC (x));
7549 15784 : if (GET_CODE (lowpart) == CLOBBER)
7550 : break;
7551 :
7552 : /* Now compute the equivalent expression. Make a copy of INNER
7553 : for the SET_DEST in case it is a MEM into which we will substitute;
7554 : we don't want shared RTL in that case. */
7555 13852 : mask = gen_int_mode ((HOST_WIDE_INT_1U << len) - 1,
7556 : compute_mode);
7557 13852 : cleared = simplify_gen_binary (AND, compute_mode,
7558 : simplify_gen_unary (NOT, compute_mode,
7559 : simplify_gen_binary (ASHIFT,
7560 : compute_mode,
7561 : mask, pos),
7562 : compute_mode),
7563 : inner);
7564 13852 : masked = simplify_gen_binary (ASHIFT, compute_mode,
7565 : simplify_gen_binary (
7566 : AND, compute_mode, lowpart, mask),
7567 : pos);
7568 :
7569 13852 : x = gen_rtx_SET (copy_rtx (inner),
7570 : simplify_gen_binary (IOR, compute_mode,
7571 : cleared, masked));
7572 : }
7573 :
7574 82618691 : return x;
7575 : }
7576 :
7577 : /* Return an RTX for a reference to LEN bits of INNER. If POS_RTX is nonzero,
7578 : it is an RTX that represents the (variable) starting position; otherwise,
7579 : POS is the (constant) starting bit position. Both are counted from the LSB.
7580 :
7581 : UNSIGNEDP is true for an unsigned reference and zero for a signed one.
7582 :
7583 : IN_DEST is true if this is a reference in the destination of a SET.
7584 : This is used when a ZERO_ or SIGN_EXTRACT isn't needed. If nonzero,
7585 : a STRICT_LOW_PART will be used, if zero, ZERO_EXTEND or SIGN_EXTEND will
7586 : be used.
7587 :
7588 : IN_COMPARE is true if we are in a COMPARE. This means that a
7589 : ZERO_EXTRACT should be built even for bits starting at bit 0.
7590 :
7591 : MODE is the desired mode of the result (if IN_DEST == 0).
7592 :
7593 : The result is an RTX for the extraction or NULL_RTX if the target
7594 : can't handle it. */
7595 :
7596 : static rtx
7597 5030204 : make_extraction (machine_mode mode, rtx inner, HOST_WIDE_INT pos,
7598 : rtx pos_rtx, unsigned HOST_WIDE_INT len, bool unsignedp,
7599 : bool in_dest, bool in_compare)
7600 : {
7601 : /* This mode describes the size of the storage area
7602 : to fetch the overall value from. Within that, we
7603 : ignore the POS lowest bits, etc. */
7604 5030204 : machine_mode is_mode = GET_MODE (inner);
7605 5030204 : machine_mode inner_mode;
7606 5030204 : scalar_int_mode wanted_inner_mode;
7607 5030204 : scalar_int_mode wanted_inner_reg_mode = word_mode;
7608 5030204 : scalar_int_mode pos_mode = word_mode;
7609 5030204 : machine_mode extraction_mode = word_mode;
7610 5030204 : rtx new_rtx = 0;
7611 5030204 : rtx orig_pos_rtx = pos_rtx;
7612 5030204 : HOST_WIDE_INT orig_pos;
7613 :
7614 5030204 : if (pos_rtx && CONST_INT_P (pos_rtx))
7615 900826 : pos = INTVAL (pos_rtx), pos_rtx = 0;
7616 :
7617 5030204 : if (GET_CODE (inner) == SUBREG
7618 2588738 : && subreg_lowpart_p (inner)
7619 7615044 : && (paradoxical_subreg_p (inner)
7620 : /* If trying or potentially trying to extract
7621 : bits outside of is_mode, don't look through
7622 : non-paradoxical SUBREGs. See PR82192. */
7623 148198 : || (pos_rtx == NULL_RTX
7624 148143 : && known_le (pos + len, GET_MODE_PRECISION (is_mode)))))
7625 : {
7626 : /* If going from (subreg:SI (mem:QI ...)) to (mem:QI ...),
7627 : consider just the QI as the memory to extract from.
7628 : The subreg adds or removes high bits; its mode is
7629 : irrelevant to the meaning of this extraction,
7630 : since POS and LEN count from the lsb. */
7631 2584785 : if (MEM_P (SUBREG_REG (inner)))
7632 525013 : is_mode = GET_MODE (SUBREG_REG (inner));
7633 : inner = SUBREG_REG (inner);
7634 : }
7635 2445419 : else if (GET_CODE (inner) == ASHIFT
7636 130274 : && CONST_INT_P (XEXP (inner, 1))
7637 129065 : && pos_rtx == 0 && pos == 0
7638 129033 : && len > UINTVAL (XEXP (inner, 1)))
7639 : {
7640 : /* We're extracting the least significant bits of an rtx
7641 : (ashift X (const_int C)), where LEN > C. Extract the
7642 : least significant (LEN - C) bits of X, giving an rtx
7643 : whose mode is MODE, then shift it left C times. */
7644 129033 : new_rtx = make_extraction (mode, XEXP (inner, 0),
7645 : 0, 0, len - INTVAL (XEXP (inner, 1)),
7646 : unsignedp, in_dest, in_compare);
7647 129033 : if (new_rtx != 0)
7648 127387 : return gen_rtx_ASHIFT (mode, new_rtx, XEXP (inner, 1));
7649 : }
7650 2316386 : else if (GET_CODE (inner) == MULT
7651 169313 : && CONST_INT_P (XEXP (inner, 1))
7652 129842 : && pos_rtx == 0 && pos == 0)
7653 : {
7654 : /* We're extracting the least significant bits of an rtx
7655 : (mult X (const_int 2^C)), where LEN > C. Extract the
7656 : least significant (LEN - C) bits of X, giving an rtx
7657 : whose mode is MODE, then multiply it by 2^C. */
7658 108822 : const HOST_WIDE_INT shift_amt = exact_log2 (INTVAL (XEXP (inner, 1)));
7659 108822 : if (len > 1 && IN_RANGE (shift_amt, 1, len - 1))
7660 : {
7661 104428 : new_rtx = make_extraction (mode, XEXP (inner, 0),
7662 : 0, 0, len - shift_amt,
7663 : unsignedp, in_dest, in_compare);
7664 104428 : if (new_rtx)
7665 104428 : return gen_rtx_MULT (mode, new_rtx, XEXP (inner, 1));
7666 : }
7667 : }
7668 2207564 : else if (GET_CODE (inner) == TRUNCATE
7669 : /* If trying or potentially trying to extract
7670 : bits outside of is_mode, don't look through
7671 : TRUNCATE. See PR82192. */
7672 0 : && pos_rtx == NULL_RTX
7673 2207564 : && known_le (pos + len, GET_MODE_PRECISION (is_mode)))
7674 0 : inner = XEXP (inner, 0);
7675 :
7676 4798389 : inner_mode = GET_MODE (inner);
7677 :
7678 : /* See if this can be done without an extraction. We never can if the
7679 : width of the field is not the same as that of some integer mode. For
7680 : registers, we can only avoid the extraction if the position is at the
7681 : low-order bit and this is either not in the destination or we have the
7682 : appropriate STRICT_LOW_PART operation available.
7683 :
7684 : For MEM, we can avoid an extract if the field starts on an appropriate
7685 : boundary and we can change the mode of the memory reference. */
7686 :
7687 4798389 : scalar_int_mode tmode;
7688 4798389 : if (int_mode_for_size (len, 1).exists (&tmode)
7689 2360377 : && ((pos_rtx == 0 && (pos % BITS_PER_WORD) == 0
7690 2083795 : && !MEM_P (inner)
7691 1703087 : && (pos == 0 || REG_P (inner))
7692 1703087 : && (inner_mode == tmode
7693 269705 : || !REG_P (inner)
7694 243459 : || TRULY_NOOP_TRUNCATION_MODES_P (tmode, inner_mode)
7695 0 : || reg_truncated_to_mode (tmode, inner))
7696 1703087 : && (! in_dest
7697 31 : || (REG_P (inner)
7698 31 : && have_insn_for (STRICT_LOW_PART, tmode))))
7699 529008 : || (MEM_P (inner) && pos_rtx == 0
7700 382029 : && (pos
7701 : % (STRICT_ALIGNMENT ? GET_MODE_ALIGNMENT (tmode)
7702 : : BITS_PER_UNIT)) == 0
7703 : /* We can't do this if we are widening INNER_MODE (it
7704 : may not be aligned, for one thing). */
7705 381055 : && !paradoxical_subreg_p (tmode, inner_mode)
7706 381055 : && known_le (pos + len, GET_MODE_PRECISION (is_mode))
7707 381055 : && (inner_mode == tmode
7708 1078 : || (! mode_dependent_address_p (XEXP (inner, 0),
7709 1078 : MEM_ADDR_SPACE (inner))
7710 1078 : && ! MEM_VOLATILE_P (inner))))))
7711 : {
7712 : /* If INNER is a MEM, make a new MEM that encompasses just the desired
7713 : field. If the original and current mode are the same, we need not
7714 : adjust the offset. Otherwise, we do if bytes big endian.
7715 :
7716 : If INNER is not a MEM, get a piece consisting of just the field
7717 : of interest (in this case POS % BITS_PER_WORD must be 0). */
7718 :
7719 2084111 : if (MEM_P (inner))
7720 : {
7721 381042 : poly_int64 offset;
7722 :
7723 : /* POS counts from lsb, but make OFFSET count in memory order. */
7724 381042 : if (BYTES_BIG_ENDIAN)
7725 : offset = bits_to_bytes_round_down (GET_MODE_PRECISION (is_mode)
7726 : - len - pos);
7727 : else
7728 381042 : offset = pos / BITS_PER_UNIT;
7729 :
7730 381042 : new_rtx = adjust_address_nv (inner, tmode, offset);
7731 : }
7732 1703069 : else if (REG_P (inner))
7733 : {
7734 1145477 : if (tmode != inner_mode)
7735 : {
7736 : /* We can't call gen_lowpart in a DEST since we
7737 : always want a SUBREG (see below) and it would sometimes
7738 : return a new hard register. */
7739 243441 : if (pos || in_dest)
7740 : {
7741 16 : poly_uint64 offset
7742 16 : = subreg_offset_from_lsb (tmode, inner_mode, pos);
7743 :
7744 : /* Avoid creating invalid subregs, for example when
7745 : simplifying (x>>32)&255. */
7746 16 : if (!validate_subreg (tmode, inner_mode, inner, offset))
7747 0 : return NULL_RTX;
7748 :
7749 16 : new_rtx = gen_rtx_SUBREG (tmode, inner, offset);
7750 16 : }
7751 : else
7752 243425 : new_rtx = gen_lowpart (tmode, inner);
7753 : }
7754 : else
7755 : new_rtx = inner;
7756 : }
7757 : else
7758 1115184 : new_rtx = force_to_mode (inner, tmode,
7759 : len >= HOST_BITS_PER_WIDE_INT
7760 : ? HOST_WIDE_INT_M1U
7761 557592 : : (HOST_WIDE_INT_1U << len) - 1, false);
7762 :
7763 : /* If this extraction is going into the destination of a SET,
7764 : make a STRICT_LOW_PART unless we made a MEM. */
7765 :
7766 2084111 : if (in_dest)
7767 49 : return (MEM_P (new_rtx) ? new_rtx
7768 : : (GET_CODE (new_rtx) != SUBREG
7769 13 : ? gen_rtx_CLOBBER (tmode, const0_rtx)
7770 13 : : gen_rtx_STRICT_LOW_PART (VOIDmode, new_rtx)));
7771 :
7772 2084062 : if (mode == tmode)
7773 : return new_rtx;
7774 :
7775 2084033 : if (CONST_SCALAR_INT_P (new_rtx))
7776 5 : return simplify_unary_operation (unsignedp ? ZERO_EXTEND : SIGN_EXTEND,
7777 5 : mode, new_rtx, tmode);
7778 :
7779 : /* If we know that no extraneous bits are set, and that the high
7780 : bit is not set, convert the extraction to the cheaper of
7781 : sign and zero extension, that are equivalent in these cases. */
7782 2084028 : if (flag_expensive_optimizations
7783 2084028 : && (HWI_COMPUTABLE_MODE_P (tmode)
7784 1936868 : && ((nonzero_bits (new_rtx, tmode)
7785 1936868 : & ~(((unsigned HOST_WIDE_INT)GET_MODE_MASK (tmode)) >> 1))
7786 : == 0)))
7787 : {
7788 8371 : rtx temp = gen_rtx_ZERO_EXTEND (mode, new_rtx);
7789 8371 : rtx temp1 = gen_rtx_SIGN_EXTEND (mode, new_rtx);
7790 :
7791 : /* Prefer ZERO_EXTENSION, since it gives more information to
7792 : backends. */
7793 8371 : if (set_src_cost (temp, mode, optimize_this_for_speed_p)
7794 8371 : <= set_src_cost (temp1, mode, optimize_this_for_speed_p))
7795 : return temp;
7796 0 : return temp1;
7797 : }
7798 :
7799 : /* Otherwise, sign- or zero-extend unless we already are in the
7800 : proper mode. */
7801 :
7802 2075657 : return (gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND,
7803 2075657 : mode, new_rtx));
7804 : }
7805 :
7806 : /* Unless this is a COMPARE or we have a funny memory reference,
7807 : don't do anything with zero-extending field extracts starting at
7808 : the low-order bit since they are simple AND operations. */
7809 2714278 : if (pos_rtx == 0 && pos == 0 && ! in_dest
7810 1680394 : && ! in_compare && unsignedp)
7811 : return 0;
7812 :
7813 : /* Unless INNER is not MEM, reject this if we would be spanning bytes or
7814 : if the position is not a constant and the length is not 1. In all
7815 : other cases, we would only be going outside our object in cases when
7816 : an original shift would have been undefined. */
7817 1426955 : if (MEM_P (inner)
7818 1426955 : && ((pos_rtx == 0 && maybe_gt (pos + len, GET_MODE_PRECISION (is_mode)))
7819 3025 : || (pos_rtx != 0 && len != 1)))
7820 : return 0;
7821 :
7822 1539736 : enum extraction_pattern pattern = (in_dest ? EP_insv
7823 1420183 : : unsignedp ? EP_extzv : EP_extv);
7824 :
7825 : /* If INNER is not from memory, we want it to have the mode of a register
7826 : extraction pattern's structure operand, or word_mode if there is no
7827 : such pattern. The same applies to extraction_mode and pos_mode
7828 : and their respective operands.
7829 :
7830 : For memory, assume that the desired extraction_mode and pos_mode
7831 : are the same as for a register operation, since at present we don't
7832 : have named patterns for aligned memory structures. */
7833 1426915 : class extraction_insn insn;
7834 1426915 : unsigned int inner_size;
7835 2853830 : if (GET_MODE_BITSIZE (inner_mode).is_constant (&inner_size)
7836 1426915 : && get_best_reg_extraction_insn (&insn, pattern, inner_size, mode))
7837 : {
7838 1320555 : wanted_inner_reg_mode = insn.struct_mode.require ();
7839 1320555 : pos_mode = insn.pos_mode;
7840 1320555 : extraction_mode = insn.field_mode;
7841 : }
7842 :
7843 : /* Never narrow an object, since that might not be safe. */
7844 :
7845 1426915 : if (mode != VOIDmode
7846 1426915 : && partial_subreg_p (extraction_mode, mode))
7847 : extraction_mode = mode;
7848 :
7849 : /* Punt if len is too large for extraction_mode. */
7850 1426915 : if (maybe_gt (len, GET_MODE_PRECISION (extraction_mode)))
7851 : return NULL_RTX;
7852 :
7853 1426903 : if (!MEM_P (inner))
7854 1251401 : wanted_inner_mode = wanted_inner_reg_mode;
7855 : else
7856 : {
7857 : /* Be careful not to go beyond the extracted object and maintain the
7858 : natural alignment of the memory. */
7859 175502 : wanted_inner_mode = smallest_int_mode_for_size (len).require ();
7860 354171 : while (pos % GET_MODE_BITSIZE (wanted_inner_mode) + len
7861 357338 : > GET_MODE_BITSIZE (wanted_inner_mode))
7862 3167 : wanted_inner_mode = GET_MODE_WIDER_MODE (wanted_inner_mode).require ();
7863 : }
7864 :
7865 1426903 : orig_pos = pos;
7866 :
7867 1426903 : if (BITS_BIG_ENDIAN)
7868 : {
7869 : /* POS is passed as if BITS_BIG_ENDIAN == 0, so we need to convert it to
7870 : BITS_BIG_ENDIAN style. If position is constant, compute new
7871 : position. Otherwise, build subtraction.
7872 : Note that POS is relative to the mode of the original argument.
7873 : If it's a MEM we need to recompute POS relative to that.
7874 : However, if we're extracting from (or inserting into) a register,
7875 : we want to recompute POS relative to wanted_inner_mode. */
7876 : int width;
7877 : if (!MEM_P (inner))
7878 : width = GET_MODE_BITSIZE (wanted_inner_mode);
7879 : else if (!GET_MODE_BITSIZE (is_mode).is_constant (&width))
7880 : return NULL_RTX;
7881 :
7882 : if (pos_rtx == 0)
7883 : pos = width - len - pos;
7884 : else
7885 : pos_rtx
7886 : = gen_rtx_MINUS (GET_MODE (pos_rtx),
7887 : gen_int_mode (width - len, GET_MODE (pos_rtx)),
7888 : pos_rtx);
7889 : /* POS may be less than 0 now, but we check for that below.
7890 : Note that it can only be less than 0 if !MEM_P (inner). */
7891 : }
7892 :
7893 : /* If INNER has a wider mode, and this is a constant extraction, try to
7894 : make it smaller and adjust the byte to point to the byte containing
7895 : the value. */
7896 1426903 : if (wanted_inner_mode != VOIDmode
7897 1426903 : && inner_mode != wanted_inner_mode
7898 192274 : && ! pos_rtx
7899 184177 : && partial_subreg_p (wanted_inner_mode, is_mode)
7900 112800 : && MEM_P (inner)
7901 29323 : && ! mode_dependent_address_p (XEXP (inner, 0), MEM_ADDR_SPACE (inner))
7902 1456226 : && ! MEM_VOLATILE_P (inner))
7903 : {
7904 27704 : poly_int64 offset = 0;
7905 :
7906 : /* The computations below will be correct if the machine is big
7907 : endian in both bits and bytes or little endian in bits and bytes.
7908 : If it is mixed, we must adjust. */
7909 :
7910 : /* If bytes are big endian and we had a paradoxical SUBREG, we must
7911 : adjust OFFSET to compensate. */
7912 27704 : if (BYTES_BIG_ENDIAN
7913 : && paradoxical_subreg_p (is_mode, inner_mode))
7914 : offset -= GET_MODE_SIZE (is_mode) - GET_MODE_SIZE (inner_mode);
7915 :
7916 : /* We can now move to the desired byte. */
7917 55408 : offset += (pos / GET_MODE_BITSIZE (wanted_inner_mode))
7918 27704 : * GET_MODE_SIZE (wanted_inner_mode);
7919 27704 : pos %= GET_MODE_BITSIZE (wanted_inner_mode);
7920 :
7921 27704 : if (BYTES_BIG_ENDIAN != BITS_BIG_ENDIAN
7922 : && is_mode != wanted_inner_mode)
7923 : offset = (GET_MODE_SIZE (is_mode)
7924 : - GET_MODE_SIZE (wanted_inner_mode) - offset);
7925 :
7926 27704 : inner = adjust_address_nv (inner, wanted_inner_mode, offset);
7927 : }
7928 :
7929 : /* If INNER is not memory, get it into the proper mode. If we are changing
7930 : its mode, POS must be a constant and smaller than the size of the new
7931 : mode. */
7932 1399199 : else if (!MEM_P (inner))
7933 : {
7934 : /* On the LHS, don't create paradoxical subregs implicitly truncating
7935 : the register unless TARGET_TRULY_NOOP_TRUNCATION. */
7936 1251401 : if (in_dest
7937 1251401 : && !TRULY_NOOP_TRUNCATION_MODES_P (GET_MODE (inner),
7938 : wanted_inner_mode))
7939 0 : return NULL_RTX;
7940 :
7941 1251401 : if (GET_MODE (inner) != wanted_inner_mode
7942 1251401 : && (pos_rtx != 0
7943 309708 : || orig_pos + len > GET_MODE_BITSIZE (wanted_inner_mode)))
7944 : return NULL_RTX;
7945 :
7946 1185787 : if (orig_pos < 0)
7947 : return NULL_RTX;
7948 :
7949 2352097 : inner = force_to_mode (inner, wanted_inner_mode,
7950 : pos_rtx
7951 1166310 : || len + orig_pos >= HOST_BITS_PER_WIDE_INT
7952 : ? HOST_WIDE_INT_M1U
7953 1034135 : : (((HOST_WIDE_INT_1U << len) - 1)
7954 1034135 : << orig_pos), false);
7955 : }
7956 :
7957 : /* Adjust mode of POS_RTX, if needed. If we want a wider mode, we
7958 : have to zero extend. Otherwise, we can just use a SUBREG.
7959 :
7960 : We dealt with constant rtxes earlier, so pos_rtx cannot
7961 : have VOIDmode at this point. */
7962 1361289 : if (pos_rtx != 0
7963 1361289 : && (GET_MODE_SIZE (pos_mode)
7964 1383751 : > GET_MODE_SIZE (as_a <scalar_int_mode> (GET_MODE (pos_rtx)))))
7965 : {
7966 74 : rtx temp = simplify_gen_unary (ZERO_EXTEND, pos_mode, pos_rtx,
7967 : GET_MODE (pos_rtx));
7968 :
7969 : /* If we know that no extraneous bits are set, and that the high
7970 : bit is not set, convert extraction to cheaper one - either
7971 : SIGN_EXTENSION or ZERO_EXTENSION, that are equivalent in these
7972 : cases. */
7973 74 : if (flag_expensive_optimizations
7974 74 : && (HWI_COMPUTABLE_MODE_P (GET_MODE (pos_rtx))
7975 74 : && ((nonzero_bits (pos_rtx, GET_MODE (pos_rtx))
7976 74 : & ~(((unsigned HOST_WIDE_INT)
7977 74 : GET_MODE_MASK (GET_MODE (pos_rtx)))
7978 74 : >> 1))
7979 : == 0)))
7980 : {
7981 46 : rtx temp1 = simplify_gen_unary (SIGN_EXTEND, pos_mode, pos_rtx,
7982 : GET_MODE (pos_rtx));
7983 :
7984 : /* Prefer ZERO_EXTENSION, since it gives more information to
7985 : backends. */
7986 46 : if (set_src_cost (temp1, pos_mode, optimize_this_for_speed_p)
7987 46 : < set_src_cost (temp, pos_mode, optimize_this_for_speed_p))
7988 1361289 : temp = temp1;
7989 : }
7990 : pos_rtx = temp;
7991 : }
7992 :
7993 : /* Make POS_RTX unless we already have it and it is correct. If we don't
7994 : have a POS_RTX but we do have an ORIG_POS_RTX, the latter must
7995 : be a CONST_INT. */
7996 1361289 : if (pos_rtx == 0 && orig_pos_rtx != 0 && INTVAL (orig_pos_rtx) == pos)
7997 : pos_rtx = orig_pos_rtx;
7998 :
7999 484640 : else if (pos_rtx == 0)
8000 462178 : pos_rtx = GEN_INT (pos);
8001 :
8002 : /* Make the required operation. See if we can use existing rtx. */
8003 1361289 : new_rtx = gen_rtx_fmt_eee (unsignedp ? ZERO_EXTRACT : SIGN_EXTRACT,
8004 : extraction_mode, inner, GEN_INT (len), pos_rtx);
8005 1361289 : if (! in_dest)
8006 1354601 : new_rtx = gen_lowpart (mode, new_rtx);
8007 :
8008 : return new_rtx;
8009 : }
8010 :
8011 : /* See if X (of mode MODE) contains an ASHIFT of COUNT or more bits that
8012 : can be commuted with any other operations in X. Return X without
8013 : that shift if so. */
8014 :
8015 : static rtx
8016 1558561 : extract_left_shift (scalar_int_mode mode, rtx x, int count)
8017 : {
8018 1558561 : enum rtx_code code = GET_CODE (x);
8019 1558561 : rtx tem;
8020 :
8021 1558561 : switch (code)
8022 : {
8023 249015 : case ASHIFT:
8024 : /* This is the shift itself. If it is wide enough, we will return
8025 : either the value being shifted if the shift count is equal to
8026 : COUNT or a shift for the difference. */
8027 249015 : if (CONST_INT_P (XEXP (x, 1))
8028 243765 : && INTVAL (XEXP (x, 1)) >= count)
8029 242623 : return simplify_shift_const (NULL_RTX, ASHIFT, mode, XEXP (x, 0),
8030 242623 : INTVAL (XEXP (x, 1)) - count);
8031 : break;
8032 :
8033 5321 : case NEG: case NOT:
8034 5321 : if ((tem = extract_left_shift (mode, XEXP (x, 0), count)) != 0)
8035 2609 : return simplify_gen_unary (code, mode, tem, mode);
8036 :
8037 : break;
8038 :
8039 551004 : case PLUS: case IOR: case XOR: case AND:
8040 : /* If we can safely shift this constant and we find the inner shift,
8041 : make a new operation. */
8042 551004 : if (CONST_INT_P (XEXP (x, 1))
8043 283533 : && (UINTVAL (XEXP (x, 1))
8044 283533 : & (((HOST_WIDE_INT_1U << count)) - 1)) == 0
8045 684981 : && (tem = extract_left_shift (mode, XEXP (x, 0), count)) != 0)
8046 : {
8047 6786 : HOST_WIDE_INT val = INTVAL (XEXP (x, 1)) >> count;
8048 6786 : return simplify_gen_binary (code, mode, tem,
8049 6786 : gen_int_mode (val, mode));
8050 : }
8051 : break;
8052 :
8053 : default:
8054 : break;
8055 : }
8056 :
8057 : return 0;
8058 : }
8059 :
8060 : /* Subroutine of make_compound_operation. *X_PTR is the rtx at the current
8061 : level of the expression and MODE is its mode. IN_CODE is as for
8062 : make_compound_operation. *NEXT_CODE_PTR is the value of IN_CODE
8063 : that should be used when recursing on operands of *X_PTR.
8064 :
8065 : There are two possible actions:
8066 :
8067 : - Return null. This tells the caller to recurse on *X_PTR with IN_CODE
8068 : equal to *NEXT_CODE_PTR, after which *X_PTR holds the final value.
8069 :
8070 : - Return a new rtx, which the caller returns directly. */
8071 :
8072 : static rtx
8073 271494452 : make_compound_operation_int (scalar_int_mode mode, rtx *x_ptr,
8074 : enum rtx_code in_code,
8075 : enum rtx_code *next_code_ptr)
8076 : {
8077 271494452 : rtx x = *x_ptr;
8078 271494452 : enum rtx_code next_code = *next_code_ptr;
8079 271494452 : enum rtx_code code = GET_CODE (x);
8080 271494452 : int mode_width = GET_MODE_PRECISION (mode);
8081 271494452 : rtx rhs, lhs;
8082 271494452 : rtx new_rtx = 0;
8083 271494452 : int i;
8084 271494452 : rtx tem;
8085 271494452 : scalar_int_mode inner_mode;
8086 271494452 : bool equality_comparison = false;
8087 :
8088 271494452 : if (in_code == EQ)
8089 : {
8090 8580449 : equality_comparison = true;
8091 8580449 : in_code = COMPARE;
8092 : }
8093 :
8094 : /* Process depending on the code of this operation. If NEW is set
8095 : nonzero, it will be returned. */
8096 :
8097 271494452 : switch (code)
8098 : {
8099 6459240 : case ASHIFT:
8100 : /* Convert shifts by constants into multiplications if inside
8101 : an address. */
8102 6459240 : if (in_code == MEM && CONST_INT_P (XEXP (x, 1))
8103 2053771 : && INTVAL (XEXP (x, 1)) < HOST_BITS_PER_WIDE_INT
8104 2053771 : && INTVAL (XEXP (x, 1)) >= 0)
8105 : {
8106 2053771 : HOST_WIDE_INT count = INTVAL (XEXP (x, 1));
8107 2053771 : HOST_WIDE_INT multval = HOST_WIDE_INT_1 << count;
8108 :
8109 2053771 : new_rtx = make_compound_operation (XEXP (x, 0), next_code);
8110 2053771 : if (GET_CODE (new_rtx) == NEG)
8111 : {
8112 9 : new_rtx = XEXP (new_rtx, 0);
8113 9 : multval = -multval;
8114 : }
8115 2053771 : multval = trunc_int_for_mode (multval, mode);
8116 2053771 : new_rtx = gen_rtx_MULT (mode, new_rtx, gen_int_mode (multval, mode));
8117 : }
8118 : break;
8119 :
8120 53924251 : case PLUS:
8121 53924251 : case MINUS:
8122 53924251 : lhs = make_compound_operation (XEXP (x, 0), next_code);
8123 53924251 : rhs = make_compound_operation (XEXP (x, 1), next_code);
8124 53924251 : if (lhs != XEXP (x, 0) || rhs != XEXP (x, 1))
8125 3439912 : return simplify_gen_binary (code, mode, lhs, rhs);
8126 : return x;
8127 :
8128 7258773 : case AND:
8129 : /* If the second operand is not a constant, we can't do anything
8130 : with it. */
8131 7258773 : if (!CONST_INT_P (XEXP (x, 1)))
8132 : break;
8133 :
8134 : /* If the constant is a power of two minus one and the first operand
8135 : is a logical right shift, make an extraction. */
8136 5782547 : if (GET_CODE (XEXP (x, 0)) == LSHIFTRT
8137 5782547 : && (i = exact_log2 (UINTVAL (XEXP (x, 1)) + 1)) >= 0)
8138 : {
8139 645973 : new_rtx = make_compound_operation (XEXP (XEXP (x, 0), 0), next_code);
8140 645973 : new_rtx = make_extraction (mode, new_rtx, 0, XEXP (XEXP (x, 0), 1),
8141 : i, true, false, in_code == COMPARE);
8142 : }
8143 :
8144 : /* Same as previous, but for (subreg (lshiftrt ...)) in first op. */
8145 5136574 : else if (GET_CODE (XEXP (x, 0)) == SUBREG
8146 1336715 : && subreg_lowpart_p (XEXP (x, 0))
8147 6437922 : && is_a <scalar_int_mode> (GET_MODE (SUBREG_REG (XEXP (x, 0))),
8148 : &inner_mode)
8149 1332144 : && GET_CODE (SUBREG_REG (XEXP (x, 0))) == LSHIFTRT
8150 5168469 : && (i = exact_log2 (UINTVAL (XEXP (x, 1)) + 1)) >= 0)
8151 : {
8152 30796 : rtx inner_x0 = SUBREG_REG (XEXP (x, 0));
8153 30796 : new_rtx = make_compound_operation (XEXP (inner_x0, 0), next_code);
8154 30796 : new_rtx = make_extraction (inner_mode, new_rtx, 0,
8155 : XEXP (inner_x0, 1),
8156 : i, true, false, in_code == COMPARE);
8157 :
8158 : /* If we narrowed the mode when dropping the subreg, then we lose. */
8159 92388 : if (GET_MODE_SIZE (inner_mode) < GET_MODE_SIZE (mode))
8160 30796 : new_rtx = NULL;
8161 :
8162 : /* If that didn't give anything, see if the AND simplifies on
8163 : its own. */
8164 30796 : if (!new_rtx && i >= 0)
8165 : {
8166 3721 : new_rtx = make_compound_operation (XEXP (x, 0), next_code);
8167 3721 : new_rtx = make_extraction (mode, new_rtx, 0, NULL_RTX, i,
8168 : true, false, in_code == COMPARE);
8169 : }
8170 : }
8171 : /* Same as previous, but for (xor/ior (lshiftrt...) (lshiftrt...)). */
8172 5105778 : else if ((GET_CODE (XEXP (x, 0)) == XOR
8173 5105778 : || GET_CODE (XEXP (x, 0)) == IOR)
8174 27927 : && GET_CODE (XEXP (XEXP (x, 0), 0)) == LSHIFTRT
8175 2512 : && GET_CODE (XEXP (XEXP (x, 0), 1)) == LSHIFTRT
8176 5105788 : && (i = exact_log2 (UINTVAL (XEXP (x, 1)) + 1)) >= 0)
8177 : {
8178 : /* Apply the distributive law, and then try to make extractions. */
8179 10 : new_rtx = gen_rtx_fmt_ee (GET_CODE (XEXP (x, 0)), mode,
8180 : gen_rtx_AND (mode, XEXP (XEXP (x, 0), 0),
8181 : XEXP (x, 1)),
8182 : gen_rtx_AND (mode, XEXP (XEXP (x, 0), 1),
8183 : XEXP (x, 1)));
8184 10 : new_rtx = make_compound_operation (new_rtx, in_code);
8185 : }
8186 :
8187 : /* If we are have (and (rotate X C) M) and C is larger than the number
8188 : of bits in M, this is an extraction. */
8189 :
8190 5105768 : else if (GET_CODE (XEXP (x, 0)) == ROTATE
8191 1674 : && CONST_INT_P (XEXP (XEXP (x, 0), 1))
8192 1674 : && (i = exact_log2 (UINTVAL (XEXP (x, 1)) + 1)) >= 0
8193 5105800 : && i <= INTVAL (XEXP (XEXP (x, 0), 1)))
8194 : {
8195 0 : new_rtx = make_compound_operation (XEXP (XEXP (x, 0), 0), next_code);
8196 0 : new_rtx = make_extraction (mode, new_rtx,
8197 0 : (GET_MODE_PRECISION (mode)
8198 0 : - INTVAL (XEXP (XEXP (x, 0), 1))),
8199 : NULL_RTX, i, true, false,
8200 : in_code == COMPARE);
8201 : }
8202 :
8203 : /* On machines without logical shifts, if the operand of the AND is
8204 : a logical shift and our mask turns off all the propagated sign
8205 : bits, we can replace the logical shift with an arithmetic shift. */
8206 5105768 : else if (GET_CODE (XEXP (x, 0)) == LSHIFTRT
8207 84475 : && !have_insn_for (LSHIFTRT, mode)
8208 0 : && have_insn_for (ASHIFTRT, mode)
8209 0 : && CONST_INT_P (XEXP (XEXP (x, 0), 1))
8210 0 : && INTVAL (XEXP (XEXP (x, 0), 1)) >= 0
8211 0 : && INTVAL (XEXP (XEXP (x, 0), 1)) < HOST_BITS_PER_WIDE_INT
8212 5105768 : && mode_width <= HOST_BITS_PER_WIDE_INT)
8213 : {
8214 0 : unsigned HOST_WIDE_INT mask = GET_MODE_MASK (mode);
8215 :
8216 0 : mask >>= INTVAL (XEXP (XEXP (x, 0), 1));
8217 0 : if ((INTVAL (XEXP (x, 1)) & ~mask) == 0)
8218 0 : SUBST (XEXP (x, 0),
8219 : gen_rtx_ASHIFTRT (mode,
8220 : make_compound_operation (XEXP (XEXP (x,
8221 : 0),
8222 : 0),
8223 : next_code),
8224 : XEXP (XEXP (x, 0), 1)));
8225 : }
8226 :
8227 : /* If the constant is one less than a power of two, this might be
8228 : representable by an extraction even if no shift is present.
8229 : If it doesn't end up being a ZERO_EXTEND, we will ignore it unless
8230 : we are in a COMPARE. */
8231 5105768 : else if ((i = exact_log2 (UINTVAL (XEXP (x, 1)) + 1)) >= 0)
8232 2620259 : new_rtx = make_extraction (mode,
8233 : make_compound_operation (XEXP (x, 0),
8234 : next_code),
8235 : 0, NULL_RTX, i,
8236 : true, false, in_code == COMPARE);
8237 :
8238 : /* If we are in a comparison and this is an AND with a power of two,
8239 : convert this into the appropriate bit extract. */
8240 2485509 : else if (in_code == COMPARE
8241 486332 : && (i = exact_log2 (UINTVAL (XEXP (x, 1)))) >= 0
8242 2575067 : && (equality_comparison || i < GET_MODE_PRECISION (mode) - 1))
8243 89558 : new_rtx = make_extraction (mode,
8244 : make_compound_operation (XEXP (x, 0),
8245 : next_code),
8246 : i, NULL_RTX, 1, true, false, true);
8247 :
8248 : /* If the one operand is a paradoxical subreg of a register or memory and
8249 : the constant (limited to the smaller mode) has only zero bits where
8250 : the sub expression has known zero bits, this can be expressed as
8251 : a zero_extend. */
8252 2395951 : else if (GET_CODE (XEXP (x, 0)) == SUBREG)
8253 : {
8254 62340 : rtx sub;
8255 :
8256 62340 : sub = XEXP (XEXP (x, 0), 0);
8257 62340 : machine_mode sub_mode = GET_MODE (sub);
8258 62340 : int sub_width;
8259 29873 : if ((REG_P (sub) || MEM_P (sub))
8260 33129 : && GET_MODE_PRECISION (sub_mode).is_constant (&sub_width)
8261 33129 : && sub_width < mode_width
8262 62340 : && (!WORD_REGISTER_OPERATIONS
8263 : || sub_width >= BITS_PER_WORD
8264 : /* On WORD_REGISTER_OPERATIONS targets the bits
8265 : beyond sub_mode aren't considered undefined,
8266 : so optimize only if it is a MEM load when MEM loads
8267 : zero extend, because then the upper bits are all zero. */
8268 : || (MEM_P (sub)
8269 : && load_extend_op (sub_mode) == ZERO_EXTEND)))
8270 : {
8271 25722 : unsigned HOST_WIDE_INT mode_mask = GET_MODE_MASK (sub_mode);
8272 25722 : unsigned HOST_WIDE_INT mask;
8273 :
8274 : /* Original AND constant with all the known zero bits set. */
8275 25722 : mask = UINTVAL (XEXP (x, 1)) | (~nonzero_bits (sub, sub_mode));
8276 25722 : if ((mask & mode_mask) == mode_mask)
8277 : {
8278 22117 : new_rtx = make_compound_operation (sub, next_code);
8279 22117 : new_rtx = make_extraction (mode, new_rtx, 0, 0, sub_width,
8280 : true, false, in_code == COMPARE);
8281 : }
8282 : }
8283 : }
8284 :
8285 : break;
8286 :
8287 1909626 : case LSHIFTRT:
8288 : /* If the sign bit is known to be zero, replace this with an
8289 : arithmetic shift. */
8290 1909626 : if (have_insn_for (ASHIFTRT, mode)
8291 1909626 : && ! have_insn_for (LSHIFTRT, mode)
8292 0 : && mode_width <= HOST_BITS_PER_WIDE_INT
8293 1909626 : && (nonzero_bits (XEXP (x, 0), mode) & (1 << (mode_width - 1))) == 0)
8294 : {
8295 0 : new_rtx = gen_rtx_ASHIFTRT (mode,
8296 : make_compound_operation (XEXP (x, 0),
8297 : next_code),
8298 : XEXP (x, 1));
8299 0 : break;
8300 : }
8301 :
8302 : /* fall through */
8303 :
8304 4391753 : case ASHIFTRT:
8305 4391753 : lhs = XEXP (x, 0);
8306 4391753 : rhs = XEXP (x, 1);
8307 :
8308 : /* If we have (ashiftrt (ashift foo C1) C2) with C2 >= C1,
8309 : this is a SIGN_EXTRACT. */
8310 4391753 : if (CONST_INT_P (rhs)
8311 4223629 : && GET_CODE (lhs) == ASHIFT
8312 1121958 : && CONST_INT_P (XEXP (lhs, 1))
8313 1116712 : && INTVAL (rhs) >= INTVAL (XEXP (lhs, 1))
8314 883223 : && INTVAL (XEXP (lhs, 1)) >= 0
8315 883219 : && INTVAL (rhs) < mode_width)
8316 : {
8317 883219 : new_rtx = make_compound_operation (XEXP (lhs, 0), next_code);
8318 883219 : new_rtx = make_extraction (mode, new_rtx,
8319 883219 : INTVAL (rhs) - INTVAL (XEXP (lhs, 1)),
8320 883219 : NULL_RTX, mode_width - INTVAL (rhs),
8321 : code == LSHIFTRT, false,
8322 : in_code == COMPARE);
8323 883219 : break;
8324 : }
8325 :
8326 : /* See if we have operations between an ASHIFTRT and an ASHIFT.
8327 : If so, try to merge the shifts into a SIGN_EXTEND. We could
8328 : also do this for some cases of SIGN_EXTRACT, but it doesn't
8329 : seem worth the effort; the case checked for occurs on Alpha. */
8330 :
8331 3508534 : if (!OBJECT_P (lhs)
8332 1517800 : && ! (GET_CODE (lhs) == SUBREG
8333 85610 : && (OBJECT_P (SUBREG_REG (lhs))))
8334 1446706 : && CONST_INT_P (rhs)
8335 1423926 : && INTVAL (rhs) >= 0
8336 1423926 : && INTVAL (rhs) < HOST_BITS_PER_WIDE_INT
8337 1419263 : && INTVAL (rhs) < mode_width
8338 4927797 : && (new_rtx = extract_left_shift (mode, lhs, INTVAL (rhs))) != 0)
8339 242623 : new_rtx = make_extraction (mode, make_compound_operation (new_rtx,
8340 : next_code),
8341 242623 : 0, NULL_RTX, mode_width - INTVAL (rhs),
8342 : code == LSHIFTRT, false, in_code == COMPARE);
8343 :
8344 : break;
8345 :
8346 8606354 : case SUBREG:
8347 : /* Call ourselves recursively on the inner expression. If we are
8348 : narrowing the object and it has a different RTL code from
8349 : what it originally did, do this SUBREG as a force_to_mode. */
8350 8606354 : {
8351 8606354 : rtx inner = SUBREG_REG (x), simplified;
8352 8606354 : enum rtx_code subreg_code = in_code;
8353 :
8354 : /* If the SUBREG is masking of a logical right shift,
8355 : make an extraction. */
8356 8606354 : if (GET_CODE (inner) == LSHIFTRT
8357 8618763 : && is_a <scalar_int_mode> (GET_MODE (inner), &inner_mode)
8358 528210 : && GET_MODE_SIZE (mode) < GET_MODE_SIZE (inner_mode)
8359 258069 : && CONST_INT_P (XEXP (inner, 1))
8360 253040 : && UINTVAL (XEXP (inner, 1)) < GET_MODE_PRECISION (inner_mode)
8361 8859394 : && subreg_lowpart_p (x))
8362 : {
8363 251696 : new_rtx = make_compound_operation (XEXP (inner, 0), next_code);
8364 251696 : int width = GET_MODE_PRECISION (inner_mode)
8365 251696 : - INTVAL (XEXP (inner, 1));
8366 251696 : if (width > mode_width)
8367 : width = mode_width;
8368 251696 : new_rtx = make_extraction (mode, new_rtx, 0, XEXP (inner, 1),
8369 : width, true, false, in_code == COMPARE);
8370 251696 : break;
8371 : }
8372 :
8373 : /* If in_code is COMPARE, it isn't always safe to pass it through
8374 : to the recursive make_compound_operation call. */
8375 8354658 : if (subreg_code == COMPARE
8376 8354658 : && (!subreg_lowpart_p (x)
8377 152153 : || GET_CODE (inner) == SUBREG
8378 : /* (subreg:SI (and:DI (reg:DI) (const_int 0x800000000)) 0)
8379 : is (const_int 0), rather than
8380 : (subreg:SI (lshiftrt:DI (reg:DI) (const_int 35)) 0).
8381 : Similarly (subreg:QI (and:SI (reg:SI) (const_int 0x80)) 0)
8382 : for non-equality comparisons against 0 is not equivalent
8383 : to (subreg:QI (lshiftrt:SI (reg:SI) (const_int 7)) 0). */
8384 152153 : || (GET_CODE (inner) == AND
8385 1195 : && CONST_INT_P (XEXP (inner, 1))
8386 143 : && partial_subreg_p (x)
8387 286 : && exact_log2 (UINTVAL (XEXP (inner, 1)))
8388 143 : >= GET_MODE_BITSIZE (mode) - 1)))
8389 : subreg_code = SET;
8390 :
8391 8354658 : tem = make_compound_operation (inner, subreg_code);
8392 :
8393 : /* TEM's code might be CLOBBER if combine_simplify_rtx
8394 : could not transform a subexpression, e.g. a volatile MEM.
8395 : simplify_subreg cannot be called with clobber, so bail out. */
8396 8354658 : if (GET_CODE (tem) == CLOBBER)
8397 : return NULL_RTX;
8398 :
8399 8354639 : simplified
8400 8354639 : = simplify_subreg (mode, tem, GET_MODE (inner), SUBREG_BYTE (x));
8401 8354639 : if (simplified)
8402 14846 : tem = simplified;
8403 :
8404 8354639 : if (GET_CODE (tem) != GET_CODE (inner)
8405 20659 : && partial_subreg_p (x)
8406 8373116 : && subreg_lowpart_p (x))
8407 : {
8408 18461 : rtx newer
8409 18461 : = force_to_mode (tem, mode, HOST_WIDE_INT_M1U, false);
8410 :
8411 : /* If we have something other than a SUBREG, we might have
8412 : done an expansion, so rerun ourselves. */
8413 18461 : if (GET_CODE (newer) != SUBREG)
8414 15274 : newer = make_compound_operation (newer, in_code);
8415 :
8416 : /* force_to_mode can expand compounds. If it just re-expanded
8417 : the compound, use gen_lowpart to convert to the desired
8418 : mode. */
8419 18461 : if (rtx_equal_p (newer, x)
8420 : /* Likewise if it re-expanded the compound only partially.
8421 : This happens for SUBREG of ZERO_EXTRACT if they extract
8422 : the same number of bits. */
8423 18461 : || (GET_CODE (newer) == SUBREG
8424 2252 : && (GET_CODE (SUBREG_REG (newer)) == LSHIFTRT
8425 2252 : || GET_CODE (SUBREG_REG (newer)) == ASHIFTRT)
8426 158 : && GET_CODE (inner) == AND
8427 0 : && rtx_equal_p (SUBREG_REG (newer), XEXP (inner, 0))))
8428 2477 : return gen_lowpart (GET_MODE (x), tem);
8429 :
8430 15984 : return newer;
8431 : }
8432 :
8433 8336178 : if (simplified)
8434 : return tem;
8435 : }
8436 : break;
8437 :
8438 : default:
8439 : break;
8440 : }
8441 :
8442 10146156 : if (new_rtx)
8443 5492123 : *x_ptr = gen_lowpart (mode, new_rtx);
8444 217551135 : *next_code_ptr = next_code;
8445 217551135 : return NULL_RTX;
8446 : }
8447 :
8448 : /* Look at the expression rooted at X. Look for expressions
8449 : equivalent to ZERO_EXTRACT, SIGN_EXTRACT, ZERO_EXTEND, SIGN_EXTEND.
8450 : Form these expressions.
8451 :
8452 : Return the new rtx, usually just X.
8453 :
8454 : Also, for machines like the VAX that don't have logical shift insns,
8455 : try to convert logical to arithmetic shift operations in cases where
8456 : they are equivalent. This undoes the canonicalizations to logical
8457 : shifts done elsewhere.
8458 :
8459 : We try, as much as possible, to re-use rtl expressions to save memory.
8460 :
8461 : IN_CODE says what kind of expression we are processing. Normally, it is
8462 : SET. In a memory address it is MEM. When processing the arguments of
8463 : a comparison or a COMPARE against zero, it is COMPARE, or EQ if more
8464 : precisely it is an equality comparison against zero. */
8465 :
8466 : rtx
8467 463691318 : make_compound_operation (rtx x, enum rtx_code in_code)
8468 : {
8469 463691318 : enum rtx_code code = GET_CODE (x);
8470 463691318 : const char *fmt;
8471 463691318 : int i, j;
8472 463691318 : enum rtx_code next_code;
8473 463691318 : rtx new_rtx, tem;
8474 :
8475 : /* Select the code to be used in recursive calls. Once we are inside an
8476 : address, we stay there. If we have a comparison, set to COMPARE,
8477 : but once inside, go back to our default of SET. */
8478 :
8479 463691318 : next_code = (code == MEM ? MEM
8480 435891670 : : ((code == COMPARE || COMPARISON_P (x))
8481 455590079 : && XEXP (x, 1) == const0_rtx) ? COMPARE
8482 428205838 : : in_code == COMPARE || in_code == EQ ? SET : in_code);
8483 :
8484 463691318 : scalar_int_mode mode;
8485 463691318 : if (is_a <scalar_int_mode> (GET_MODE (x), &mode))
8486 : {
8487 271494452 : rtx new_rtx = make_compound_operation_int (mode, &x, in_code,
8488 : &next_code);
8489 271494452 : if (new_rtx)
8490 : return new_rtx;
8491 217551154 : code = GET_CODE (x);
8492 : }
8493 :
8494 : /* Now recursively process each operand of this operation. We need to
8495 : handle ZERO_EXTEND specially so that we don't lose track of the
8496 : inner mode. */
8497 409748020 : if (code == ZERO_EXTEND)
8498 : {
8499 3265286 : new_rtx = make_compound_operation (XEXP (x, 0), next_code);
8500 6530572 : tem = simplify_unary_operation (ZERO_EXTEND, GET_MODE (x),
8501 3265286 : new_rtx, GET_MODE (XEXP (x, 0)));
8502 3265286 : if (tem)
8503 : return tem;
8504 3254978 : SUBST (XEXP (x, 0), new_rtx);
8505 3254978 : return x;
8506 : }
8507 :
8508 406482734 : fmt = GET_RTX_FORMAT (code);
8509 945692395 : for (i = 0; i < GET_RTX_LENGTH (code); i++)
8510 539209661 : if (fmt[i] == 'e')
8511 : {
8512 206598180 : new_rtx = make_compound_operation (XEXP (x, i), next_code);
8513 206598180 : SUBST (XEXP (x, i), new_rtx);
8514 : }
8515 332611481 : else if (fmt[i] == 'E')
8516 25289856 : for (j = 0; j < XVECLEN (x, i); j++)
8517 : {
8518 18273043 : new_rtx = make_compound_operation (XVECEXP (x, i, j), next_code);
8519 18273043 : SUBST (XVECEXP (x, i, j), new_rtx);
8520 : }
8521 :
8522 406482734 : maybe_swap_commutative_operands (x);
8523 406482734 : return x;
8524 : }
8525 :
8526 : /* Given M see if it is a value that would select a field of bits
8527 : within an item, but not the entire word. Return -1 if not.
8528 : Otherwise, return the starting position of the field, where 0 is the
8529 : low-order bit.
8530 :
8531 : *PLEN is set to the length of the field. */
8532 :
8533 : static int
8534 8468 : get_pos_from_mask (unsigned HOST_WIDE_INT m, unsigned HOST_WIDE_INT *plen)
8535 : {
8536 : /* Get the bit number of the first 1 bit from the right, -1 if none. */
8537 8468 : int pos = m ? ctz_hwi (m) : -1;
8538 8468 : int len = 0;
8539 :
8540 8468 : if (pos >= 0)
8541 : /* Now shift off the low-order zero bits and see if we have a
8542 : power of two minus 1. */
8543 8468 : len = exact_log2 ((m >> pos) + 1);
8544 :
8545 6819 : if (len <= 0)
8546 : pos = -1;
8547 :
8548 8468 : *plen = len;
8549 8468 : return pos;
8550 : }
8551 :
8552 : /* If X refers to a register that equals REG in value, replace these
8553 : references with REG. */
8554 : static rtx
8555 9588 : canon_reg_for_combine (rtx x, rtx reg)
8556 : {
8557 9588 : rtx op0, op1, op2;
8558 9588 : const char *fmt;
8559 9588 : int i;
8560 9588 : bool copied;
8561 :
8562 9588 : enum rtx_code code = GET_CODE (x);
8563 9588 : switch (GET_RTX_CLASS (code))
8564 : {
8565 0 : case RTX_UNARY:
8566 0 : op0 = canon_reg_for_combine (XEXP (x, 0), reg);
8567 0 : if (op0 != XEXP (x, 0))
8568 0 : return simplify_gen_unary (GET_CODE (x), GET_MODE (x), op0,
8569 0 : GET_MODE (reg));
8570 : break;
8571 :
8572 1736 : case RTX_BIN_ARITH:
8573 1736 : case RTX_COMM_ARITH:
8574 1736 : op0 = canon_reg_for_combine (XEXP (x, 0), reg);
8575 1736 : op1 = canon_reg_for_combine (XEXP (x, 1), reg);
8576 1736 : if (op0 != XEXP (x, 0) || op1 != XEXP (x, 1))
8577 0 : return simplify_gen_binary (GET_CODE (x), GET_MODE (x), op0, op1);
8578 : break;
8579 :
8580 14 : case RTX_COMPARE:
8581 14 : case RTX_COMM_COMPARE:
8582 14 : op0 = canon_reg_for_combine (XEXP (x, 0), reg);
8583 14 : op1 = canon_reg_for_combine (XEXP (x, 1), reg);
8584 14 : if (op0 != XEXP (x, 0) || op1 != XEXP (x, 1))
8585 0 : return simplify_gen_relational (GET_CODE (x), GET_MODE (x),
8586 0 : GET_MODE (op0), op0, op1);
8587 : break;
8588 :
8589 0 : case RTX_TERNARY:
8590 0 : case RTX_BITFIELD_OPS:
8591 0 : op0 = canon_reg_for_combine (XEXP (x, 0), reg);
8592 0 : op1 = canon_reg_for_combine (XEXP (x, 1), reg);
8593 0 : op2 = canon_reg_for_combine (XEXP (x, 2), reg);
8594 0 : if (op0 != XEXP (x, 0) || op1 != XEXP (x, 1) || op2 != XEXP (x, 2))
8595 0 : return simplify_gen_ternary (GET_CODE (x), GET_MODE (x),
8596 0 : GET_MODE (op0), op0, op1, op2);
8597 : /* FALLTHRU */
8598 :
8599 5589 : case RTX_OBJ:
8600 5589 : if (REG_P (x))
8601 : {
8602 5583 : if (rtx_equal_p (get_last_value (reg), x)
8603 5583 : || rtx_equal_p (reg, get_last_value (x)))
8604 0 : return reg;
8605 : else
8606 : break;
8607 : }
8608 :
8609 : /* fall through */
8610 :
8611 2255 : default:
8612 2255 : fmt = GET_RTX_FORMAT (code);
8613 2255 : copied = false;
8614 4571 : for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
8615 2316 : if (fmt[i] == 'e')
8616 : {
8617 61 : rtx op = canon_reg_for_combine (XEXP (x, i), reg);
8618 61 : if (op != XEXP (x, i))
8619 : {
8620 0 : if (!copied)
8621 : {
8622 0 : copied = true;
8623 0 : x = copy_rtx (x);
8624 : }
8625 0 : XEXP (x, i) = op;
8626 : }
8627 : }
8628 2255 : else if (fmt[i] == 'E')
8629 : {
8630 : int j;
8631 0 : for (j = 0; j < XVECLEN (x, i); j++)
8632 : {
8633 0 : rtx op = canon_reg_for_combine (XVECEXP (x, i, j), reg);
8634 0 : if (op != XVECEXP (x, i, j))
8635 : {
8636 0 : if (!copied)
8637 : {
8638 0 : copied = true;
8639 0 : x = copy_rtx (x);
8640 : }
8641 0 : XVECEXP (x, i, j) = op;
8642 : }
8643 : }
8644 : }
8645 :
8646 : break;
8647 : }
8648 :
8649 : return x;
8650 : }
8651 :
8652 : /* Return X converted to MODE. If the value is already truncated to
8653 : MODE we can just return a subreg even though in the general case we
8654 : would need an explicit truncation. */
8655 :
8656 : static rtx
8657 112950534 : gen_lowpart_or_truncate (machine_mode mode, rtx x)
8658 : {
8659 112950534 : if (!CONST_INT_P (x)
8660 107459206 : && partial_subreg_p (mode, GET_MODE (x))
8661 112950534 : && !TRULY_NOOP_TRUNCATION_MODES_P (mode, GET_MODE (x))
8662 112950534 : && !(REG_P (x) && reg_truncated_to_mode (mode, x)))
8663 : {
8664 : /* Bit-cast X into an integer mode. */
8665 0 : if (!SCALAR_INT_MODE_P (GET_MODE (x)))
8666 0 : x = gen_lowpart (int_mode_for_mode (GET_MODE (x)).require (), x);
8667 0 : x = simplify_gen_unary (TRUNCATE, int_mode_for_mode (mode).require (),
8668 0 : x, GET_MODE (x));
8669 : }
8670 :
8671 112950534 : return gen_lowpart (mode, x);
8672 : }
8673 :
8674 : /* See if X can be simplified knowing that we will only refer to it in
8675 : MODE and will only refer to those bits that are nonzero in MASK.
8676 : If other bits are being computed or if masking operations are done
8677 : that select a superset of the bits in MASK, they can sometimes be
8678 : ignored.
8679 :
8680 : Return a possibly simplified expression, but always convert X to
8681 : MODE. If X is a CONST_INT, AND the CONST_INT with MASK.
8682 :
8683 : If JUST_SELECT is true, don't optimize by noticing that bits in MASK
8684 : are all off in X. This is used when X will be complemented, by either
8685 : NOT, NEG, or XOR. */
8686 :
8687 : static rtx
8688 84607569 : force_to_mode (rtx x, machine_mode mode, unsigned HOST_WIDE_INT mask,
8689 : bool just_select)
8690 : {
8691 84607569 : enum rtx_code code = GET_CODE (x);
8692 84607569 : bool next_select = just_select || code == XOR || code == NOT || code == NEG;
8693 84607569 : machine_mode op_mode;
8694 84607569 : unsigned HOST_WIDE_INT nonzero;
8695 :
8696 : /* If this is a CALL or ASM_OPERANDS, don't do anything. Some of the
8697 : code below will do the wrong thing since the mode of such an
8698 : expression is VOIDmode.
8699 :
8700 : Also do nothing if X is a CLOBBER; this can happen if X was
8701 : the return value from a call to gen_lowpart. */
8702 84607569 : if (code == CALL || code == ASM_OPERANDS || code == CLOBBER)
8703 : return x;
8704 :
8705 : /* We want to perform the operation in its present mode unless we know
8706 : that the operation is valid in MODE, in which case we do the operation
8707 : in MODE. */
8708 138488100 : op_mode = ((GET_MODE_CLASS (mode) == GET_MODE_CLASS (GET_MODE (x))
8709 78581087 : && have_insn_for (code, mode))
8710 132538908 : ? mode : GET_MODE (x));
8711 :
8712 : /* It is not valid to do a right-shift in a narrower mode
8713 : than the one it came in with. */
8714 84530279 : if ((code == LSHIFTRT || code == ASHIFTRT)
8715 84530279 : && partial_subreg_p (mode, GET_MODE (x)))
8716 361854 : op_mode = GET_MODE (x);
8717 :
8718 : /* Truncate MASK to fit OP_MODE. */
8719 84530279 : if (op_mode)
8720 78613255 : mask &= GET_MODE_MASK (op_mode);
8721 :
8722 : /* Determine what bits of X are guaranteed to be (non)zero. */
8723 84530279 : nonzero = nonzero_bits (x, mode);
8724 :
8725 : /* If none of the bits in X are needed, return a zero. */
8726 84530279 : if (!just_select && (nonzero & mask) == 0 && !side_effects_p (x))
8727 608924 : x = const0_rtx;
8728 :
8729 : /* If X is a CONST_INT, return a new one. Do this here since the
8730 : test below will fail. */
8731 84530279 : if (CONST_INT_P (x))
8732 : {
8733 6110437 : if (SCALAR_INT_MODE_P (mode))
8734 6110437 : return gen_int_mode (INTVAL (x) & mask, mode);
8735 : else
8736 : {
8737 0 : x = GEN_INT (INTVAL (x) & mask);
8738 0 : return gen_lowpart_common (mode, x);
8739 : }
8740 : }
8741 :
8742 : /* If X is narrower than MODE and we want all the bits in X's mode, just
8743 : get X in the proper mode. */
8744 78419842 : if (paradoxical_subreg_p (mode, GET_MODE (x))
8745 78419842 : && (GET_MODE_MASK (GET_MODE (x)) & ~mask) == 0)
8746 3112289 : return gen_lowpart (mode, x);
8747 :
8748 : /* We can ignore the effect of a SUBREG if it narrows the mode or
8749 : if the constant masks to zero all the bits the mode doesn't have. */
8750 75307553 : if (GET_CODE (x) == SUBREG
8751 6713723 : && subreg_lowpart_p (x)
8752 81871509 : && (partial_subreg_p (x)
8753 5041981 : || (mask
8754 5041981 : & GET_MODE_MASK (GET_MODE (x))
8755 5041981 : & ~GET_MODE_MASK (GET_MODE (SUBREG_REG (x)))) == 0))
8756 6536447 : return force_to_mode (SUBREG_REG (x), mode, mask, next_select);
8757 :
8758 68771106 : scalar_int_mode int_mode, xmode;
8759 68771106 : if (is_a <scalar_int_mode> (mode, &int_mode)
8760 68771106 : && is_a <scalar_int_mode> (GET_MODE (x), &xmode))
8761 : /* OP_MODE is either MODE or XMODE, so it must be a scalar
8762 : integer too. */
8763 68739335 : return force_int_to_mode (x, int_mode, xmode,
8764 : as_a <scalar_int_mode> (op_mode),
8765 68739335 : mask, just_select);
8766 :
8767 31771 : return gen_lowpart_or_truncate (mode, x);
8768 : }
8769 :
8770 : /* Subroutine of force_to_mode that handles cases in which both X and
8771 : the result are scalar integers. MODE is the mode of the result,
8772 : XMODE is the mode of X, and OP_MODE says which of MODE or XMODE
8773 : is preferred for simplified versions of X. The other arguments
8774 : are as for force_to_mode. */
8775 :
8776 : static rtx
8777 68739335 : force_int_to_mode (rtx x, scalar_int_mode mode, scalar_int_mode xmode,
8778 : scalar_int_mode op_mode, unsigned HOST_WIDE_INT mask,
8779 : bool just_select)
8780 : {
8781 68739335 : enum rtx_code code = GET_CODE (x);
8782 68739335 : bool next_select = just_select || code == XOR || code == NOT || code == NEG;
8783 68739335 : unsigned HOST_WIDE_INT fuller_mask;
8784 68739335 : rtx op0, op1, temp;
8785 68739335 : poly_int64 const_op0;
8786 :
8787 : /* When we have an arithmetic operation, or a shift whose count we
8788 : do not know, we need to assume that all bits up to the highest-order
8789 : bit in MASK will be needed. This is how we form such a mask. */
8790 68739335 : if (mask & (HOST_WIDE_INT_1U << (HOST_BITS_PER_WIDE_INT - 1)))
8791 : fuller_mask = HOST_WIDE_INT_M1U;
8792 : else
8793 75762105 : fuller_mask = ((HOST_WIDE_INT_1U << (floor_log2 (mask) + 1)) - 1);
8794 :
8795 68739335 : switch (code)
8796 : {
8797 : case CLOBBER:
8798 : /* If X is a (clobber (const_int)), return it since we know we are
8799 : generating something that won't match. */
8800 : return x;
8801 :
8802 319228 : case SIGN_EXTEND:
8803 319228 : case ZERO_EXTEND:
8804 319228 : case ZERO_EXTRACT:
8805 319228 : case SIGN_EXTRACT:
8806 319228 : x = expand_compound_operation (x);
8807 319228 : if (GET_CODE (x) != code)
8808 192492 : return force_to_mode (x, mode, mask, next_select);
8809 : break;
8810 :
8811 144 : case TRUNCATE:
8812 : /* Similarly for a truncate. */
8813 144 : return force_to_mode (XEXP (x, 0), mode, mask, next_select);
8814 :
8815 3425902 : case AND:
8816 : /* If this is an AND with a constant, convert it into an AND
8817 : whose constant is the AND of that constant with MASK. If it
8818 : remains an AND of MASK, delete it since it is redundant. */
8819 :
8820 3425902 : if (CONST_INT_P (XEXP (x, 1)))
8821 : {
8822 5476992 : x = simplify_and_const_int (x, op_mode, XEXP (x, 0),
8823 2738496 : mask & INTVAL (XEXP (x, 1)));
8824 2738496 : xmode = op_mode;
8825 :
8826 : /* If X is still an AND, see if it is an AND with a mask that
8827 : is just some low-order bits. If so, and it is MASK, we don't
8828 : need it. */
8829 :
8830 2715348 : if (GET_CODE (x) == AND && CONST_INT_P (XEXP (x, 1))
8831 5453844 : && (INTVAL (XEXP (x, 1)) & GET_MODE_MASK (xmode)) == mask)
8832 19872 : x = XEXP (x, 0);
8833 :
8834 : /* If it remains an AND, try making another AND with the bits
8835 : in the mode mask that aren't in MASK turned on. If the
8836 : constant in the AND is wide enough, this might make a
8837 : cheaper constant. */
8838 :
8839 2695480 : if (GET_CODE (x) == AND && CONST_INT_P (XEXP (x, 1))
8840 2695476 : && GET_MODE_MASK (xmode) != mask
8841 2823495 : && HWI_COMPUTABLE_MODE_P (xmode))
8842 : {
8843 84999 : unsigned HOST_WIDE_INT cval
8844 84999 : = UINTVAL (XEXP (x, 1)) | (GET_MODE_MASK (xmode) & ~mask);
8845 84999 : rtx y;
8846 :
8847 84999 : y = simplify_gen_binary (AND, xmode, XEXP (x, 0),
8848 84999 : gen_int_mode (cval, xmode));
8849 84999 : if (set_src_cost (y, xmode, optimize_this_for_speed_p)
8850 84999 : < set_src_cost (x, xmode, optimize_this_for_speed_p))
8851 68455744 : x = y;
8852 : }
8853 :
8854 : break;
8855 : }
8856 :
8857 687406 : goto binop;
8858 :
8859 9652577 : case PLUS:
8860 : /* In (and (plus FOO C1) M), if M is a mask that just turns off
8861 : low-order bits (as in an alignment operation) and FOO is already
8862 : aligned to that boundary, mask C1 to that boundary as well.
8863 : This may eliminate that PLUS and, later, the AND. */
8864 :
8865 9652577 : {
8866 9652577 : unsigned int width = GET_MODE_PRECISION (mode);
8867 9652577 : unsigned HOST_WIDE_INT smask = mask;
8868 :
8869 : /* If MODE is narrower than HOST_WIDE_INT and mask is a negative
8870 : number, sign extend it. */
8871 :
8872 9652577 : if (width < HOST_BITS_PER_WIDE_INT
8873 2996296 : && (smask & (HOST_WIDE_INT_1U << (width - 1))) != 0)
8874 2661933 : smask |= HOST_WIDE_INT_M1U << width;
8875 :
8876 9652577 : if (CONST_INT_P (XEXP (x, 1))
8877 3580757 : && pow2p_hwi (- smask)
8878 3030790 : && (nonzero_bits (XEXP (x, 0), mode) & ~smask) == 0
8879 12306228 : && (INTVAL (XEXP (x, 1)) & ~smask) != 0)
8880 11405 : return force_to_mode (plus_constant (xmode, XEXP (x, 0),
8881 11405 : (INTVAL (XEXP (x, 1)) & smask)),
8882 : mode, smask, next_select);
8883 : }
8884 :
8885 : /* fall through */
8886 :
8887 11466310 : case MULT:
8888 : /* Substituting into the operands of a widening MULT is not likely to
8889 : create RTL matching a machine insn. */
8890 11466310 : if (code == MULT
8891 1825138 : && (GET_CODE (XEXP (x, 0)) == ZERO_EXTEND
8892 1825138 : || GET_CODE (XEXP (x, 0)) == SIGN_EXTEND)
8893 76515 : && (GET_CODE (XEXP (x, 1)) == ZERO_EXTEND
8894 76515 : || GET_CODE (XEXP (x, 1)) == SIGN_EXTEND)
8895 36442 : && REG_P (XEXP (XEXP (x, 0), 0))
8896 28735 : && REG_P (XEXP (XEXP (x, 1), 0)))
8897 20987 : return gen_lowpart_or_truncate (mode, x);
8898 :
8899 : /* For PLUS, MINUS and MULT, we need any bits less significant than the
8900 : most significant bit in MASK since carries from those bits will
8901 : affect the bits we are interested in. */
8902 11445323 : mask = fuller_mask;
8903 11445323 : goto binop;
8904 :
8905 2171088 : case MINUS:
8906 : /* If X is (minus C Y) where C's least set bit is larger than any bit
8907 : in the mask, then we may replace with (neg Y). */
8908 2171088 : if (poly_int_rtx_p (XEXP (x, 0), &const_op0)
8909 145958 : && known_alignment (poly_uint64 (const_op0)) > mask)
8910 : {
8911 20 : x = simplify_gen_unary (NEG, xmode, XEXP (x, 1), xmode);
8912 20 : return force_to_mode (x, mode, mask, next_select);
8913 : }
8914 :
8915 : /* Similarly, if C contains every bit in the fuller_mask, then we may
8916 : replace with (not Y). */
8917 2171068 : if (CONST_INT_P (XEXP (x, 0))
8918 145938 : && ((UINTVAL (XEXP (x, 0)) | fuller_mask) == UINTVAL (XEXP (x, 0))))
8919 : {
8920 454 : x = simplify_gen_unary (NOT, xmode, XEXP (x, 1), xmode);
8921 454 : return force_to_mode (x, mode, mask, next_select);
8922 : }
8923 :
8924 2170614 : mask = fuller_mask;
8925 2170614 : goto binop;
8926 :
8927 2377276 : case IOR:
8928 2377276 : case XOR:
8929 : /* If X is (ior (lshiftrt FOO C1) C2), try to commute the IOR and
8930 : LSHIFTRT so we end up with an (and (lshiftrt (ior ...) ...) ...)
8931 : operation which may be a bitfield extraction. Ensure that the
8932 : constant we form is not wider than the mode of X. */
8933 :
8934 2377276 : if (GET_CODE (XEXP (x, 0)) == LSHIFTRT
8935 74898 : && CONST_INT_P (XEXP (XEXP (x, 0), 1))
8936 64709 : && INTVAL (XEXP (XEXP (x, 0), 1)) >= 0
8937 64709 : && INTVAL (XEXP (XEXP (x, 0), 1)) < HOST_BITS_PER_WIDE_INT
8938 64709 : && CONST_INT_P (XEXP (x, 1))
8939 8023 : && ((INTVAL (XEXP (XEXP (x, 0), 1))
8940 16046 : + floor_log2 (INTVAL (XEXP (x, 1))))
8941 8023 : < GET_MODE_PRECISION (xmode))
8942 2377276 : && (UINTVAL (XEXP (x, 1))
8943 4581 : & ~nonzero_bits (XEXP (x, 0), xmode)) == 0)
8944 : {
8945 8426 : temp = gen_int_mode ((INTVAL (XEXP (x, 1)) & mask)
8946 4213 : << INTVAL (XEXP (XEXP (x, 0), 1)),
8947 : xmode);
8948 8426 : temp = simplify_gen_binary (GET_CODE (x), xmode,
8949 4213 : XEXP (XEXP (x, 0), 0), temp);
8950 8426 : x = simplify_gen_binary (LSHIFTRT, xmode, temp,
8951 4213 : XEXP (XEXP (x, 0), 1));
8952 4213 : return force_to_mode (x, mode, mask, next_select);
8953 : }
8954 :
8955 16676406 : binop:
8956 : /* For most binary operations, just propagate into the operation and
8957 : change the mode if we have an operation of that mode. */
8958 :
8959 16676406 : op0 = force_to_mode (XEXP (x, 0), mode, mask, next_select);
8960 16676406 : op1 = force_to_mode (XEXP (x, 1), mode, mask, next_select);
8961 :
8962 : /* If we ended up truncating both operands, truncate the result of the
8963 : operation instead. */
8964 16676406 : if (GET_CODE (op0) == TRUNCATE
8965 0 : && GET_CODE (op1) == TRUNCATE)
8966 : {
8967 0 : op0 = XEXP (op0, 0);
8968 0 : op1 = XEXP (op1, 0);
8969 : }
8970 :
8971 16676406 : op0 = gen_lowpart_or_truncate (op_mode, op0);
8972 16676406 : op1 = gen_lowpart_or_truncate (op_mode, op1);
8973 :
8974 16676406 : if (op_mode != xmode || op0 != XEXP (x, 0) || op1 != XEXP (x, 1))
8975 : {
8976 1958774 : x = simplify_gen_binary (code, op_mode, op0, op1);
8977 1958774 : xmode = op_mode;
8978 : }
8979 : break;
8980 :
8981 4181568 : case ASHIFT:
8982 : /* For left shifts, do the same, but just for the first operand.
8983 : However, we cannot do anything with shifts where we cannot
8984 : guarantee that the counts are smaller than the size of the mode
8985 : because such a count will have a different meaning in a
8986 : wider mode. */
8987 :
8988 3981453 : if (! (CONST_INT_P (XEXP (x, 1))
8989 3981478 : && INTVAL (XEXP (x, 1)) >= 0
8990 3981453 : && INTVAL (XEXP (x, 1)) < GET_MODE_PRECISION (mode))
8991 4184075 : && ! (GET_MODE (XEXP (x, 1)) != VOIDmode
8992 200090 : && (nonzero_bits (XEXP (x, 1), GET_MODE (XEXP (x, 1)))
8993 200090 : < (unsigned HOST_WIDE_INT) GET_MODE_PRECISION (mode))))
8994 : break;
8995 :
8996 : /* If the shift count is a constant and we can do arithmetic in
8997 : the mode of the shift, refine which bits we need. Otherwise, use the
8998 : conservative form of the mask. */
8999 4044351 : if (CONST_INT_P (XEXP (x, 1))
9000 3978971 : && INTVAL (XEXP (x, 1)) >= 0
9001 3978971 : && INTVAL (XEXP (x, 1)) < GET_MODE_PRECISION (op_mode)
9002 8023322 : && HWI_COMPUTABLE_MODE_P (op_mode))
9003 3978285 : mask >>= INTVAL (XEXP (x, 1));
9004 : else
9005 : mask = fuller_mask;
9006 :
9007 4044351 : op0 = gen_lowpart_or_truncate (op_mode,
9008 : force_to_mode (XEXP (x, 0), mode,
9009 : mask, next_select));
9010 :
9011 4044351 : if (op_mode != xmode || op0 != XEXP (x, 0))
9012 : {
9013 973364 : x = simplify_gen_binary (code, op_mode, op0, XEXP (x, 1));
9014 973364 : xmode = op_mode;
9015 : }
9016 : break;
9017 :
9018 3068743 : case LSHIFTRT:
9019 : /* Here we can only do something if the shift count is a constant,
9020 : this shift constant is valid for the host, and we can do arithmetic
9021 : in OP_MODE. */
9022 :
9023 3068743 : if (CONST_INT_P (XEXP (x, 1))
9024 2960269 : && INTVAL (XEXP (x, 1)) >= 0
9025 2960268 : && INTVAL (XEXP (x, 1)) < HOST_BITS_PER_WIDE_INT
9026 6028993 : && HWI_COMPUTABLE_MODE_P (op_mode))
9027 : {
9028 2956833 : rtx inner = XEXP (x, 0);
9029 2956833 : unsigned HOST_WIDE_INT inner_mask;
9030 :
9031 : /* Select the mask of the bits we need for the shift operand. */
9032 2956833 : inner_mask = mask << INTVAL (XEXP (x, 1));
9033 :
9034 : /* We can only change the mode of the shift if we can do arithmetic
9035 : in the mode of the shift and INNER_MASK is no wider than the
9036 : width of X's mode. */
9037 2956833 : if ((inner_mask & ~GET_MODE_MASK (xmode)) != 0)
9038 292384 : op_mode = xmode;
9039 :
9040 2956833 : inner = force_to_mode (inner, op_mode, inner_mask, next_select);
9041 :
9042 2956833 : if (xmode != op_mode || inner != XEXP (x, 0))
9043 : {
9044 779648 : x = simplify_gen_binary (LSHIFTRT, op_mode, inner, XEXP (x, 1));
9045 779648 : xmode = op_mode;
9046 : }
9047 : }
9048 :
9049 : /* If we have (and (lshiftrt FOO C1) C2) where the combination of the
9050 : shift and AND produces only copies of the sign bit (C2 is one less
9051 : than a power of two), we can do this with just a shift. */
9052 :
9053 3068743 : if (GET_CODE (x) == LSHIFTRT
9054 3068689 : && CONST_INT_P (XEXP (x, 1))
9055 : /* The shift puts one of the sign bit copies in the least significant
9056 : bit. */
9057 5920430 : && ((INTVAL (XEXP (x, 1))
9058 2960215 : + num_sign_bit_copies (XEXP (x, 0), GET_MODE (XEXP (x, 0))))
9059 2960215 : >= GET_MODE_PRECISION (xmode))
9060 245340 : && pow2p_hwi (mask + 1)
9061 : /* Number of bits left after the shift must be more than the mask
9062 : needs. */
9063 73212 : && ((INTVAL (XEXP (x, 1)) + exact_log2 (mask + 1))
9064 73212 : <= GET_MODE_PRECISION (xmode))
9065 : /* Must be more sign bit copies than the mask needs. */
9066 3097398 : && ((int) num_sign_bit_copies (XEXP (x, 0), GET_MODE (XEXP (x, 0)))
9067 28655 : >= exact_log2 (mask + 1)))
9068 : {
9069 28655 : int nbits = GET_MODE_PRECISION (xmode) - exact_log2 (mask + 1);
9070 28655 : x = simplify_gen_binary (LSHIFTRT, xmode, XEXP (x, 0),
9071 28655 : gen_int_shift_amount (xmode, nbits));
9072 : }
9073 3068743 : goto shiftrt;
9074 :
9075 1868816 : case ASHIFTRT:
9076 : /* If we are just looking for the sign bit, we don't need this shift at
9077 : all, even if it has a variable count. */
9078 1868816 : if (val_signbit_p (xmode, mask))
9079 1275 : return force_to_mode (XEXP (x, 0), mode, mask, next_select);
9080 :
9081 : /* If this is a shift by a constant, get a mask that contains those bits
9082 : that are not copies of the sign bit. We then have two cases: If
9083 : MASK only includes those bits, this can be a logical shift, which may
9084 : allow simplifications. If MASK is a single-bit field not within
9085 : those bits, we are requesting a copy of the sign bit and hence can
9086 : shift the sign bit to the appropriate location. */
9087 :
9088 1867541 : if (CONST_INT_P (XEXP (x, 1)) && INTVAL (XEXP (x, 1)) >= 0
9089 1826299 : && INTVAL (XEXP (x, 1)) < HOST_BITS_PER_WIDE_INT)
9090 : {
9091 1826188 : unsigned HOST_WIDE_INT nonzero;
9092 1826188 : int i;
9093 :
9094 : /* If the considered data is wider than HOST_WIDE_INT, we can't
9095 : represent a mask for all its bits in a single scalar.
9096 : But we only care about the lower bits, so calculate these. */
9097 :
9098 1826188 : if (GET_MODE_PRECISION (xmode) > HOST_BITS_PER_WIDE_INT)
9099 : {
9100 408 : nonzero = HOST_WIDE_INT_M1U;
9101 :
9102 : /* GET_MODE_PRECISION (GET_MODE (x)) - INTVAL (XEXP (x, 1))
9103 : is the number of bits a full-width mask would have set.
9104 : We need only shift if these are fewer than nonzero can
9105 : hold. If not, we must keep all bits set in nonzero. */
9106 :
9107 408 : if (GET_MODE_PRECISION (xmode) - INTVAL (XEXP (x, 1))
9108 : < HOST_BITS_PER_WIDE_INT)
9109 0 : nonzero >>= INTVAL (XEXP (x, 1))
9110 0 : + HOST_BITS_PER_WIDE_INT
9111 0 : - GET_MODE_PRECISION (xmode);
9112 : }
9113 : else
9114 : {
9115 1825780 : nonzero = GET_MODE_MASK (xmode);
9116 1825780 : nonzero >>= INTVAL (XEXP (x, 1));
9117 : }
9118 :
9119 1826188 : if ((mask & ~nonzero) == 0)
9120 : {
9121 44863 : x = simplify_shift_const (NULL_RTX, LSHIFTRT, xmode,
9122 : XEXP (x, 0), INTVAL (XEXP (x, 1)));
9123 44863 : if (GET_CODE (x) != ASHIFTRT)
9124 44863 : return force_to_mode (x, mode, mask, next_select);
9125 : }
9126 :
9127 1781325 : else if ((i = exact_log2 (mask)) >= 0)
9128 : {
9129 73 : x = simplify_shift_const
9130 146 : (NULL_RTX, LSHIFTRT, xmode, XEXP (x, 0),
9131 73 : GET_MODE_PRECISION (xmode) - 1 - i);
9132 :
9133 73 : if (GET_CODE (x) != ASHIFTRT)
9134 73 : return force_to_mode (x, mode, mask, next_select);
9135 : }
9136 : }
9137 :
9138 : /* If MASK is 1, convert this to an LSHIFTRT. This can be done
9139 : even if the shift count isn't a constant. */
9140 1822605 : if (mask == 1)
9141 3126 : x = simplify_gen_binary (LSHIFTRT, xmode, XEXP (x, 0), XEXP (x, 1));
9142 :
9143 1819479 : shiftrt:
9144 :
9145 : /* If this is a zero- or sign-extension operation that just affects bits
9146 : we don't care about, remove it. Be sure the call above returned
9147 : something that is still a shift. */
9148 :
9149 4891348 : if ((GET_CODE (x) == LSHIFTRT || GET_CODE (x) == ASHIFTRT)
9150 4891294 : && CONST_INT_P (XEXP (x, 1))
9151 4741578 : && INTVAL (XEXP (x, 1)) >= 0
9152 4741577 : && (INTVAL (XEXP (x, 1))
9153 9483154 : <= GET_MODE_PRECISION (xmode) - (floor_log2 (mask) + 1))
9154 1735155 : && GET_CODE (XEXP (x, 0)) == ASHIFT
9155 4892286 : && XEXP (XEXP (x, 0), 1) == XEXP (x, 1))
9156 770 : return force_to_mode (XEXP (XEXP (x, 0), 0), mode, mask, next_select);
9157 :
9158 : break;
9159 :
9160 38569 : case ROTATE:
9161 38569 : case ROTATERT:
9162 : /* If the shift count is constant and we can do computations
9163 : in the mode of X, compute where the bits we care about are.
9164 : Otherwise, we can't do anything. Don't change the mode of
9165 : the shift or propagate MODE into the shift, though. */
9166 38569 : if (CONST_INT_P (XEXP (x, 1))
9167 29459 : && INTVAL (XEXP (x, 1)) >= 0)
9168 : {
9169 29457 : temp = simplify_binary_operation (code == ROTATE ? ROTATERT : ROTATE,
9170 29457 : xmode, gen_int_mode (mask, xmode),
9171 : XEXP (x, 1));
9172 29457 : if (temp && CONST_INT_P (temp))
9173 29457 : x = simplify_gen_binary (code, xmode,
9174 : force_to_mode (XEXP (x, 0), xmode,
9175 29457 : INTVAL (temp), next_select),
9176 : XEXP (x, 1));
9177 : }
9178 : break;
9179 :
9180 152538 : case NEG:
9181 : /* If we just want the low-order bit, the NEG isn't needed since it
9182 : won't change the low-order bit. */
9183 152538 : if (mask == 1)
9184 319 : return force_to_mode (XEXP (x, 0), mode, mask, just_select);
9185 :
9186 : /* We need any bits less significant than the most significant bit in
9187 : MASK since carries from those bits will affect the bits we are
9188 : interested in. */
9189 152219 : mask = fuller_mask;
9190 152219 : goto unop;
9191 :
9192 419290 : case NOT:
9193 : /* (not FOO) is (xor FOO CONST), so if FOO is an LSHIFTRT, we can do the
9194 : same as the XOR case above. Ensure that the constant we form is not
9195 : wider than the mode of X. */
9196 :
9197 419290 : if (GET_CODE (XEXP (x, 0)) == LSHIFTRT
9198 14762 : && CONST_INT_P (XEXP (XEXP (x, 0), 1))
9199 14162 : && INTVAL (XEXP (XEXP (x, 0), 1)) >= 0
9200 28324 : && (INTVAL (XEXP (XEXP (x, 0), 1)) + floor_log2 (mask)
9201 14162 : < GET_MODE_PRECISION (xmode))
9202 425866 : && INTVAL (XEXP (XEXP (x, 0), 1)) < HOST_BITS_PER_WIDE_INT)
9203 : {
9204 6576 : temp = gen_int_mode (mask << INTVAL (XEXP (XEXP (x, 0), 1)), xmode);
9205 6576 : temp = simplify_gen_binary (XOR, xmode, XEXP (XEXP (x, 0), 0), temp);
9206 13152 : x = simplify_gen_binary (LSHIFTRT, xmode,
9207 6576 : temp, XEXP (XEXP (x, 0), 1));
9208 :
9209 6576 : return force_to_mode (x, mode, mask, next_select);
9210 : }
9211 :
9212 : /* (and (not FOO) CONST) is (not (or FOO (not CONST))), so we must
9213 : use the full mask inside the NOT. */
9214 : mask = fuller_mask;
9215 :
9216 564933 : unop:
9217 564933 : op0 = gen_lowpart_or_truncate (op_mode,
9218 : force_to_mode (XEXP (x, 0), mode, mask,
9219 : next_select));
9220 564933 : if (op_mode != xmode || op0 != XEXP (x, 0))
9221 : {
9222 60202 : x = simplify_gen_unary (code, op_mode, op0, op_mode);
9223 60202 : xmode = op_mode;
9224 : }
9225 : break;
9226 :
9227 544521 : case NE:
9228 : /* (and (ne FOO 0) CONST) can be (and FOO CONST) if CONST is included
9229 : in STORE_FLAG_VALUE and FOO has a single bit that might be nonzero,
9230 : which is equal to STORE_FLAG_VALUE. */
9231 544521 : if ((mask & ~STORE_FLAG_VALUE) == 0
9232 3054 : && XEXP (x, 1) == const0_rtx
9233 3033 : && GET_MODE (XEXP (x, 0)) == mode
9234 9 : && pow2p_hwi (nonzero_bits (XEXP (x, 0), mode))
9235 544521 : && (nonzero_bits (XEXP (x, 0), mode)
9236 : == (unsigned HOST_WIDE_INT) STORE_FLAG_VALUE))
9237 0 : return force_to_mode (XEXP (x, 0), mode, mask, next_select);
9238 :
9239 : break;
9240 :
9241 1351181 : case IF_THEN_ELSE:
9242 : /* We have no way of knowing if the IF_THEN_ELSE can itself be
9243 : written in a narrower mode. We play it safe and do not do so. */
9244 :
9245 1351181 : op0 = gen_lowpart_or_truncate (xmode,
9246 : force_to_mode (XEXP (x, 1), mode,
9247 : mask, next_select));
9248 1351181 : op1 = gen_lowpart_or_truncate (xmode,
9249 : force_to_mode (XEXP (x, 2), mode,
9250 : mask, next_select));
9251 1351181 : if (op0 != XEXP (x, 1) || op1 != XEXP (x, 2))
9252 233117 : x = simplify_gen_ternary (IF_THEN_ELSE, xmode,
9253 233117 : GET_MODE (XEXP (x, 0)), XEXP (x, 0),
9254 : op0, op1);
9255 : break;
9256 :
9257 : default:
9258 : break;
9259 : }
9260 :
9261 : /* Ensure we return a value of the proper mode. */
9262 68455744 : return gen_lowpart_or_truncate (mode, x);
9263 : }
9264 :
9265 : /* Return nonzero if X is an expression that has one of two values depending on
9266 : whether some other value is zero or nonzero. In that case, we return the
9267 : value that is being tested, *PTRUE is set to the value if the rtx being
9268 : returned has a nonzero value, and *PFALSE is set to the other alternative.
9269 :
9270 : If we return zero, we set *PTRUE and *PFALSE to X. */
9271 :
9272 : static rtx
9273 227890933 : if_then_else_cond (rtx x, rtx *ptrue, rtx *pfalse)
9274 : {
9275 227890933 : machine_mode mode = GET_MODE (x);
9276 227890933 : enum rtx_code code = GET_CODE (x);
9277 227890933 : rtx cond0, cond1, true0, true1, false0, false1;
9278 227890933 : unsigned HOST_WIDE_INT nz;
9279 227890933 : scalar_int_mode int_mode;
9280 :
9281 : /* If we are comparing a value against zero, we are done. */
9282 227890933 : if ((code == NE || code == EQ)
9283 2559395 : && XEXP (x, 1) == const0_rtx)
9284 : {
9285 1547722 : *ptrue = (code == NE) ? const_true_rtx : const0_rtx;
9286 1547722 : *pfalse = (code == NE) ? const0_rtx : const_true_rtx;
9287 1547722 : return XEXP (x, 0);
9288 : }
9289 :
9290 : /* If this is a unary operation whose operand has one of two values, apply
9291 : our opcode to compute those values. */
9292 226343211 : else if (UNARY_P (x)
9293 226343211 : && (cond0 = if_then_else_cond (XEXP (x, 0), &true0, &false0)) != 0)
9294 : {
9295 445652 : *ptrue = simplify_gen_unary (code, mode, true0, GET_MODE (XEXP (x, 0)));
9296 891304 : *pfalse = simplify_gen_unary (code, mode, false0,
9297 445652 : GET_MODE (XEXP (x, 0)));
9298 445652 : return cond0;
9299 : }
9300 :
9301 : /* If this is a COMPARE, do nothing, since the IF_THEN_ELSE we would
9302 : make can't possibly match and would suppress other optimizations. */
9303 225897559 : else if (code == COMPARE)
9304 : ;
9305 :
9306 : /* If this is a binary operation, see if either side has only one of two
9307 : values. If either one does or if both do and they are conditional on
9308 : the same value, compute the new true and false values. */
9309 221690480 : else if (BINARY_P (x))
9310 : {
9311 82730703 : rtx op0 = XEXP (x, 0);
9312 82730703 : rtx op1 = XEXP (x, 1);
9313 82730703 : cond0 = if_then_else_cond (op0, &true0, &false0);
9314 82730703 : cond1 = if_then_else_cond (op1, &true1, &false1);
9315 :
9316 515278 : if ((cond0 != 0 && cond1 != 0 && !rtx_equal_p (cond0, cond1))
9317 83192440 : && (REG_P (op0) || REG_P (op1)))
9318 : {
9319 : /* Try to enable a simplification by undoing work done by
9320 : if_then_else_cond if it converted a REG into something more
9321 : complex. */
9322 394260 : if (REG_P (op0))
9323 : {
9324 106250 : cond0 = 0;
9325 106250 : true0 = false0 = op0;
9326 : }
9327 : else
9328 : {
9329 288010 : cond1 = 0;
9330 288010 : true1 = false1 = op1;
9331 : }
9332 : }
9333 :
9334 82730703 : if ((cond0 != 0 || cond1 != 0)
9335 82730703 : && ! (cond0 != 0 && cond1 != 0 && !rtx_equal_p (cond0, cond1)))
9336 : {
9337 : /* If if_then_else_cond returned zero, then true/false are the
9338 : same rtl. We must copy one of them to prevent invalid rtl
9339 : sharing. */
9340 4107486 : if (cond0 == 0)
9341 1206828 : true0 = copy_rtx (true0);
9342 2900658 : else if (cond1 == 0)
9343 2847117 : true1 = copy_rtx (true1);
9344 :
9345 4107486 : if (COMPARISON_P (x))
9346 : {
9347 263801 : *ptrue = simplify_gen_relational (code, mode, VOIDmode,
9348 : true0, true1);
9349 263801 : *pfalse = simplify_gen_relational (code, mode, VOIDmode,
9350 : false0, false1);
9351 : }
9352 : else
9353 : {
9354 3843685 : *ptrue = simplify_gen_binary (code, mode, true0, true1);
9355 3843685 : *pfalse = simplify_gen_binary (code, mode, false0, false1);
9356 : }
9357 :
9358 5314314 : return cond0 ? cond0 : cond1;
9359 : }
9360 :
9361 : /* See if we have PLUS, IOR, XOR, MINUS or UMAX, where one of the
9362 : operands is zero when the other is nonzero, and vice-versa,
9363 : and STORE_FLAG_VALUE is 1 or -1. */
9364 :
9365 78623217 : if ((STORE_FLAG_VALUE == 1 || STORE_FLAG_VALUE == -1)
9366 78623217 : && (code == PLUS || code == IOR || code == XOR || code == MINUS
9367 : || code == UMAX)
9368 33334563 : && GET_CODE (XEXP (x, 0)) == MULT && GET_CODE (XEXP (x, 1)) == MULT)
9369 : {
9370 35483 : rtx op0 = XEXP (XEXP (x, 0), 1);
9371 35483 : rtx op1 = XEXP (XEXP (x, 1), 1);
9372 :
9373 35483 : cond0 = XEXP (XEXP (x, 0), 0);
9374 35483 : cond1 = XEXP (XEXP (x, 1), 0);
9375 :
9376 35483 : if (COMPARISON_P (cond0)
9377 1 : && COMPARISON_P (cond1)
9378 0 : && SCALAR_INT_MODE_P (mode)
9379 0 : && ((GET_CODE (cond0) == reversed_comparison_code (cond1, NULL)
9380 0 : && rtx_equal_p (XEXP (cond0, 0), XEXP (cond1, 0))
9381 0 : && rtx_equal_p (XEXP (cond0, 1), XEXP (cond1, 1)))
9382 0 : || ((swap_condition (GET_CODE (cond0))
9383 0 : == reversed_comparison_code (cond1, NULL))
9384 0 : && rtx_equal_p (XEXP (cond0, 0), XEXP (cond1, 1))
9385 0 : && rtx_equal_p (XEXP (cond0, 1), XEXP (cond1, 0))))
9386 35483 : && ! side_effects_p (x))
9387 : {
9388 0 : *ptrue = simplify_gen_binary (MULT, mode, op0, const_true_rtx);
9389 0 : *pfalse = simplify_gen_binary (MULT, mode,
9390 : (code == MINUS
9391 0 : ? simplify_gen_unary (NEG, mode,
9392 : op1, mode)
9393 : : op1),
9394 : const_true_rtx);
9395 0 : return cond0;
9396 : }
9397 : }
9398 :
9399 : /* Similarly for MULT, AND and UMIN, except that for these the result
9400 : is always zero. */
9401 78623217 : if ((STORE_FLAG_VALUE == 1 || STORE_FLAG_VALUE == -1)
9402 78623217 : && (code == MULT || code == AND || code == UMIN)
9403 10990511 : && GET_CODE (XEXP (x, 0)) == MULT && GET_CODE (XEXP (x, 1)) == MULT)
9404 : {
9405 886 : cond0 = XEXP (XEXP (x, 0), 0);
9406 886 : cond1 = XEXP (XEXP (x, 1), 0);
9407 :
9408 886 : if (COMPARISON_P (cond0)
9409 0 : && COMPARISON_P (cond1)
9410 0 : && ((GET_CODE (cond0) == reversed_comparison_code (cond1, NULL)
9411 0 : && rtx_equal_p (XEXP (cond0, 0), XEXP (cond1, 0))
9412 0 : && rtx_equal_p (XEXP (cond0, 1), XEXP (cond1, 1)))
9413 0 : || ((swap_condition (GET_CODE (cond0))
9414 0 : == reversed_comparison_code (cond1, NULL))
9415 0 : && rtx_equal_p (XEXP (cond0, 0), XEXP (cond1, 1))
9416 0 : && rtx_equal_p (XEXP (cond0, 1), XEXP (cond1, 0))))
9417 886 : && ! side_effects_p (x))
9418 : {
9419 0 : *ptrue = *pfalse = const0_rtx;
9420 0 : return cond0;
9421 : }
9422 : }
9423 : }
9424 :
9425 138959777 : else if (code == IF_THEN_ELSE)
9426 : {
9427 : /* If we have IF_THEN_ELSE already, extract the condition and
9428 : canonicalize it if it is NE or EQ. */
9429 617981 : cond0 = XEXP (x, 0);
9430 617981 : *ptrue = XEXP (x, 1), *pfalse = XEXP (x, 2);
9431 617981 : if (GET_CODE (cond0) == NE && XEXP (cond0, 1) == const0_rtx)
9432 252317 : return XEXP (cond0, 0);
9433 365664 : else if (GET_CODE (cond0) == EQ && XEXP (cond0, 1) == const0_rtx)
9434 : {
9435 23275 : *ptrue = XEXP (x, 2), *pfalse = XEXP (x, 1);
9436 23275 : return XEXP (cond0, 0);
9437 : }
9438 : else
9439 : return cond0;
9440 : }
9441 :
9442 : /* If X is a SUBREG, we can narrow both the true and false values
9443 : if the inner expression, if there is a condition. */
9444 138341796 : else if (code == SUBREG
9445 138341796 : && (cond0 = if_then_else_cond (SUBREG_REG (x), &true0,
9446 : &false0)) != 0)
9447 : {
9448 793360 : true0 = simplify_gen_subreg (mode, true0,
9449 396680 : GET_MODE (SUBREG_REG (x)), SUBREG_BYTE (x));
9450 793360 : false0 = simplify_gen_subreg (mode, false0,
9451 396680 : GET_MODE (SUBREG_REG (x)), SUBREG_BYTE (x));
9452 396680 : if (true0 && false0)
9453 : {
9454 396680 : *ptrue = true0;
9455 396680 : *pfalse = false0;
9456 396680 : return cond0;
9457 : }
9458 : }
9459 :
9460 : /* If X is a constant, this isn't special and will cause confusions
9461 : if we treat it as such. Likewise if it is equivalent to a constant. */
9462 137945116 : else if (CONSTANT_P (x)
9463 137945116 : || ((cond0 = get_last_value (x)) != 0 && CONSTANT_P (cond0)))
9464 : ;
9465 :
9466 : /* If we're in BImode, canonicalize on 0 and STORE_FLAG_VALUE, as that
9467 : will be least confusing to the rest of the compiler. */
9468 92401966 : else if (mode == BImode)
9469 : {
9470 0 : *ptrue = GEN_INT (STORE_FLAG_VALUE), *pfalse = const0_rtx;
9471 0 : return x;
9472 : }
9473 :
9474 : /* If X is known to be either 0 or -1, those are the true and
9475 : false values when testing X. */
9476 92401966 : else if (x == constm1_rtx || x == const0_rtx
9477 92401966 : || (is_a <scalar_int_mode> (mode, &int_mode)
9478 65286441 : && (num_sign_bit_copies (x, int_mode)
9479 65286441 : == GET_MODE_PRECISION (int_mode))))
9480 : {
9481 678149 : *ptrue = constm1_rtx, *pfalse = const0_rtx;
9482 678149 : return x;
9483 : }
9484 :
9485 : /* Likewise for 0 or a single bit. */
9486 91723817 : else if (HWI_COMPUTABLE_MODE_P (mode)
9487 61428967 : && pow2p_hwi (nz = nonzero_bits (x, mode)))
9488 : {
9489 1786649 : *ptrue = gen_int_mode (nz, mode), *pfalse = const0_rtx;
9490 1786649 : return x;
9491 : }
9492 :
9493 : /* Otherwise fail; show no condition with true and false values the same. */
9494 218310614 : *ptrue = *pfalse = x;
9495 218310614 : return 0;
9496 : }
9497 :
9498 : /* Return the value of expression X given the fact that condition COND
9499 : is known to be true when applied to REG as its first operand and VAL
9500 : as its second. X is known to not be shared and so can be modified in
9501 : place.
9502 :
9503 : We only handle the simplest cases, and specifically those cases that
9504 : arise with IF_THEN_ELSE expressions. */
9505 :
9506 : static rtx
9507 604218 : known_cond (rtx x, enum rtx_code cond, rtx reg, rtx val)
9508 : {
9509 604218 : enum rtx_code code = GET_CODE (x);
9510 604218 : const char *fmt;
9511 604218 : int i, j;
9512 :
9513 604218 : if (side_effects_p (x))
9514 : return x;
9515 :
9516 : /* If either operand of the condition is a floating point value,
9517 : then we have to avoid collapsing an EQ comparison. */
9518 604218 : if (cond == EQ
9519 122765 : && rtx_equal_p (x, reg)
9520 81714 : && ! FLOAT_MODE_P (GET_MODE (x))
9521 685932 : && ! FLOAT_MODE_P (GET_MODE (val)))
9522 : return val;
9523 :
9524 522504 : if (cond == UNEQ && rtx_equal_p (x, reg))
9525 : return val;
9526 :
9527 : /* If X is (abs REG) and we know something about REG's relationship
9528 : with zero, we may be able to simplify this. */
9529 :
9530 522504 : if (code == ABS && rtx_equal_p (XEXP (x, 0), reg) && val == const0_rtx)
9531 3 : switch (cond)
9532 : {
9533 1 : case GE: case GT: case EQ:
9534 1 : return XEXP (x, 0);
9535 2 : case LT: case LE:
9536 4 : return simplify_gen_unary (NEG, GET_MODE (XEXP (x, 0)),
9537 : XEXP (x, 0),
9538 2 : GET_MODE (XEXP (x, 0)));
9539 : default:
9540 : break;
9541 : }
9542 :
9543 : /* The only other cases we handle are MIN, MAX, and comparisons if the
9544 : operands are the same as REG and VAL. */
9545 :
9546 522501 : else if (COMPARISON_P (x) || COMMUTATIVE_ARITH_P (x))
9547 : {
9548 241573 : if (rtx_equal_p (XEXP (x, 0), val))
9549 : {
9550 2 : std::swap (val, reg);
9551 2 : cond = swap_condition (cond);
9552 : }
9553 :
9554 241573 : if (rtx_equal_p (XEXP (x, 0), reg) && rtx_equal_p (XEXP (x, 1), val))
9555 : {
9556 220633 : if (COMPARISON_P (x))
9557 : {
9558 220413 : if (comparison_dominates_p (cond, code))
9559 345 : return VECTOR_MODE_P (GET_MODE (x)) ? x : const_true_rtx;
9560 :
9561 220068 : code = reversed_comparison_code (x, NULL);
9562 220068 : if (code != UNKNOWN
9563 220068 : && comparison_dominates_p (cond, code))
9564 42 : return CONST0_RTX (GET_MODE (x));
9565 : else
9566 220026 : return x;
9567 : }
9568 220 : else if (code == SMAX || code == SMIN
9569 220 : || code == UMIN || code == UMAX)
9570 : {
9571 39 : int unsignedp = (code == UMIN || code == UMAX);
9572 :
9573 : /* Do not reverse the condition when it is NE or EQ.
9574 : This is because we cannot conclude anything about
9575 : the value of 'SMAX (x, y)' when x is not equal to y,
9576 : but we can when x equals y. */
9577 39 : if ((code == SMAX || code == UMAX)
9578 36 : && ! (cond == EQ || cond == NE))
9579 3 : cond = reverse_condition (cond);
9580 :
9581 6 : switch (cond)
9582 : {
9583 2 : case GE: case GT:
9584 2 : return unsignedp ? x : XEXP (x, 1);
9585 4 : case LE: case LT:
9586 4 : return unsignedp ? x : XEXP (x, 0);
9587 0 : case GEU: case GTU:
9588 0 : return unsignedp ? XEXP (x, 1) : x;
9589 0 : case LEU: case LTU:
9590 0 : return unsignedp ? XEXP (x, 0) : x;
9591 : default:
9592 : break;
9593 : }
9594 : }
9595 : }
9596 : }
9597 280928 : else if (code == SUBREG)
9598 : {
9599 8436 : machine_mode inner_mode = GET_MODE (SUBREG_REG (x));
9600 8436 : rtx new_rtx, r = known_cond (SUBREG_REG (x), cond, reg, val);
9601 :
9602 8436 : if (SUBREG_REG (x) != r)
9603 : {
9604 : /* We must simplify subreg here, before we lose track of the
9605 : original inner_mode. */
9606 34 : new_rtx = simplify_subreg (GET_MODE (x), r,
9607 17 : inner_mode, SUBREG_BYTE (x));
9608 17 : if (new_rtx)
9609 : return new_rtx;
9610 : else
9611 17 : SUBST (SUBREG_REG (x), r);
9612 : }
9613 :
9614 8436 : return x;
9615 : }
9616 : /* We don't have to handle SIGN_EXTEND here, because even in the
9617 : case of replacing something with a modeless CONST_INT, a
9618 : CONST_INT is already (supposed to be) a valid sign extension for
9619 : its narrower mode, which implies it's already properly
9620 : sign-extended for the wider mode. Now, for ZERO_EXTEND, the
9621 : story is different. */
9622 272492 : else if (code == ZERO_EXTEND)
9623 : {
9624 1276 : machine_mode inner_mode = GET_MODE (XEXP (x, 0));
9625 1276 : rtx new_rtx, r = known_cond (XEXP (x, 0), cond, reg, val);
9626 :
9627 1276 : if (XEXP (x, 0) != r)
9628 : {
9629 : /* We must simplify the zero_extend here, before we lose
9630 : track of the original inner_mode. */
9631 0 : new_rtx = simplify_unary_operation (ZERO_EXTEND, GET_MODE (x),
9632 : r, inner_mode);
9633 0 : if (new_rtx)
9634 : return new_rtx;
9635 : else
9636 0 : SUBST (XEXP (x, 0), r);
9637 : }
9638 :
9639 1276 : return x;
9640 : }
9641 :
9642 292370 : fmt = GET_RTX_FORMAT (code);
9643 673301 : for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
9644 : {
9645 380931 : if (fmt[i] == 'e')
9646 181819 : SUBST (XEXP (x, i), known_cond (XEXP (x, i), cond, reg, val));
9647 199112 : else if (fmt[i] == 'E')
9648 15148 : for (j = XVECLEN (x, i) - 1; j >= 0; j--)
9649 12212 : SUBST (XVECEXP (x, i, j), known_cond (XVECEXP (x, i, j),
9650 : cond, reg, val));
9651 : }
9652 :
9653 : return x;
9654 : }
9655 :
9656 : /* See if X and Y are equal for the purposes of seeing if we can rewrite an
9657 : assignment as a field assignment. */
9658 :
9659 : static bool
9660 541355 : rtx_equal_for_field_assignment_p (rtx x, rtx y, bool widen_x)
9661 : {
9662 541355 : if (widen_x && GET_MODE (x) != GET_MODE (y))
9663 : {
9664 56306 : if (paradoxical_subreg_p (GET_MODE (x), GET_MODE (y)))
9665 : return false;
9666 56306 : if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN)
9667 : return false;
9668 56306 : x = adjust_address_nv (x, GET_MODE (y),
9669 : byte_lowpart_offset (GET_MODE (y),
9670 : GET_MODE (x)));
9671 : }
9672 :
9673 541355 : if (x == y || rtx_equal_p (x, y))
9674 9159 : return true;
9675 :
9676 532196 : if (x == 0 || y == 0 || GET_MODE (x) != GET_MODE (y))
9677 : return false;
9678 :
9679 : /* Check for a paradoxical SUBREG of a MEM compared with the MEM.
9680 : Note that all SUBREGs of MEM are paradoxical; otherwise they
9681 : would have been rewritten. */
9682 91824 : if (MEM_P (x) && GET_CODE (y) == SUBREG
9683 6302 : && MEM_P (SUBREG_REG (y))
9684 532196 : && rtx_equal_p (SUBREG_REG (y),
9685 0 : gen_lowpart (GET_MODE (SUBREG_REG (y)), x)))
9686 : return true;
9687 :
9688 59567 : if (MEM_P (y) && GET_CODE (x) == SUBREG
9689 4551 : && MEM_P (SUBREG_REG (x))
9690 532385 : && rtx_equal_p (SUBREG_REG (x),
9691 189 : gen_lowpart (GET_MODE (SUBREG_REG (x)), y)))
9692 : return true;
9693 :
9694 : /* We used to see if get_last_value of X and Y were the same but that's
9695 : not correct. In one direction, we'll cause the assignment to have
9696 : the wrong destination and in the case, we'll import a register into this
9697 : insn that might have already have been dead. So fail if none of the
9698 : above cases are true. */
9699 : return false;
9700 : }
9701 :
9702 : /* See if X, a SET operation, can be rewritten as a bit-field assignment.
9703 : Return that assignment if so.
9704 :
9705 : We only handle the most common cases. */
9706 :
9707 : static rtx
9708 46037515 : make_field_assignment (rtx x)
9709 : {
9710 46037515 : rtx dest = SET_DEST (x);
9711 46037515 : rtx src = SET_SRC (x);
9712 46037515 : rtx assign;
9713 46037515 : rtx rhs, lhs;
9714 46037515 : HOST_WIDE_INT c1;
9715 46037515 : HOST_WIDE_INT pos;
9716 46037515 : unsigned HOST_WIDE_INT len;
9717 46037515 : rtx other;
9718 :
9719 : /* All the rules in this function are specific to scalar integers. */
9720 46037515 : scalar_int_mode mode;
9721 66805567 : if (!is_a <scalar_int_mode> (GET_MODE (dest), &mode))
9722 : return x;
9723 :
9724 : /* If SRC was (and (not (ashift (const_int 1) POS)) DEST), this is
9725 : a clear of a one-bit field. We will have changed it to
9726 : (and (rotate (const_int -2) POS) DEST), so check for that. Also check
9727 : for a SUBREG. */
9728 :
9729 1226750 : if (GET_CODE (src) == AND && GET_CODE (XEXP (src, 0)) == ROTATE
9730 2511 : && CONST_INT_P (XEXP (XEXP (src, 0), 0))
9731 553 : && INTVAL (XEXP (XEXP (src, 0), 0)) == -2
9732 20776140 : && rtx_equal_for_field_assignment_p (dest, XEXP (src, 1)))
9733 : {
9734 176 : assign = make_extraction (VOIDmode, dest, 0, XEXP (XEXP (src, 0), 1),
9735 : 1, true, true, false);
9736 176 : if (assign != 0)
9737 173 : return gen_rtx_SET (assign, const0_rtx);
9738 : return x;
9739 : }
9740 :
9741 1226574 : if (GET_CODE (src) == AND && GET_CODE (XEXP (src, 0)) == SUBREG
9742 82245 : && subreg_lowpart_p (XEXP (src, 0))
9743 82212 : && partial_subreg_p (XEXP (src, 0))
9744 19707 : && GET_CODE (SUBREG_REG (XEXP (src, 0))) == ROTATE
9745 125 : && CONST_INT_P (XEXP (SUBREG_REG (XEXP (src, 0)), 0))
9746 57 : && INTVAL (XEXP (SUBREG_REG (XEXP (src, 0)), 0)) == -2
9747 20775468 : && rtx_equal_for_field_assignment_p (dest, XEXP (src, 1)))
9748 : {
9749 14 : assign = make_extraction (VOIDmode, dest, 0,
9750 7 : XEXP (SUBREG_REG (XEXP (src, 0)), 1),
9751 : 1, true, true, false);
9752 7 : if (assign != 0)
9753 7 : return gen_rtx_SET (assign, const0_rtx);
9754 : return x;
9755 : }
9756 :
9757 : /* If SRC is (ior (ashift (const_int 1) POS) DEST), this is a set of a
9758 : one-bit field. */
9759 1723365 : if (GET_CODE (src) == IOR && GET_CODE (XEXP (src, 0)) == ASHIFT
9760 401782 : && XEXP (XEXP (src, 0), 0) == const1_rtx
9761 20777670 : && rtx_equal_for_field_assignment_p (dest, XEXP (src, 1)))
9762 : {
9763 559 : assign = make_extraction (VOIDmode, dest, 0, XEXP (XEXP (src, 0), 1),
9764 : 1, true, true, false);
9765 559 : if (assign != 0)
9766 530 : return gen_rtx_SET (assign, const1_rtx);
9767 : return x;
9768 : }
9769 :
9770 : /* If DEST is already a field assignment, i.e. ZERO_EXTRACT, and the
9771 : SRC is an AND with all bits of that field set, then we can discard
9772 : the AND. */
9773 20774845 : if (GET_CODE (dest) == ZERO_EXTRACT
9774 2834 : && CONST_INT_P (XEXP (dest, 1))
9775 2834 : && GET_CODE (src) == AND
9776 834 : && CONST_INT_P (XEXP (src, 1)))
9777 : {
9778 834 : HOST_WIDE_INT width = INTVAL (XEXP (dest, 1));
9779 834 : unsigned HOST_WIDE_INT and_mask = INTVAL (XEXP (src, 1));
9780 834 : unsigned HOST_WIDE_INT ze_mask;
9781 :
9782 834 : if (width >= HOST_BITS_PER_WIDE_INT)
9783 : ze_mask = -1;
9784 : else
9785 834 : ze_mask = (HOST_WIDE_INT_1U << width) - 1;
9786 :
9787 : /* Complete overlap. We can remove the source AND. */
9788 834 : if ((and_mask & ze_mask) == ze_mask)
9789 792 : return gen_rtx_SET (dest, XEXP (src, 0));
9790 :
9791 : /* Partial overlap. We can reduce the source AND. */
9792 42 : if ((and_mask & ze_mask) != and_mask)
9793 : {
9794 6 : src = gen_rtx_AND (mode, XEXP (src, 0),
9795 : gen_int_mode (and_mask & ze_mask, mode));
9796 6 : return gen_rtx_SET (dest, src);
9797 : }
9798 : }
9799 :
9800 : /* The other case we handle is assignments into a constant-position
9801 : field. They look like (ior/xor (and DEST C1) OTHER). If C1 represents
9802 : a mask that has all one bits except for a group of zero bits and
9803 : OTHER is known to have zeros where C1 has ones, this is such an
9804 : assignment. Compute the position and length from C1. Shift OTHER
9805 : to the appropriate position, force it to the required mode, and
9806 : make the extraction. Check for the AND in both operands. */
9807 :
9808 : /* One or more SUBREGs might obscure the constant-position field
9809 : assignment. The first one we are likely to encounter is an outer
9810 : narrowing SUBREG, which we can just strip for the purposes of
9811 : identifying the constant-field assignment. */
9812 20774047 : scalar_int_mode src_mode = mode;
9813 20774047 : if (GET_CODE (src) == SUBREG
9814 200462 : && subreg_lowpart_p (src)
9815 20958647 : && is_a <scalar_int_mode> (GET_MODE (SUBREG_REG (src)), &src_mode))
9816 : src = SUBREG_REG (src);
9817 :
9818 20774047 : if (GET_CODE (src) != IOR && GET_CODE (src) != XOR)
9819 : return x;
9820 :
9821 1898706 : rhs = expand_compound_operation (XEXP (src, 0));
9822 1898706 : lhs = expand_compound_operation (XEXP (src, 1));
9823 :
9824 1898706 : if (GET_CODE (rhs) == AND
9825 753068 : && CONST_INT_P (XEXP (rhs, 1))
9826 2306298 : && rtx_equal_for_field_assignment_p (XEXP (rhs, 0), dest))
9827 8439 : c1 = INTVAL (XEXP (rhs, 1)), other = lhs;
9828 : /* The second SUBREG that might get in the way is a paradoxical
9829 : SUBREG around the first operand of the AND. We want to
9830 : pretend the operand is as wide as the destination here. We
9831 : do this by adjusting the MEM to wider mode for the sole
9832 : purpose of the call to rtx_equal_for_field_assignment_p. Also
9833 : note this trick only works for MEMs. */
9834 1890267 : else if (GET_CODE (rhs) == AND
9835 744629 : && paradoxical_subreg_p (XEXP (rhs, 0))
9836 65392 : && MEM_P (SUBREG_REG (XEXP (rhs, 0)))
9837 30960 : && CONST_INT_P (XEXP (rhs, 1))
9838 1921227 : && rtx_equal_for_field_assignment_p (SUBREG_REG (XEXP (rhs, 0)),
9839 : dest, true))
9840 0 : c1 = INTVAL (XEXP (rhs, 1)), other = lhs;
9841 1890267 : else if (GET_CODE (lhs) == AND
9842 84307 : && CONST_INT_P (XEXP (lhs, 1))
9843 1964848 : && rtx_equal_for_field_assignment_p (XEXP (lhs, 0), dest))
9844 29 : c1 = INTVAL (XEXP (lhs, 1)), other = rhs;
9845 : /* The second SUBREG that might get in the way is a paradoxical
9846 : SUBREG around the first operand of the AND. We want to
9847 : pretend the operand is as wide as the destination here. We
9848 : do this by adjusting the MEM to wider mode for the sole
9849 : purpose of the call to rtx_equal_for_field_assignment_p. Also
9850 : note this trick only works for MEMs. */
9851 1890238 : else if (GET_CODE (lhs) == AND
9852 84278 : && paradoxical_subreg_p (XEXP (lhs, 0))
9853 38421 : && MEM_P (SUBREG_REG (XEXP (lhs, 0)))
9854 25346 : && CONST_INT_P (XEXP (lhs, 1))
9855 1915584 : && rtx_equal_for_field_assignment_p (SUBREG_REG (XEXP (lhs, 0)),
9856 : dest, true))
9857 0 : c1 = INTVAL (XEXP (lhs, 1)), other = rhs;
9858 : else
9859 1890238 : return x;
9860 :
9861 8468 : pos = get_pos_from_mask ((~c1) & GET_MODE_MASK (mode), &len);
9862 8468 : if (pos < 0
9863 6819 : || pos + len > GET_MODE_PRECISION (mode)
9864 6819 : || GET_MODE_PRECISION (mode) > HOST_BITS_PER_WIDE_INT
9865 15283 : || (c1 & nonzero_bits (other, mode)) != 0)
9866 2429 : return x;
9867 :
9868 6039 : assign = make_extraction (VOIDmode, dest, pos, NULL_RTX, len,
9869 : true, true, false);
9870 6039 : if (assign == 0)
9871 : return x;
9872 :
9873 : /* The mode to use for the source is the mode of the assignment, or of
9874 : what is inside a possible STRICT_LOW_PART. */
9875 12054 : machine_mode new_mode = (GET_CODE (assign) == STRICT_LOW_PART
9876 6027 : ? GET_MODE (XEXP (assign, 0)) : GET_MODE (assign));
9877 :
9878 : /* Shift OTHER right POS places and make it the source, restricting it
9879 : to the proper length and mode. */
9880 :
9881 6027 : src = canon_reg_for_combine (simplify_shift_const (NULL_RTX, LSHIFTRT,
9882 : src_mode, other, pos),
9883 : dest);
9884 12054 : src = force_to_mode (src, new_mode,
9885 : len >= HOST_BITS_PER_WIDE_INT
9886 : ? HOST_WIDE_INT_M1U
9887 6027 : : (HOST_WIDE_INT_1U << len) - 1, false);
9888 :
9889 : /* If SRC is masked by an AND that does not make a difference in
9890 : the value being stored, strip it. */
9891 6027 : if (GET_CODE (assign) == ZERO_EXTRACT
9892 5978 : && CONST_INT_P (XEXP (assign, 1))
9893 5978 : && INTVAL (XEXP (assign, 1)) < HOST_BITS_PER_WIDE_INT
9894 5978 : && GET_CODE (src) == AND
9895 0 : && CONST_INT_P (XEXP (src, 1))
9896 0 : && UINTVAL (XEXP (src, 1))
9897 0 : == (HOST_WIDE_INT_1U << INTVAL (XEXP (assign, 1))) - 1)
9898 0 : src = XEXP (src, 0);
9899 :
9900 6027 : return gen_rtx_SET (assign, src);
9901 : }
9902 :
9903 : /* See if X is of the form (+ (* a c) (* b c)) and convert to (* (+ a b) c)
9904 : if so. */
9905 :
9906 : static rtx
9907 50076813 : apply_distributive_law (rtx x)
9908 : {
9909 50076813 : enum rtx_code code = GET_CODE (x);
9910 50076813 : enum rtx_code inner_code;
9911 50076813 : rtx lhs, rhs, other;
9912 50076813 : rtx tem;
9913 :
9914 : /* Distributivity is not true for floating point as it can change the
9915 : value. So we don't do it unless -funsafe-math-optimizations. */
9916 50076813 : if (FLOAT_MODE_P (GET_MODE (x))
9917 3622836 : && ! flag_unsafe_math_optimizations)
9918 : return x;
9919 :
9920 : /* The outer operation can only be one of the following: */
9921 46884159 : if (code != IOR && code != AND && code != XOR
9922 46884159 : && code != PLUS && code != MINUS)
9923 : return x;
9924 :
9925 46870586 : lhs = XEXP (x, 0);
9926 46870586 : rhs = XEXP (x, 1);
9927 :
9928 : /* If either operand is a primitive we can't do anything, so get out
9929 : fast. */
9930 46870586 : if (OBJECT_P (lhs) || OBJECT_P (rhs))
9931 : return x;
9932 :
9933 2677409 : lhs = expand_compound_operation (lhs);
9934 2677409 : rhs = expand_compound_operation (rhs);
9935 2677409 : inner_code = GET_CODE (lhs);
9936 2677409 : if (inner_code != GET_CODE (rhs))
9937 : return x;
9938 :
9939 : /* See if the inner and outer operations distribute. */
9940 702598 : switch (inner_code)
9941 : {
9942 262219 : case LSHIFTRT:
9943 262219 : case ASHIFTRT:
9944 262219 : case AND:
9945 262219 : case IOR:
9946 : /* These all distribute except over PLUS. */
9947 262219 : if (code == PLUS || code == MINUS)
9948 : return x;
9949 : break;
9950 :
9951 93379 : case MULT:
9952 93379 : if (code != PLUS && code != MINUS)
9953 : return x;
9954 : break;
9955 :
9956 : case ASHIFT:
9957 : /* This is also a multiply, so it distributes over everything. */
9958 : break;
9959 :
9960 : /* This used to handle SUBREG, but this turned out to be counter-
9961 : productive, since (subreg (op ...)) usually is not handled by
9962 : insn patterns, and this "optimization" therefore transformed
9963 : recognizable patterns into unrecognizable ones. Therefore the
9964 : SUBREG case was removed from here.
9965 :
9966 : It is possible that distributing SUBREG over arithmetic operations
9967 : leads to an intermediate result than can then be optimized further,
9968 : e.g. by moving the outer SUBREG to the other side of a SET as done
9969 : in simplify_set. This seems to have been the original intent of
9970 : handling SUBREGs here.
9971 :
9972 : However, with current GCC this does not appear to actually happen,
9973 : at least on major platforms. If some case is found where removing
9974 : the SUBREG case here prevents follow-on optimizations, distributing
9975 : SUBREGs ought to be re-added at that place, e.g. in simplify_set. */
9976 :
9977 : default:
9978 : return x;
9979 : }
9980 :
9981 : /* Set LHS and RHS to the inner operands (A and B in the example
9982 : above) and set OTHER to the common operand (C in the example).
9983 : There is only one way to do this unless the inner operation is
9984 : commutative. */
9985 288614 : if (COMMUTATIVE_ARITH_P (lhs)
9986 288614 : && rtx_equal_p (XEXP (lhs, 0), XEXP (rhs, 0)))
9987 2241 : other = XEXP (lhs, 0), lhs = XEXP (lhs, 1), rhs = XEXP (rhs, 1);
9988 286373 : else if (COMMUTATIVE_ARITH_P (lhs)
9989 286373 : && rtx_equal_p (XEXP (lhs, 0), XEXP (rhs, 1)))
9990 15 : other = XEXP (lhs, 0), lhs = XEXP (lhs, 1), rhs = XEXP (rhs, 0);
9991 286358 : else if (COMMUTATIVE_ARITH_P (lhs)
9992 286358 : && rtx_equal_p (XEXP (lhs, 1), XEXP (rhs, 0)))
9993 10624 : other = XEXP (lhs, 1), lhs = XEXP (lhs, 0), rhs = XEXP (rhs, 1);
9994 275734 : else if (rtx_equal_p (XEXP (lhs, 1), XEXP (rhs, 1)))
9995 64427 : other = XEXP (lhs, 1), lhs = XEXP (lhs, 0), rhs = XEXP (rhs, 0);
9996 : else
9997 : return x;
9998 :
9999 : /* Form the new inner operation, seeing if it simplifies first. */
10000 77307 : tem = simplify_gen_binary (code, GET_MODE (x), lhs, rhs);
10001 :
10002 : /* There is one exception to the general way of distributing:
10003 : (a | c) ^ (b | c) -> (a ^ b) & ~c */
10004 77307 : if (code == XOR && inner_code == IOR)
10005 : {
10006 81 : inner_code = AND;
10007 81 : other = simplify_gen_unary (NOT, GET_MODE (x), other, GET_MODE (x));
10008 : }
10009 :
10010 : /* We may be able to continuing distributing the result, so call
10011 : ourselves recursively on the inner operation before forming the
10012 : outer operation, which we return. */
10013 77307 : return simplify_gen_binary (inner_code, GET_MODE (x),
10014 77307 : apply_distributive_law (tem), other);
10015 : }
10016 :
10017 : /* See if X is of the form (* (+ A B) C), and if so convert to
10018 : (+ (* A C) (* B C)) and try to simplify.
10019 :
10020 : Most of the time, this results in no change. However, if some of
10021 : the operands are the same or inverses of each other, simplifications
10022 : will result.
10023 :
10024 : For example, (and (ior A B) (not B)) can occur as the result of
10025 : expanding a bit field assignment. When we apply the distributive
10026 : law to this, we get (ior (and (A (not B))) (and (B (not B)))),
10027 : which then simplifies to (and (A (not B))).
10028 :
10029 : Note that no checks happen on the validity of applying the inverse
10030 : distributive law. This is pointless since we can do it in the
10031 : few places where this routine is called.
10032 :
10033 : N is the index of the term that is decomposed (the arithmetic operation,
10034 : i.e. (+ A B) in the first example above). !N is the index of the term that
10035 : is distributed, i.e. of C in the first example above. */
10036 : static rtx
10037 1616069 : distribute_and_simplify_rtx (rtx x, int n)
10038 : {
10039 1616069 : machine_mode mode;
10040 1616069 : enum rtx_code outer_code, inner_code;
10041 1616069 : rtx decomposed, distributed, inner_op0, inner_op1, new_op0, new_op1, tmp;
10042 :
10043 : /* Distributivity is not true for floating point as it can change the
10044 : value. So we don't do it unless -funsafe-math-optimizations. */
10045 1616069 : if (FLOAT_MODE_P (GET_MODE (x))
10046 163516 : && ! flag_unsafe_math_optimizations)
10047 : return NULL_RTX;
10048 :
10049 1456141 : decomposed = XEXP (x, n);
10050 1456141 : if (!ARITHMETIC_P (decomposed))
10051 : return NULL_RTX;
10052 :
10053 1456141 : mode = GET_MODE (x);
10054 1456141 : outer_code = GET_CODE (x);
10055 1456141 : distributed = XEXP (x, !n);
10056 :
10057 1456141 : inner_code = GET_CODE (decomposed);
10058 1456141 : inner_op0 = XEXP (decomposed, 0);
10059 1456141 : inner_op1 = XEXP (decomposed, 1);
10060 :
10061 : /* Special case (and (xor B C) (not A)), which is equivalent to
10062 : (xor (ior A B) (ior A C)) */
10063 1456141 : if (outer_code == AND && inner_code == XOR && GET_CODE (distributed) == NOT)
10064 : {
10065 76 : distributed = XEXP (distributed, 0);
10066 76 : outer_code = IOR;
10067 : }
10068 :
10069 1456141 : if (n == 0)
10070 : {
10071 : /* Distribute the second term. */
10072 1401742 : new_op0 = simplify_gen_binary (outer_code, mode, inner_op0, distributed);
10073 1401742 : new_op1 = simplify_gen_binary (outer_code, mode, inner_op1, distributed);
10074 : }
10075 : else
10076 : {
10077 : /* Distribute the first term. */
10078 54399 : new_op0 = simplify_gen_binary (outer_code, mode, distributed, inner_op0);
10079 54399 : new_op1 = simplify_gen_binary (outer_code, mode, distributed, inner_op1);
10080 : }
10081 :
10082 1456141 : tmp = apply_distributive_law (simplify_gen_binary (inner_code, mode,
10083 : new_op0, new_op1));
10084 1456141 : if (GET_CODE (tmp) != outer_code
10085 1456141 : && (set_src_cost (tmp, mode, optimize_this_for_speed_p)
10086 249365 : < set_src_cost (x, mode, optimize_this_for_speed_p)))
10087 : return tmp;
10088 :
10089 : return NULL_RTX;
10090 : }
10091 :
10092 : /* Simplify a logical `and' of VAROP with the constant CONSTOP, to be done
10093 : in MODE. Return an equivalent form, if different from (and VAROP
10094 : (const_int CONSTOP)). Otherwise, return NULL_RTX. */
10095 :
10096 : static rtx
10097 11770474 : simplify_and_const_int_1 (scalar_int_mode mode, rtx varop,
10098 : unsigned HOST_WIDE_INT constop)
10099 : {
10100 11770474 : unsigned HOST_WIDE_INT nonzero;
10101 11770474 : unsigned HOST_WIDE_INT orig_constop;
10102 11770474 : rtx orig_varop;
10103 11770474 : int i;
10104 :
10105 11770474 : orig_varop = varop;
10106 11770474 : orig_constop = constop;
10107 11770474 : if (GET_CODE (varop) == CLOBBER)
10108 : return NULL_RTX;
10109 :
10110 : /* Simplify VAROP knowing that we will be only looking at some of the
10111 : bits in it.
10112 :
10113 : Note by passing in CONSTOP, we guarantee that the bits not set in
10114 : CONSTOP are not significant and will never be examined. We must
10115 : ensure that is the case by explicitly masking out those bits
10116 : before returning. */
10117 11770465 : varop = force_to_mode (varop, mode, constop, false);
10118 :
10119 : /* If VAROP is a CLOBBER, we will fail so return it. */
10120 11770465 : if (GET_CODE (varop) == CLOBBER)
10121 : return varop;
10122 :
10123 : /* If VAROP is a CONST_INT, then we need to apply the mask in CONSTOP
10124 : to VAROP and return the new constant. */
10125 11770455 : if (CONST_INT_P (varop))
10126 312694 : return gen_int_mode (INTVAL (varop) & constop, mode);
10127 :
10128 : /* See what bits may be nonzero in VAROP. Unlike the general case of
10129 : a call to nonzero_bits, here we don't care about bits outside
10130 : MODE unless WORD_REGISTER_OPERATIONS is true. */
10131 :
10132 11457761 : scalar_int_mode tmode = mode;
10133 11457761 : if (WORD_REGISTER_OPERATIONS && GET_MODE_BITSIZE (mode) < BITS_PER_WORD)
10134 : tmode = word_mode;
10135 11457761 : nonzero = nonzero_bits (varop, tmode) & GET_MODE_MASK (tmode);
10136 :
10137 : /* Turn off all bits in the constant that are known to already be zero.
10138 : Thus, if the AND isn't needed at all, we will have CONSTOP == NONZERO_BITS
10139 : which is tested below. */
10140 :
10141 11457761 : constop &= nonzero;
10142 :
10143 : /* If we don't have any bits left, return zero. */
10144 11457761 : if (constop == 0 && !side_effects_p (varop))
10145 0 : return const0_rtx;
10146 :
10147 : /* If VAROP is a NEG of something known to be zero or 1 and CONSTOP is
10148 : a power of two, we can replace this with an ASHIFT. */
10149 35336 : if (GET_CODE (varop) == NEG && nonzero_bits (XEXP (varop, 0), tmode) == 1
10150 11464024 : && (i = exact_log2 (constop)) >= 0)
10151 122 : return simplify_shift_const (NULL_RTX, ASHIFT, mode, XEXP (varop, 0), i);
10152 :
10153 : /* If VAROP is an IOR or XOR, apply the AND to both branches of the IOR
10154 : or XOR, then try to apply the distributive law. This may eliminate
10155 : operations if either branch can be simplified because of the AND.
10156 : It may also make some cases more complex, but those cases probably
10157 : won't match a pattern either with or without this. */
10158 :
10159 11457639 : if (GET_CODE (varop) == IOR || GET_CODE (varop) == XOR)
10160 : {
10161 81946 : scalar_int_mode varop_mode = as_a <scalar_int_mode> (GET_MODE (varop));
10162 81946 : return
10163 81946 : gen_lowpart
10164 81946 : (mode,
10165 : apply_distributive_law
10166 81946 : (simplify_gen_binary (GET_CODE (varop), varop_mode,
10167 : simplify_and_const_int (NULL_RTX, varop_mode,
10168 : XEXP (varop, 0),
10169 : constop),
10170 : simplify_and_const_int (NULL_RTX, varop_mode,
10171 : XEXP (varop, 1),
10172 : constop))));
10173 : }
10174 :
10175 : /* If VAROP is PLUS, and the constant is a mask of low bits, distribute
10176 : the AND and see if one of the operands simplifies to zero. If so, we
10177 : may eliminate it. */
10178 :
10179 11375693 : if (GET_CODE (varop) == PLUS
10180 11375693 : && pow2p_hwi (constop + 1))
10181 : {
10182 437547 : rtx o0, o1;
10183 :
10184 437547 : o0 = simplify_and_const_int (NULL_RTX, mode, XEXP (varop, 0), constop);
10185 437547 : o1 = simplify_and_const_int (NULL_RTX, mode, XEXP (varop, 1), constop);
10186 437547 : if (o0 == const0_rtx)
10187 : return o1;
10188 437547 : if (o1 == const0_rtx)
10189 : return o0;
10190 : }
10191 :
10192 : /* Make a SUBREG if necessary. If we can't make it, fail. */
10193 11375627 : varop = gen_lowpart (mode, varop);
10194 11375627 : if (varop == NULL_RTX || GET_CODE (varop) == CLOBBER)
10195 : return NULL_RTX;
10196 :
10197 : /* If we are only masking insignificant bits, return VAROP. */
10198 11375627 : if (constop == nonzero)
10199 : return varop;
10200 :
10201 10950037 : if (varop == orig_varop && constop == orig_constop)
10202 : return NULL_RTX;
10203 :
10204 : /* Otherwise, return an AND. */
10205 5968785 : return simplify_gen_binary (AND, mode, varop, gen_int_mode (constop, mode));
10206 : }
10207 :
10208 :
10209 : /* We have X, a logical `and' of VAROP with the constant CONSTOP, to be done
10210 : in MODE.
10211 :
10212 : Return an equivalent form, if different from X. Otherwise, return X. If
10213 : X is zero, we are to always construct the equivalent form. */
10214 :
10215 : static rtx
10216 11770474 : simplify_and_const_int (rtx x, scalar_int_mode mode, rtx varop,
10217 : unsigned HOST_WIDE_INT constop)
10218 : {
10219 11770474 : rtx tem = simplify_and_const_int_1 (mode, varop, constop);
10220 11770474 : if (tem)
10221 : return tem;
10222 :
10223 4981261 : if (!x)
10224 1315135 : x = simplify_gen_binary (AND, GET_MODE (varop), varop,
10225 1315135 : gen_int_mode (constop, mode));
10226 4981261 : if (GET_MODE (x) != mode)
10227 2 : x = gen_lowpart (mode, x);
10228 : return x;
10229 : }
10230 :
10231 : /* Given a REG X of mode XMODE, compute which bits in X can be nonzero.
10232 : We don't care about bits outside of those defined in MODE.
10233 : We DO care about all the bits in MODE, even if XMODE is smaller than MODE.
10234 :
10235 : For most X this is simply GET_MODE_MASK (GET_MODE (MODE)), but if X is
10236 : a shift, AND, or zero_extract, we can do better. */
10237 :
10238 : static rtx
10239 431193323 : reg_nonzero_bits_for_combine (const_rtx x, scalar_int_mode xmode,
10240 : scalar_int_mode mode,
10241 : unsigned HOST_WIDE_INT *nonzero)
10242 : {
10243 431193323 : rtx tem;
10244 431193323 : reg_stat_type *rsp;
10245 :
10246 : /* If X is a register whose nonzero bits value is current, use it.
10247 : Otherwise, if X is a register whose value we can find, use that
10248 : value. Otherwise, use the previously-computed global nonzero bits
10249 : for this register. */
10250 :
10251 431193323 : rsp = ®_stat[REGNO (x)];
10252 431193323 : if (rsp->last_set_value != 0
10253 401478589 : && (rsp->last_set_mode == mode
10254 1278 : || (REGNO (x) >= FIRST_PSEUDO_REGISTER
10255 0 : && GET_MODE_CLASS (rsp->last_set_mode) == MODE_INT
10256 0 : && GET_MODE_CLASS (mode) == MODE_INT))
10257 832670634 : && ((rsp->last_set_label >= label_tick_ebb_start
10258 299565892 : && rsp->last_set_label < label_tick)
10259 380527132 : || (rsp->last_set_label == label_tick
10260 278615713 : && DF_INSN_LUID (rsp->last_set) < subst_low_luid)
10261 129043596 : || (REGNO (x) >= FIRST_PSEUDO_REGISTER
10262 128980414 : && REGNO (x) < reg_n_sets_max
10263 128980288 : && REG_N_SETS (REGNO (x)) == 1
10264 147407590 : && !REGNO_REG_SET_P
10265 : (DF_LR_IN (ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb),
10266 : REGNO (x)))))
10267 : {
10268 : /* Note that, even if the precision of last_set_mode is lower than that
10269 : of mode, record_value_for_reg invoked nonzero_bits on the register
10270 : with nonzero_bits_mode (because last_set_mode is necessarily integral
10271 : and HWI_COMPUTABLE_MODE_P in this case) so bits in nonzero_bits_mode
10272 : are all valid, hence in mode too since nonzero_bits_mode is defined
10273 : to the largest HWI_COMPUTABLE_MODE_P mode. */
10274 346048119 : *nonzero &= rsp->last_set_nonzero_bits;
10275 346048119 : return NULL;
10276 : }
10277 :
10278 85145204 : tem = get_last_value (x);
10279 85145204 : if (tem)
10280 : {
10281 : if (SHORT_IMMEDIATES_SIGN_EXTEND)
10282 : tem = sign_extend_short_imm (tem, xmode, GET_MODE_PRECISION (mode));
10283 :
10284 : return tem;
10285 : }
10286 :
10287 85145198 : if (nonzero_sign_valid && rsp->nonzero_bits)
10288 : {
10289 52436180 : unsigned HOST_WIDE_INT mask = rsp->nonzero_bits;
10290 :
10291 52436180 : if (GET_MODE_PRECISION (xmode) < GET_MODE_PRECISION (mode))
10292 : /* We don't know anything about the upper bits. */
10293 0 : mask |= GET_MODE_MASK (mode) ^ GET_MODE_MASK (xmode);
10294 :
10295 52436180 : *nonzero &= mask;
10296 : }
10297 :
10298 : return NULL;
10299 : }
10300 :
10301 : /* Given a reg X of mode XMODE, return the number of bits at the high-order
10302 : end of X that are known to be equal to the sign bit. X will be used
10303 : in mode MODE; the returned value will always be between 1 and the
10304 : number of bits in MODE. */
10305 :
10306 : static rtx
10307 125509324 : reg_num_sign_bit_copies_for_combine (const_rtx x, scalar_int_mode xmode,
10308 : scalar_int_mode mode,
10309 : unsigned int *result)
10310 : {
10311 125509324 : rtx tem;
10312 125509324 : reg_stat_type *rsp;
10313 :
10314 125509324 : rsp = ®_stat[REGNO (x)];
10315 125509324 : if (rsp->last_set_value != 0
10316 114697486 : && rsp->last_set_mode == mode
10317 240206641 : && ((rsp->last_set_label >= label_tick_ebb_start
10318 86071576 : && rsp->last_set_label < label_tick)
10319 109241963 : || (rsp->last_set_label == label_tick
10320 80616222 : && DF_INSN_LUID (rsp->last_set) < subst_low_luid)
10321 36164103 : || (REGNO (x) >= FIRST_PSEUDO_REGISTER
10322 36152229 : && REGNO (x) < reg_n_sets_max
10323 36152147 : && REG_N_SETS (REGNO (x)) == 1
10324 41434096 : && !REGNO_REG_SET_P
10325 : (DF_LR_IN (ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb),
10326 : REGNO (x)))))
10327 : {
10328 99227239 : *result = rsp->last_set_sign_bit_copies;
10329 99227239 : return NULL;
10330 : }
10331 :
10332 26282085 : tem = get_last_value (x);
10333 26282085 : if (tem != 0)
10334 : return tem;
10335 :
10336 17085908 : if (nonzero_sign_valid && rsp->sign_bit_copies != 0
10337 39405637 : && GET_MODE_PRECISION (xmode) == GET_MODE_PRECISION (mode))
10338 13123557 : *result = rsp->sign_bit_copies;
10339 :
10340 : return NULL;
10341 : }
10342 :
10343 : /* Return the number of "extended" bits there are in X, when interpreted
10344 : as a quantity in MODE whose signedness is indicated by UNSIGNEDP. For
10345 : unsigned quantities, this is the number of high-order zero bits.
10346 : For signed quantities, this is the number of copies of the sign bit
10347 : minus 1. In both case, this function returns the number of "spare"
10348 : bits. For example, if two quantities for which this function returns
10349 : at least 1 are added, the addition is known not to overflow.
10350 :
10351 : This function will always return 0 unless called during combine, which
10352 : implies that it must be called from a define_split. */
10353 :
10354 : unsigned int
10355 0 : extended_count (const_rtx x, machine_mode mode, bool unsignedp)
10356 : {
10357 0 : if (nonzero_sign_valid == 0)
10358 : return 0;
10359 :
10360 0 : scalar_int_mode int_mode;
10361 0 : return (unsignedp
10362 0 : ? (is_a <scalar_int_mode> (mode, &int_mode)
10363 0 : && HWI_COMPUTABLE_MODE_P (int_mode)
10364 0 : ? (unsigned int) (GET_MODE_PRECISION (int_mode) - 1
10365 0 : - floor_log2 (nonzero_bits (x, int_mode)))
10366 : : 0)
10367 0 : : num_sign_bit_copies (x, mode) - 1);
10368 : }
10369 :
10370 : /* This function is called from `simplify_shift_const' to merge two
10371 : outer operations. Specifically, we have already found that we need
10372 : to perform operation *POP0 with constant *PCONST0 at the outermost
10373 : position. We would now like to also perform OP1 with constant CONST1
10374 : (with *POP0 being done last).
10375 :
10376 : Return true if we can do the operation and update *POP0 and *PCONST0 with
10377 : the resulting operation. *PCOMP_P is set to true if we would need to
10378 : complement the innermost operand, otherwise it is unchanged.
10379 :
10380 : MODE is the mode in which the operation will be done. No bits outside
10381 : the width of this mode matter. It is assumed that the width of this mode
10382 : is smaller than or equal to HOST_BITS_PER_WIDE_INT.
10383 :
10384 : If *POP0 or OP1 are UNKNOWN, it means no operation is required. Only NEG, PLUS,
10385 : IOR, XOR, and AND are supported. We may set *POP0 to SET if the proper
10386 : result is simply *PCONST0.
10387 :
10388 : If the resulting operation cannot be expressed as one operation, we
10389 : return false and do not change *POP0, *PCONST0, and *PCOMP_P. */
10390 :
10391 : static bool
10392 3660389 : merge_outer_ops (enum rtx_code *pop0, HOST_WIDE_INT *pconst0,
10393 : enum rtx_code op1, HOST_WIDE_INT const1,
10394 : machine_mode mode, bool *pcomp_p)
10395 : {
10396 3660389 : enum rtx_code op0 = *pop0;
10397 3660389 : HOST_WIDE_INT const0 = *pconst0;
10398 :
10399 3660389 : const0 &= GET_MODE_MASK (mode);
10400 3660389 : const1 &= GET_MODE_MASK (mode);
10401 :
10402 : /* If OP0 is an AND, clear unimportant bits in CONST1. */
10403 3660389 : if (op0 == AND)
10404 12516 : const1 &= const0;
10405 :
10406 : /* If OP0 or OP1 is UNKNOWN, this is easy. Similarly if they are the same or
10407 : if OP0 is SET. */
10408 :
10409 3660389 : if (op1 == UNKNOWN || op0 == SET)
10410 : return true;
10411 :
10412 3660389 : else if (op0 == UNKNOWN)
10413 : op0 = op1, const0 = const1;
10414 :
10415 63996 : else if (op0 == op1)
10416 : {
10417 12250 : switch (op0)
10418 : {
10419 12245 : case AND:
10420 12245 : const0 &= const1;
10421 12245 : break;
10422 5 : case IOR:
10423 5 : const0 |= const1;
10424 5 : break;
10425 0 : case XOR:
10426 0 : const0 ^= const1;
10427 0 : break;
10428 0 : case PLUS:
10429 0 : const0 += const1;
10430 0 : break;
10431 : case NEG:
10432 3631586 : op0 = UNKNOWN;
10433 : break;
10434 : default:
10435 : break;
10436 : }
10437 : }
10438 :
10439 : /* Otherwise, if either is a PLUS or NEG, we can't do anything. */
10440 51746 : else if (op0 == PLUS || op1 == PLUS || op0 == NEG || op1 == NEG)
10441 : return false;
10442 :
10443 : /* If the two constants aren't the same, we can't do anything. The
10444 : remaining six cases can all be done. */
10445 24259 : else if (const0 != const1)
10446 : return false;
10447 :
10448 : else
10449 22943 : switch (op0)
10450 : {
10451 8 : case IOR:
10452 8 : if (op1 == AND)
10453 : /* (a & b) | b == b */
10454 0 : op0 = SET;
10455 : else /* op1 == XOR */
10456 : /* (a ^ b) | b == a | b */
10457 : {;}
10458 : break;
10459 :
10460 22668 : case XOR:
10461 22668 : if (op1 == AND)
10462 : /* (a & b) ^ b == (~a) & b */
10463 22668 : op0 = AND, *pcomp_p = true;
10464 : else /* op1 == IOR */
10465 : /* (a | b) ^ b == a & ~b */
10466 0 : op0 = AND, const0 = ~const0;
10467 : break;
10468 :
10469 267 : case AND:
10470 267 : if (op1 == IOR)
10471 : /* (a | b) & b == b */
10472 : op0 = SET;
10473 : else /* op1 == XOR */
10474 : /* (a ^ b) & b) == (~a) & b */
10475 267 : *pcomp_p = true;
10476 : break;
10477 : default:
10478 : break;
10479 : }
10480 :
10481 : /* Check for NO-OP cases. */
10482 3631586 : const0 &= GET_MODE_MASK (mode);
10483 3631586 : if (const0 == 0
10484 21208 : && (op0 == IOR || op0 == XOR || op0 == PLUS))
10485 : op0 = UNKNOWN;
10486 3629271 : else if (const0 == 0 && op0 == AND)
10487 : op0 = SET;
10488 3629271 : else if ((unsigned HOST_WIDE_INT) const0 == GET_MODE_MASK (mode)
10489 15266 : && op0 == AND)
10490 3631586 : op0 = UNKNOWN;
10491 :
10492 3631586 : *pop0 = op0;
10493 :
10494 : /* ??? Slightly redundant with the above mask, but not entirely.
10495 : Moving this above means we'd have to sign-extend the mode mask
10496 : for the final test. */
10497 3631586 : if (op0 != UNKNOWN && op0 != NEG)
10498 3602169 : *pconst0 = trunc_int_for_mode (const0, mode);
10499 :
10500 : return true;
10501 : }
10502 :
10503 : /* A helper to simplify_shift_const_1 to determine the mode we can perform
10504 : the shift in. The original shift operation CODE is performed on OP in
10505 : ORIG_MODE. Return the wider mode MODE if we can perform the operation
10506 : in that mode. Return ORIG_MODE otherwise. We can also assume that the
10507 : result of the shift is subject to operation OUTER_CODE with operand
10508 : OUTER_CONST. */
10509 :
10510 : static scalar_int_mode
10511 297482 : try_widen_shift_mode (enum rtx_code code, rtx op, int count,
10512 : scalar_int_mode orig_mode, scalar_int_mode mode,
10513 : enum rtx_code outer_code, HOST_WIDE_INT outer_const)
10514 : {
10515 297482 : gcc_assert (GET_MODE_PRECISION (mode) > GET_MODE_PRECISION (orig_mode));
10516 :
10517 : /* In general we can't perform in wider mode for right shift and rotate. */
10518 297482 : switch (code)
10519 : {
10520 31642 : case ASHIFTRT:
10521 : /* We can still widen if the bits brought in from the left are identical
10522 : to the sign bit of ORIG_MODE. */
10523 31642 : if (num_sign_bit_copies (op, mode)
10524 31642 : > (unsigned) (GET_MODE_PRECISION (mode)
10525 31642 : - GET_MODE_PRECISION (orig_mode)))
10526 352 : return mode;
10527 31290 : return orig_mode;
10528 :
10529 36056 : case LSHIFTRT:
10530 : /* Similarly here but with zero bits. */
10531 36056 : if (HWI_COMPUTABLE_MODE_P (mode)
10532 36056 : && (nonzero_bits (op, mode) & ~GET_MODE_MASK (orig_mode)) == 0)
10533 4159 : return mode;
10534 :
10535 : /* We can also widen if the bits brought in will be masked off. This
10536 : operation is performed in ORIG_MODE. */
10537 31897 : if (outer_code == AND)
10538 : {
10539 8392 : int care_bits = low_bitmask_len (orig_mode, outer_const);
10540 :
10541 8392 : if (care_bits >= 0
10542 8392 : && GET_MODE_PRECISION (orig_mode) - care_bits >= count)
10543 8374 : return mode;
10544 : }
10545 : /* fall through */
10546 :
10547 24187 : case ROTATE:
10548 24187 : return orig_mode;
10549 :
10550 0 : case ROTATERT:
10551 0 : gcc_unreachable ();
10552 :
10553 229120 : default:
10554 229120 : return mode;
10555 : }
10556 : }
10557 :
10558 : /* Simplify a shift of VAROP by ORIG_COUNT bits. CODE says what kind
10559 : of shift. The result of the shift is RESULT_MODE. Return NULL_RTX
10560 : if we cannot simplify it. Otherwise, return a simplified value.
10561 :
10562 : The shift is normally computed in the widest mode we find in VAROP, as
10563 : long as it isn't a different number of words than RESULT_MODE. Exceptions
10564 : are ASHIFTRT and ROTATE, which are always done in their original mode. */
10565 :
10566 : static rtx
10567 23597496 : simplify_shift_const_1 (enum rtx_code code, machine_mode result_mode,
10568 : rtx varop, int orig_count)
10569 : {
10570 23597496 : enum rtx_code orig_code = code;
10571 23597496 : rtx orig_varop = varop;
10572 23597496 : int count, log2;
10573 23597496 : machine_mode mode = result_mode;
10574 23597496 : machine_mode shift_mode;
10575 23597496 : scalar_int_mode tmode, inner_mode, int_mode, int_varop_mode, int_result_mode;
10576 : /* We form (outer_op (code varop count) (outer_const)). */
10577 23597496 : enum rtx_code outer_op = UNKNOWN;
10578 23597496 : HOST_WIDE_INT outer_const = 0;
10579 23597496 : bool complement_p = false;
10580 23597496 : rtx new_rtx, x;
10581 :
10582 : /* Make sure and truncate the "natural" shift on the way in. We don't
10583 : want to do this inside the loop as it makes it more difficult to
10584 : combine shifts. */
10585 23597496 : if (SHIFT_COUNT_TRUNCATED)
10586 : orig_count &= GET_MODE_UNIT_BITSIZE (mode) - 1;
10587 :
10588 : /* If we were given an invalid count, don't do anything except exactly
10589 : what was requested. */
10590 :
10591 47194892 : if (orig_count < 0 || orig_count >= (int) GET_MODE_UNIT_PRECISION (mode))
10592 : return NULL_RTX;
10593 :
10594 : count = orig_count;
10595 :
10596 : /* Unless one of the branches of the `if' in this loop does a `continue',
10597 : we will `break' the loop after the `if'. */
10598 :
10599 27534609 : while (count != 0)
10600 : {
10601 : /* If we have an operand of (clobber (const_int 0)), fail. */
10602 23764919 : if (GET_CODE (varop) == CLOBBER)
10603 23597496 : return NULL_RTX;
10604 :
10605 : /* Convert ROTATERT to ROTATE. */
10606 23764919 : if (code == ROTATERT)
10607 : {
10608 11095 : unsigned int bitsize = GET_MODE_UNIT_PRECISION (result_mode);
10609 11095 : code = ROTATE;
10610 11095 : count = bitsize - count;
10611 : }
10612 :
10613 23764919 : shift_mode = result_mode;
10614 23764919 : if (shift_mode != mode)
10615 : {
10616 : /* We only change the modes of scalar shifts. */
10617 150934 : int_mode = as_a <scalar_int_mode> (mode);
10618 150934 : int_result_mode = as_a <scalar_int_mode> (result_mode);
10619 150934 : shift_mode = try_widen_shift_mode (code, varop, count,
10620 : int_result_mode, int_mode,
10621 : outer_op, outer_const);
10622 : }
10623 :
10624 23764919 : scalar_int_mode shift_unit_mode;
10625 67365366 : if (!is_a <scalar_int_mode> (GET_MODE_INNER (shift_mode),
10626 : &shift_unit_mode))
10627 : return NULL_RTX;
10628 :
10629 : /* Handle cases where the count is greater than the size of the mode
10630 : minus 1. For ASHIFT, use the size minus one as the count (this can
10631 : occur when simplifying (lshiftrt (ashiftrt ..))). For rotates,
10632 : take the count modulo the size. For other shifts, the result is
10633 : zero.
10634 :
10635 : Since these shifts are being produced by the compiler by combining
10636 : multiple operations, each of which are defined, we know what the
10637 : result is supposed to be. */
10638 :
10639 23764919 : if (count > (GET_MODE_PRECISION (shift_unit_mode) - 1))
10640 : {
10641 8911 : if (code == ASHIFTRT)
10642 8905 : count = GET_MODE_PRECISION (shift_unit_mode) - 1;
10643 6 : else if (code == ROTATE || code == ROTATERT)
10644 6 : count %= GET_MODE_PRECISION (shift_unit_mode);
10645 : else
10646 : {
10647 : /* We can't simply return zero because there may be an
10648 : outer op. */
10649 0 : varop = const0_rtx;
10650 0 : count = 0;
10651 0 : break;
10652 : }
10653 : }
10654 :
10655 : /* If we discovered we had to complement VAROP, leave. Making a NOT
10656 : here would cause an infinite loop. */
10657 23764919 : if (complement_p)
10658 : break;
10659 :
10660 23752108 : if (shift_mode == shift_unit_mode)
10661 : {
10662 : /* An arithmetic right shift of a quantity known to be -1 or 0
10663 : is a no-op. */
10664 23155226 : if (code == ASHIFTRT
10665 23155226 : && (num_sign_bit_copies (varop, shift_unit_mode)
10666 4204025 : == GET_MODE_PRECISION (shift_unit_mode)))
10667 : {
10668 : count = 0;
10669 : break;
10670 : }
10671 :
10672 : /* If we are doing an arithmetic right shift and discarding all but
10673 : the sign bit copies, this is equivalent to doing a shift by the
10674 : bitsize minus one. Convert it into that shift because it will
10675 : often allow other simplifications. */
10676 :
10677 23155156 : if (code == ASHIFTRT
10678 23155156 : && (count + num_sign_bit_copies (varop, shift_unit_mode)
10679 4203955 : >= GET_MODE_PRECISION (shift_unit_mode)))
10680 270621 : count = GET_MODE_PRECISION (shift_unit_mode) - 1;
10681 :
10682 : /* We simplify the tests below and elsewhere by converting
10683 : ASHIFTRT to LSHIFTRT if we know the sign bit is clear.
10684 : `make_compound_operation' will convert it to an ASHIFTRT for
10685 : those machines (such as VAX) that don't have an LSHIFTRT. */
10686 23155156 : if (code == ASHIFTRT
10687 4203955 : && HWI_COMPUTABLE_MODE_P (shift_unit_mode)
10688 27333896 : && val_signbit_known_clear_p (shift_unit_mode,
10689 : nonzero_bits (varop,
10690 : shift_unit_mode)))
10691 : code = LSHIFTRT;
10692 :
10693 23126066 : if (((code == LSHIFTRT
10694 5773855 : && HWI_COMPUTABLE_MODE_P (shift_unit_mode)
10695 5754734 : && !(nonzero_bits (varop, shift_unit_mode) >> count))
10696 23153390 : || (code == ASHIFT
10697 13175768 : && HWI_COMPUTABLE_MODE_P (shift_unit_mode)
10698 12731095 : && !((nonzero_bits (varop, shift_unit_mode) << count)
10699 12731095 : & GET_MODE_MASK (shift_unit_mode))))
10700 23130081 : && !side_effects_p (varop))
10701 4015 : varop = const0_rtx;
10702 : }
10703 :
10704 23752038 : switch (GET_CODE (varop))
10705 : {
10706 479763 : case SIGN_EXTEND:
10707 479763 : case ZERO_EXTEND:
10708 479763 : case SIGN_EXTRACT:
10709 479763 : case ZERO_EXTRACT:
10710 479763 : new_rtx = expand_compound_operation (varop);
10711 479763 : if (new_rtx != varop)
10712 : {
10713 66661 : varop = new_rtx;
10714 27601270 : continue;
10715 : }
10716 : break;
10717 :
10718 256911 : case MEM:
10719 : /* The following rules apply only to scalars. */
10720 256911 : if (shift_mode != shift_unit_mode)
10721 : break;
10722 242791 : int_mode = as_a <scalar_int_mode> (mode);
10723 :
10724 : /* If we have (xshiftrt (mem ...) C) and C is MODE_WIDTH
10725 : minus the width of a smaller mode, we can do this with a
10726 : SIGN_EXTEND or ZERO_EXTEND from the narrower memory location. */
10727 247576 : if ((code == ASHIFTRT || code == LSHIFTRT)
10728 77707 : && ! mode_dependent_address_p (XEXP (varop, 0),
10729 77707 : MEM_ADDR_SPACE (varop))
10730 77707 : && ! MEM_VOLATILE_P (varop)
10731 318936 : && (int_mode_for_size (GET_MODE_BITSIZE (int_mode) - count, 1)
10732 238006 : .exists (&tmode)))
10733 : {
10734 4785 : new_rtx = adjust_address_nv (varop, tmode,
10735 : BYTES_BIG_ENDIAN ? 0
10736 : : count / BITS_PER_UNIT);
10737 :
10738 4785 : varop = gen_rtx_fmt_e (code == ASHIFTRT ? SIGN_EXTEND
10739 : : ZERO_EXTEND, int_mode, new_rtx);
10740 4785 : count = 0;
10741 4785 : continue;
10742 : }
10743 : break;
10744 :
10745 4740400 : case SUBREG:
10746 : /* The following rules apply only to scalars. */
10747 4740400 : if (shift_mode != shift_unit_mode)
10748 : break;
10749 4334781 : int_mode = as_a <scalar_int_mode> (mode);
10750 4334781 : int_varop_mode = as_a <scalar_int_mode> (GET_MODE (varop));
10751 :
10752 : /* If VAROP is a SUBREG, strip it as long as the inner operand has
10753 : the same number of words as what we've seen so far. Then store
10754 : the widest mode in MODE. */
10755 4334781 : if (subreg_lowpart_p (varop)
10756 27917789 : && is_int_mode (GET_MODE (SUBREG_REG (varop)), &inner_mode)
10757 8621276 : && GET_MODE_SIZE (inner_mode) > GET_MODE_SIZE (int_varop_mode)
10758 180111 : && (CEIL (GET_MODE_SIZE (inner_mode), UNITS_PER_WORD)
10759 163967 : == CEIL (GET_MODE_SIZE (int_mode), UNITS_PER_WORD))
10760 4481342 : && GET_MODE_CLASS (int_varop_mode) == MODE_INT)
10761 : {
10762 146561 : varop = SUBREG_REG (varop);
10763 439683 : if (GET_MODE_SIZE (inner_mode) > GET_MODE_SIZE (int_mode))
10764 146561 : mode = inner_mode;
10765 146561 : continue;
10766 : }
10767 : break;
10768 :
10769 407597 : case MULT:
10770 : /* Some machines use MULT instead of ASHIFT because MULT
10771 : is cheaper. But it is still better on those machines to
10772 : merge two shifts into one. */
10773 407597 : if (CONST_INT_P (XEXP (varop, 1))
10774 407597 : && (log2 = exact_log2 (UINTVAL (XEXP (varop, 1)))) >= 0)
10775 : {
10776 0 : rtx log2_rtx = gen_int_shift_amount (GET_MODE (varop), log2);
10777 0 : varop = simplify_gen_binary (ASHIFT, GET_MODE (varop),
10778 : XEXP (varop, 0), log2_rtx);
10779 0 : continue;
10780 0 : }
10781 : break;
10782 :
10783 8822 : case UDIV:
10784 : /* Similar, for when divides are cheaper. */
10785 8822 : if (CONST_INT_P (XEXP (varop, 1))
10786 8822 : && (log2 = exact_log2 (UINTVAL (XEXP (varop, 1)))) >= 0)
10787 : {
10788 9 : rtx log2_rtx = gen_int_shift_amount (GET_MODE (varop), log2);
10789 9 : varop = simplify_gen_binary (LSHIFTRT, GET_MODE (varop),
10790 : XEXP (varop, 0), log2_rtx);
10791 9 : continue;
10792 9 : }
10793 : break;
10794 :
10795 343505 : case ASHIFTRT:
10796 : /* If we are extracting just the sign bit of an arithmetic
10797 : right shift, that shift is not needed. However, the sign
10798 : bit of a wider mode may be different from what would be
10799 : interpreted as the sign bit in a narrower mode, so, if
10800 : the result is narrower, don't discard the shift. */
10801 345360 : if (code == LSHIFTRT
10802 14675 : && count == (GET_MODE_UNIT_BITSIZE (result_mode) - 1)
10803 343505 : && (GET_MODE_UNIT_BITSIZE (result_mode)
10804 3736 : >= GET_MODE_UNIT_BITSIZE (GET_MODE (varop))))
10805 : {
10806 1855 : varop = XEXP (varop, 0);
10807 1855 : continue;
10808 : }
10809 :
10810 : /* fall through */
10811 :
10812 5769748 : case LSHIFTRT:
10813 5769748 : case ASHIFT:
10814 5769748 : case ROTATE:
10815 : /* The following rules apply only to scalars. */
10816 5769748 : if (shift_mode != shift_unit_mode)
10817 : break;
10818 5761969 : int_mode = as_a <scalar_int_mode> (mode);
10819 5761969 : int_varop_mode = as_a <scalar_int_mode> (GET_MODE (varop));
10820 5761969 : int_result_mode = as_a <scalar_int_mode> (result_mode);
10821 :
10822 : /* Here we have two nested shifts. The result is usually the
10823 : AND of a new shift with a mask. We compute the result below. */
10824 5761969 : if (CONST_INT_P (XEXP (varop, 1))
10825 5741922 : && INTVAL (XEXP (varop, 1)) >= 0
10826 5741919 : && INTVAL (XEXP (varop, 1)) < GET_MODE_PRECISION (int_varop_mode)
10827 5741919 : && HWI_COMPUTABLE_MODE_P (int_result_mode)
10828 11470707 : && HWI_COMPUTABLE_MODE_P (int_mode))
10829 : {
10830 5708738 : enum rtx_code first_code = GET_CODE (varop);
10831 5708738 : unsigned int first_count = INTVAL (XEXP (varop, 1));
10832 5708738 : unsigned HOST_WIDE_INT mask;
10833 5708738 : rtx mask_rtx;
10834 :
10835 : /* We have one common special case. We can't do any merging if
10836 : the inner code is an ASHIFTRT of a smaller mode. However, if
10837 : we have (ashift:M1 (subreg:M1 (ashiftrt:M2 FOO C1) 0) C2)
10838 : with C2 == GET_MODE_BITSIZE (M1) - GET_MODE_BITSIZE (M2),
10839 : we can convert it to
10840 : (ashiftrt:M1 (ashift:M1 (and:M1 (subreg:M1 FOO 0) C3) C2) C1).
10841 : This simplifies certain SIGN_EXTEND operations. */
10842 5708738 : if (code == ASHIFT && first_code == ASHIFTRT
10843 5708738 : && count == (GET_MODE_PRECISION (int_result_mode)
10844 309637 : - GET_MODE_PRECISION (int_varop_mode)))
10845 : {
10846 : /* C3 has the low-order C1 bits zero. */
10847 :
10848 0 : mask = GET_MODE_MASK (int_mode)
10849 0 : & ~((HOST_WIDE_INT_1U << first_count) - 1);
10850 :
10851 0 : varop = simplify_and_const_int (NULL_RTX, int_result_mode,
10852 : XEXP (varop, 0), mask);
10853 0 : varop = simplify_shift_const (NULL_RTX, ASHIFT,
10854 : int_result_mode, varop, count);
10855 0 : count = first_count;
10856 0 : code = ASHIFTRT;
10857 0 : continue;
10858 : }
10859 :
10860 : /* If this was (ashiftrt (ashift foo C1) C2) and FOO has more
10861 : than C1 high-order bits equal to the sign bit, we can convert
10862 : this to either an ASHIFT or an ASHIFTRT depending on the
10863 : two counts.
10864 :
10865 : We cannot do this if VAROP's mode is not SHIFT_UNIT_MODE. */
10866 :
10867 5710212 : if (code == ASHIFTRT && first_code == ASHIFT
10868 2813382 : && int_varop_mode == shift_unit_mode
10869 8515640 : && (num_sign_bit_copies (XEXP (varop, 0), shift_unit_mode)
10870 : > first_count))
10871 : {
10872 1474 : varop = XEXP (varop, 0);
10873 1474 : count -= first_count;
10874 1474 : if (count < 0)
10875 : {
10876 0 : count = -count;
10877 0 : code = ASHIFT;
10878 : }
10879 :
10880 1474 : continue;
10881 : }
10882 :
10883 : /* There are some cases we can't do. If CODE is ASHIFTRT,
10884 : we can only do this if FIRST_CODE is also ASHIFTRT.
10885 :
10886 : We can't do the case when CODE is ROTATE and FIRST_CODE is
10887 : ASHIFTRT.
10888 :
10889 : If the mode of this shift is not the mode of the outer shift,
10890 : we can't do this if either shift is a right shift or ROTATE.
10891 :
10892 : Finally, we can't do any of these if the mode is too wide
10893 : unless the codes are the same.
10894 :
10895 : Handle the case where the shift codes are the same
10896 : first. */
10897 :
10898 5707264 : if (code == first_code)
10899 : {
10900 22960 : if (int_varop_mode != int_result_mode
10901 22960 : && (code == ASHIFTRT || code == LSHIFTRT
10902 77 : || code == ROTATE))
10903 : break;
10904 :
10905 22916 : count += first_count;
10906 22916 : varop = XEXP (varop, 0);
10907 22916 : continue;
10908 : }
10909 :
10910 5684304 : if (code == ASHIFTRT
10911 2872360 : || (code == ROTATE && first_code == ASHIFTRT)
10912 2872330 : || GET_MODE_PRECISION (int_mode) > HOST_BITS_PER_WIDE_INT
10913 8556634 : || (int_varop_mode != int_result_mode
10914 52957 : && (first_code == ASHIFTRT || first_code == LSHIFTRT
10915 52957 : || first_code == ROTATE
10916 8933 : || code == ROTATE)))
10917 : break;
10918 :
10919 : /* To compute the mask to apply after the shift, shift the
10920 : nonzero bits of the inner shift the same way the
10921 : outer shift will. */
10922 :
10923 2828306 : mask_rtx = gen_int_mode (nonzero_bits (varop, int_varop_mode),
10924 : int_result_mode);
10925 2828306 : rtx count_rtx = gen_int_shift_amount (int_result_mode, count);
10926 2828306 : mask_rtx
10927 2828306 : = simplify_const_binary_operation (code, int_result_mode,
10928 : mask_rtx, count_rtx);
10929 :
10930 : /* Give up if we can't compute an outer operation to use. */
10931 2828306 : if (mask_rtx == 0
10932 2828306 : || !CONST_INT_P (mask_rtx)
10933 5656612 : || ! merge_outer_ops (&outer_op, &outer_const, AND,
10934 : INTVAL (mask_rtx),
10935 : int_result_mode, &complement_p))
10936 : break;
10937 :
10938 : /* If the shifts are in the same direction, we add the
10939 : counts. Otherwise, we subtract them. */
10940 2801374 : if ((code == ASHIFTRT || code == LSHIFTRT)
10941 2801374 : == (first_code == ASHIFTRT || first_code == LSHIFTRT))
10942 11443 : count += first_count;
10943 : else
10944 2789931 : count -= first_count;
10945 :
10946 : /* If COUNT is positive, the new shift is usually CODE,
10947 : except for the two exceptions below, in which case it is
10948 : FIRST_CODE. If the count is negative, FIRST_CODE should
10949 : always be used */
10950 2801374 : if (count > 0
10951 672796 : && ((first_code == ROTATE && code == ASHIFT)
10952 672025 : || (first_code == ASHIFTRT && code == LSHIFTRT)))
10953 : code = first_code;
10954 2789937 : else if (count < 0)
10955 286817 : code = first_code, count = -count;
10956 :
10957 2801374 : varop = XEXP (varop, 0);
10958 2801374 : continue;
10959 2801374 : }
10960 :
10961 : /* If we have (A << B << C) for any shift, we can convert this to
10962 : (A << C << B). This wins if A is a constant. Only try this if
10963 : B is not a constant. */
10964 :
10965 53231 : else if (GET_CODE (varop) == code
10966 5174 : && CONST_INT_P (XEXP (varop, 0))
10967 1013 : && !CONST_INT_P (XEXP (varop, 1)))
10968 : {
10969 : /* For ((unsigned) (cstULL >> count)) >> cst2 we have to make
10970 : sure the result will be masked. See PR70222. */
10971 1013 : if (code == LSHIFTRT
10972 7 : && int_mode != int_result_mode
10973 1020 : && !merge_outer_ops (&outer_op, &outer_const, AND,
10974 7 : GET_MODE_MASK (int_result_mode)
10975 7 : >> orig_count, int_result_mode,
10976 : &complement_p))
10977 : break;
10978 : /* For ((int) (cstLL >> count)) >> cst2 just give up. Queuing
10979 : up outer sign extension (often left and right shift) is
10980 : hardly more efficient than the original. See PR70429.
10981 : Similarly punt for rotates with different modes.
10982 : See PR97386. */
10983 1013 : if ((code == ASHIFTRT || code == ROTATE)
10984 1013 : && int_mode != int_result_mode)
10985 : break;
10986 :
10987 999 : rtx count_rtx = gen_int_shift_amount (int_result_mode, count);
10988 999 : rtx new_rtx = simplify_const_binary_operation (code, int_mode,
10989 : XEXP (varop, 0),
10990 : count_rtx);
10991 999 : varop = gen_rtx_fmt_ee (code, int_mode, new_rtx, XEXP (varop, 1));
10992 999 : count = 0;
10993 999 : continue;
10994 999 : }
10995 : break;
10996 :
10997 55074 : case NOT:
10998 : /* The following rules apply only to scalars. */
10999 55074 : if (shift_mode != shift_unit_mode)
11000 : break;
11001 :
11002 : /* Make this fit the case below. */
11003 55042 : varop = gen_rtx_XOR (mode, XEXP (varop, 0), constm1_rtx);
11004 55042 : continue;
11005 :
11006 773737 : case IOR:
11007 773737 : case AND:
11008 773737 : case XOR:
11009 : /* The following rules apply only to scalars. */
11010 773737 : if (shift_mode != shift_unit_mode)
11011 : break;
11012 772023 : int_varop_mode = as_a <scalar_int_mode> (GET_MODE (varop));
11013 772023 : int_result_mode = as_a <scalar_int_mode> (result_mode);
11014 :
11015 : /* If we have (xshiftrt (ior (plus X (const_int -1)) X) C)
11016 : with C the size of VAROP - 1 and the shift is logical if
11017 : STORE_FLAG_VALUE is 1 and arithmetic if STORE_FLAG_VALUE is -1,
11018 : we have an (le X 0) operation. If we have an arithmetic shift
11019 : and STORE_FLAG_VALUE is 1 or we have a logical shift with
11020 : STORE_FLAG_VALUE of -1, we have a (neg (le X 0)) operation. */
11021 :
11022 248951 : if (GET_CODE (varop) == IOR && GET_CODE (XEXP (varop, 0)) == PLUS
11023 1962 : && XEXP (XEXP (varop, 0), 1) == constm1_rtx
11024 : && (STORE_FLAG_VALUE == 1 || STORE_FLAG_VALUE == -1)
11025 221 : && (code == LSHIFTRT || code == ASHIFTRT)
11026 221 : && count == (GET_MODE_PRECISION (int_varop_mode) - 1)
11027 772244 : && rtx_equal_p (XEXP (XEXP (varop, 0), 0), XEXP (varop, 1)))
11028 : {
11029 53 : count = 0;
11030 53 : varop = gen_rtx_LE (int_varop_mode, XEXP (varop, 1),
11031 : const0_rtx);
11032 :
11033 53 : if (STORE_FLAG_VALUE == 1 ? code == ASHIFTRT : code == LSHIFTRT)
11034 53 : varop = gen_rtx_NEG (int_varop_mode, varop);
11035 :
11036 53 : continue;
11037 : }
11038 :
11039 : /* If we have (shift (logical)), move the logical to the outside
11040 : to allow it to possibly combine with another logical and the
11041 : shift to combine with another shift. This also canonicalizes to
11042 : what a ZERO_EXTRACT looks like. Also, some machines have
11043 : (and (shift)) insns. */
11044 :
11045 1191470 : if (CONST_INT_P (XEXP (varop, 1))
11046 : /* We can't do this if we have (ashiftrt (xor)) and the
11047 : constant has its sign bit set in shift_unit_mode with
11048 : shift_unit_mode wider than result_mode. */
11049 421195 : && !(code == ASHIFTRT && GET_CODE (varop) == XOR
11050 7237 : && int_result_mode != shift_unit_mode
11051 0 : && trunc_int_for_mode (INTVAL (XEXP (varop, 1)),
11052 : shift_unit_mode) < 0)
11053 421195 : && (new_rtx = simplify_const_binary_operation
11054 421195 : (code, int_result_mode,
11055 421195 : gen_int_mode (INTVAL (XEXP (varop, 1)), int_result_mode),
11056 421195 : gen_int_shift_amount (int_result_mode, count))) != 0
11057 421195 : && CONST_INT_P (new_rtx)
11058 1193165 : && merge_outer_ops (&outer_op, &outer_const, GET_CODE (varop),
11059 : INTVAL (new_rtx), int_result_mode,
11060 : &complement_p))
11061 : {
11062 419500 : varop = XEXP (varop, 0);
11063 419500 : continue;
11064 : }
11065 :
11066 : /* If we can't do that, try to simplify the shift in each arm of the
11067 : logical expression, make a new logical expression, and apply
11068 : the inverse distributive law. This also can't be done for
11069 : (ashiftrt (xor)) where we've widened the shift and the constant
11070 : changes the sign bit. */
11071 352470 : if (CONST_INT_P (XEXP (varop, 1))
11072 352470 : && !(code == ASHIFTRT && GET_CODE (varop) == XOR
11073 48 : && int_result_mode != shift_unit_mode
11074 0 : && trunc_int_for_mode (INTVAL (XEXP (varop, 1)),
11075 : shift_unit_mode) < 0))
11076 : {
11077 1695 : rtx lhs = simplify_shift_const (NULL_RTX, code, shift_unit_mode,
11078 : XEXP (varop, 0), count);
11079 1695 : rtx rhs = simplify_shift_const (NULL_RTX, code, shift_unit_mode,
11080 : XEXP (varop, 1), count);
11081 :
11082 1695 : varop = simplify_gen_binary (GET_CODE (varop), shift_unit_mode,
11083 : lhs, rhs);
11084 1695 : varop = apply_distributive_law (varop);
11085 :
11086 1695 : count = 0;
11087 1695 : continue;
11088 1695 : }
11089 : break;
11090 :
11091 30705 : case EQ:
11092 : /* The following rules apply only to scalars. */
11093 30705 : if (shift_mode != shift_unit_mode)
11094 : break;
11095 30705 : int_result_mode = as_a <scalar_int_mode> (result_mode);
11096 :
11097 : /* Convert (lshiftrt (eq FOO 0) C) to (xor FOO 1) if STORE_FLAG_VALUE
11098 : says that the sign bit can be tested, FOO has mode MODE, C is
11099 : GET_MODE_PRECISION (MODE) - 1, and FOO has only its low-order bit
11100 : that may be nonzero. */
11101 30705 : if (code == LSHIFTRT
11102 : && XEXP (varop, 1) == const0_rtx
11103 : && GET_MODE (XEXP (varop, 0)) == int_result_mode
11104 : && count == (GET_MODE_PRECISION (int_result_mode) - 1)
11105 : && HWI_COMPUTABLE_MODE_P (int_result_mode)
11106 : && STORE_FLAG_VALUE == -1
11107 : && nonzero_bits (XEXP (varop, 0), int_result_mode) == 1
11108 : && merge_outer_ops (&outer_op, &outer_const, XOR, 1,
11109 : int_result_mode, &complement_p))
11110 : {
11111 : varop = XEXP (varop, 0);
11112 : count = 0;
11113 : continue;
11114 : }
11115 : break;
11116 :
11117 28207 : case NEG:
11118 : /* The following rules apply only to scalars. */
11119 28207 : if (shift_mode != shift_unit_mode)
11120 : break;
11121 28063 : int_result_mode = as_a <scalar_int_mode> (result_mode);
11122 :
11123 : /* (lshiftrt (neg A) C) where A is either 0 or 1 and C is one less
11124 : than the number of bits in the mode is equivalent to A. */
11125 28068 : if (code == LSHIFTRT
11126 5859 : && count == (GET_MODE_PRECISION (int_result_mode) - 1)
11127 30604 : && nonzero_bits (XEXP (varop, 0), int_result_mode) == 1)
11128 : {
11129 5 : varop = XEXP (varop, 0);
11130 5 : count = 0;
11131 5 : continue;
11132 : }
11133 :
11134 : /* NEG commutes with ASHIFT since it is multiplication. Move the
11135 : NEG outside to allow shifts to combine. */
11136 46951 : if (code == ASHIFT
11137 28058 : && merge_outer_ops (&outer_op, &outer_const, NEG, 0,
11138 : int_result_mode, &complement_p))
11139 : {
11140 18893 : varop = XEXP (varop, 0);
11141 18893 : continue;
11142 : }
11143 : break;
11144 :
11145 1911712 : case PLUS:
11146 : /* The following rules apply only to scalars. */
11147 1911712 : if (shift_mode != shift_unit_mode)
11148 : break;
11149 1867200 : int_result_mode = as_a <scalar_int_mode> (result_mode);
11150 :
11151 : /* (lshiftrt (plus A -1) C) where A is either 0 or 1 and C
11152 : is one less than the number of bits in the mode is
11153 : equivalent to (xor A 1). */
11154 1867200 : if (code == LSHIFTRT
11155 374268 : && count == (GET_MODE_PRECISION (int_result_mode) - 1)
11156 30781 : && XEXP (varop, 1) == constm1_rtx
11157 13791 : && nonzero_bits (XEXP (varop, 0), int_result_mode) == 1
11158 1867200 : && merge_outer_ops (&outer_op, &outer_const, XOR, 1,
11159 : int_result_mode, &complement_p))
11160 : {
11161 0 : count = 0;
11162 0 : varop = XEXP (varop, 0);
11163 0 : continue;
11164 : }
11165 :
11166 : /* If we have (xshiftrt (plus FOO BAR) C), and the only bits
11167 : that might be nonzero in BAR are those being shifted out and those
11168 : bits are known zero in FOO, we can replace the PLUS with FOO.
11169 : Similarly in the other operand order. This code occurs when
11170 : we are computing the size of a variable-size array. */
11171 :
11172 1870365 : if ((code == ASHIFTRT || code == LSHIFTRT)
11173 535388 : && count < HOST_BITS_PER_WIDE_INT
11174 535304 : && nonzero_bits (XEXP (varop, 1), int_result_mode) >> count == 0
11175 2013277 : && (nonzero_bits (XEXP (varop, 1), int_result_mode)
11176 146077 : & nonzero_bits (XEXP (varop, 0), int_result_mode)) == 0)
11177 : {
11178 3165 : varop = XEXP (varop, 0);
11179 3165 : continue;
11180 : }
11181 1864066 : else if ((code == ASHIFTRT || code == LSHIFTRT)
11182 532223 : && count < HOST_BITS_PER_WIDE_INT
11183 532139 : && HWI_COMPUTABLE_MODE_P (int_result_mode)
11184 530910 : && (nonzero_bits (XEXP (varop, 0), int_result_mode)
11185 530910 : >> count) == 0
11186 1928437 : && (nonzero_bits (XEXP (varop, 0), int_result_mode)
11187 64402 : & nonzero_bits (XEXP (varop, 1), int_result_mode)) == 0)
11188 : {
11189 31 : varop = XEXP (varop, 1);
11190 31 : continue;
11191 : }
11192 :
11193 : /* (ashift (plus foo C) N) is (plus (ashift foo N) C'). */
11194 2247623 : if (code == ASHIFT
11195 1323128 : && CONST_INT_P (XEXP (varop, 1))
11196 383791 : && (new_rtx = simplify_const_binary_operation
11197 383791 : (ASHIFT, int_result_mode,
11198 383791 : gen_int_mode (INTVAL (XEXP (varop, 1)), int_result_mode),
11199 383791 : gen_int_shift_amount (int_result_mode, count))) != 0
11200 383791 : && CONST_INT_P (new_rtx)
11201 2247795 : && merge_outer_ops (&outer_op, &outer_const, PLUS,
11202 : INTVAL (new_rtx), int_result_mode,
11203 : &complement_p))
11204 : {
11205 383619 : varop = XEXP (varop, 0);
11206 383619 : continue;
11207 : }
11208 :
11209 : /* Check for 'PLUS signbit', which is the canonical form of 'XOR
11210 : signbit', and attempt to change the PLUS to an XOR and move it to
11211 : the outer operation as is done above in the AND/IOR/XOR case
11212 : leg for shift(logical). See details in logical handling above
11213 : for reasoning in doing so. */
11214 1488578 : if (code == LSHIFTRT
11215 371152 : && CONST_INT_P (XEXP (varop, 1))
11216 274116 : && mode_signbit_p (int_result_mode, XEXP (varop, 1))
11217 8193 : && (new_rtx = simplify_const_binary_operation
11218 1480385 : (code, int_result_mode,
11219 8193 : gen_int_mode (INTVAL (XEXP (varop, 1)), int_result_mode),
11220 8193 : gen_int_shift_amount (int_result_mode, count))) != 0
11221 8193 : && CONST_INT_P (new_rtx)
11222 1488578 : && merge_outer_ops (&outer_op, &outer_const, XOR,
11223 : INTVAL (new_rtx), int_result_mode,
11224 : &complement_p))
11225 : {
11226 8193 : varop = XEXP (varop, 0);
11227 8193 : continue;
11228 : }
11229 :
11230 : break;
11231 :
11232 609538 : case MINUS:
11233 : /* The following rules apply only to scalars. */
11234 609538 : if (shift_mode != shift_unit_mode)
11235 : break;
11236 598186 : int_varop_mode = as_a <scalar_int_mode> (GET_MODE (varop));
11237 :
11238 : /* If we have (xshiftrt (minus (ashiftrt X C)) X) C)
11239 : with C the size of VAROP - 1 and the shift is logical if
11240 : STORE_FLAG_VALUE is 1 and arithmetic if STORE_FLAG_VALUE is -1,
11241 : we have a (gt X 0) operation. If the shift is arithmetic with
11242 : STORE_FLAG_VALUE of 1 or logical with STORE_FLAG_VALUE == -1,
11243 : we have a (neg (gt X 0)) operation. */
11244 :
11245 598186 : if ((STORE_FLAG_VALUE == 1 || STORE_FLAG_VALUE == -1)
11246 598186 : && GET_CODE (XEXP (varop, 0)) == ASHIFTRT
11247 13229 : && count == (GET_MODE_PRECISION (int_varop_mode) - 1)
11248 49 : && (code == LSHIFTRT || code == ASHIFTRT)
11249 14 : && CONST_INT_P (XEXP (XEXP (varop, 0), 1))
11250 14 : && INTVAL (XEXP (XEXP (varop, 0), 1)) == count
11251 598186 : && rtx_equal_p (XEXP (XEXP (varop, 0), 0), XEXP (varop, 1)))
11252 : {
11253 0 : count = 0;
11254 0 : varop = gen_rtx_GT (int_varop_mode, XEXP (varop, 1),
11255 : const0_rtx);
11256 :
11257 0 : if (STORE_FLAG_VALUE == 1 ? code == ASHIFTRT : code == LSHIFTRT)
11258 0 : varop = gen_rtx_NEG (int_varop_mode, varop);
11259 :
11260 0 : continue;
11261 : }
11262 : break;
11263 :
11264 667 : case TRUNCATE:
11265 : /* Change (lshiftrt (truncate (lshiftrt))) to (truncate (lshiftrt))
11266 : if the truncate does not affect the value. */
11267 667 : if (code == LSHIFTRT
11268 509 : && GET_CODE (XEXP (varop, 0)) == LSHIFTRT
11269 509 : && CONST_INT_P (XEXP (XEXP (varop, 0), 1))
11270 667 : && (INTVAL (XEXP (XEXP (varop, 0), 1))
11271 509 : >= (GET_MODE_UNIT_PRECISION (GET_MODE (XEXP (varop, 0)))
11272 1018 : - GET_MODE_UNIT_PRECISION (GET_MODE (varop)))))
11273 : {
11274 509 : rtx varop_inner = XEXP (varop, 0);
11275 509 : int new_count = count + INTVAL (XEXP (varop_inner, 1));
11276 509 : rtx new_count_rtx = gen_int_shift_amount (GET_MODE (varop_inner),
11277 509 : new_count);
11278 509 : varop_inner = gen_rtx_LSHIFTRT (GET_MODE (varop_inner),
11279 : XEXP (varop_inner, 0),
11280 : new_count_rtx);
11281 509 : varop = gen_rtx_TRUNCATE (GET_MODE (varop), varop_inner);
11282 509 : count = 0;
11283 509 : continue;
11284 509 : }
11285 : break;
11286 :
11287 : default:
11288 : break;
11289 55042 : }
11290 :
11291 : break;
11292 : }
11293 :
11294 23597270 : shift_mode = result_mode;
11295 23597270 : if (shift_mode != mode)
11296 : {
11297 : /* We only change the modes of scalar shifts. */
11298 146548 : int_mode = as_a <scalar_int_mode> (mode);
11299 146548 : int_result_mode = as_a <scalar_int_mode> (result_mode);
11300 146548 : shift_mode = try_widen_shift_mode (code, varop, count, int_result_mode,
11301 : int_mode, outer_op, outer_const);
11302 : }
11303 :
11304 : /* We have now finished analyzing the shift. The result should be
11305 : a shift of type CODE with SHIFT_MODE shifting VAROP COUNT places. If
11306 : OUTER_OP is non-UNKNOWN, it is an operation that needs to be applied
11307 : to the result of the shift. OUTER_CONST is the relevant constant,
11308 : but we must turn off all bits turned off in the shift. */
11309 :
11310 23597270 : if (outer_op == UNKNOWN
11311 20011401 : && orig_code == code && orig_count == count
11312 19968215 : && varop == orig_varop
11313 19838214 : && shift_mode == GET_MODE (varop))
11314 : return NULL_RTX;
11315 :
11316 : /* Make a SUBREG if necessary. If we can't make it, fail. */
11317 3761968 : varop = gen_lowpart (shift_mode, varop);
11318 3761968 : if (varop == NULL_RTX || GET_CODE (varop) == CLOBBER)
11319 : return NULL_RTX;
11320 :
11321 : /* If we have an outer operation and we just made a shift, it is
11322 : possible that we could have simplified the shift were it not
11323 : for the outer operation. So try to do the simplification
11324 : recursively. */
11325 :
11326 3761968 : if (outer_op != UNKNOWN)
11327 3585869 : x = simplify_shift_const_1 (code, shift_mode, varop, count);
11328 : else
11329 : x = NULL_RTX;
11330 :
11331 3585869 : if (x == NULL_RTX)
11332 3726349 : x = simplify_gen_binary (code, shift_mode, varop,
11333 3726349 : gen_int_shift_amount (shift_mode, count));
11334 :
11335 : /* If we were doing an LSHIFTRT in a wider mode than it was originally,
11336 : turn off all the bits that the shift would have turned off. */
11337 3761968 : if (orig_code == LSHIFTRT && result_mode != shift_mode)
11338 : /* We only change the modes of scalar shifts. */
11339 9652 : x = simplify_and_const_int (NULL_RTX, as_a <scalar_int_mode> (shift_mode),
11340 9652 : x, GET_MODE_MASK (result_mode) >> orig_count);
11341 :
11342 : /* Do the remainder of the processing in RESULT_MODE. */
11343 3761968 : x = gen_lowpart_or_truncate (result_mode, x);
11344 :
11345 : /* If COMPLEMENT_P is set, we have to complement X before doing the outer
11346 : operation. */
11347 3761968 : if (complement_p)
11348 22935 : x = simplify_gen_unary (NOT, result_mode, x, result_mode);
11349 :
11350 3761968 : if (outer_op != UNKNOWN)
11351 : {
11352 3585869 : int_result_mode = as_a <scalar_int_mode> (result_mode);
11353 :
11354 3585869 : if (GET_RTX_CLASS (outer_op) != RTX_UNARY
11355 3585869 : && GET_MODE_PRECISION (int_result_mode) < HOST_BITS_PER_WIDE_INT)
11356 1288424 : outer_const = trunc_int_for_mode (outer_const, int_result_mode);
11357 :
11358 3585869 : if (outer_op == AND)
11359 3118579 : x = simplify_and_const_int (NULL_RTX, int_result_mode, x, outer_const);
11360 467290 : else if (outer_op == SET)
11361 : {
11362 : /* This means that we have determined that the result is
11363 : equivalent to a constant. This should be rare. */
11364 0 : if (!side_effects_p (x))
11365 0 : x = GEN_INT (outer_const);
11366 : }
11367 467290 : else if (GET_RTX_CLASS (outer_op) == RTX_UNARY)
11368 18893 : x = simplify_gen_unary (outer_op, int_result_mode, x, int_result_mode);
11369 : else
11370 448397 : x = simplify_gen_binary (outer_op, int_result_mode, x,
11371 : GEN_INT (outer_const));
11372 : }
11373 :
11374 : return x;
11375 : }
11376 :
11377 : /* Simplify a shift of VAROP by COUNT bits. CODE says what kind of shift.
11378 : The result of the shift is RESULT_MODE. If we cannot simplify it,
11379 : return X or, if it is NULL, synthesize the expression with
11380 : simplify_gen_binary. Otherwise, return a simplified value.
11381 :
11382 : The shift is normally computed in the widest mode we find in VAROP, as
11383 : long as it isn't a different number of words than RESULT_MODE. Exceptions
11384 : are ASHIFTRT and ROTATE, which are always done in their original mode. */
11385 :
11386 : static rtx
11387 20011627 : simplify_shift_const (rtx x, enum rtx_code code, machine_mode result_mode,
11388 : rtx varop, int count)
11389 : {
11390 20011627 : rtx tem = simplify_shift_const_1 (code, result_mode, varop, count);
11391 20011627 : if (tem)
11392 : return tem;
11393 :
11394 16285278 : if (!x)
11395 4845116 : x = simplify_gen_binary (code, GET_MODE (varop), varop,
11396 4845116 : gen_int_shift_amount (GET_MODE (varop), count));
11397 16285278 : if (GET_MODE (x) != result_mode)
11398 0 : x = gen_lowpart (result_mode, x);
11399 : return x;
11400 : }
11401 :
11402 :
11403 : /* A subroutine of recog_for_combine. See there for arguments and
11404 : return value. */
11405 :
11406 : static int
11407 48062279 : recog_for_combine_1 (rtx *pnewpat, rtx_insn *insn, rtx *pnotes,
11408 : unsigned old_nregs, unsigned new_nregs)
11409 : {
11410 48062279 : rtx pat = *pnewpat;
11411 48062279 : rtx pat_without_clobbers;
11412 48062279 : int insn_code_number;
11413 48062279 : int num_clobbers_to_add = 0;
11414 48062279 : int i;
11415 48062279 : rtx notes = NULL_RTX;
11416 48062279 : rtx old_notes, old_pat;
11417 48062279 : int old_icode;
11418 :
11419 : /* If PAT is a PARALLEL, check to see if it contains the CLOBBER
11420 : we use to indicate that something didn't match. If we find such a
11421 : thing, force rejection. */
11422 48062279 : if (GET_CODE (pat) == PARALLEL)
11423 51788538 : for (i = XVECLEN (pat, 0) - 1; i >= 0; i--)
11424 35673919 : if (GET_CODE (XVECEXP (pat, 0, i)) == CLOBBER
11425 7102110 : && XEXP (XVECEXP (pat, 0, i), 0) == const0_rtx)
11426 : return -1;
11427 :
11428 48060301 : old_pat = PATTERN (insn);
11429 48060301 : old_notes = REG_NOTES (insn);
11430 48060301 : PATTERN (insn) = pat;
11431 48060301 : REG_NOTES (insn) = NULL_RTX;
11432 :
11433 48060301 : insn_code_number = recog (pat, insn, &num_clobbers_to_add);
11434 48060301 : if (dump_file && (dump_flags & TDF_DETAILS))
11435 : {
11436 277 : if (insn_code_number < 0)
11437 177 : fputs ("Failed to match this instruction:\n", dump_file);
11438 : else
11439 100 : fputs ("Successfully matched this instruction:\n", dump_file);
11440 277 : print_rtl_single (dump_file, pat);
11441 : }
11442 :
11443 : /* If it isn't, there is the possibility that we previously had an insn
11444 : that clobbered some register as a side effect, but the combined
11445 : insn doesn't need to do that. So try once more without the clobbers
11446 : unless this represents an ASM insn. */
11447 :
11448 38289359 : if (insn_code_number < 0 && ! check_asm_operands (pat)
11449 86347859 : && GET_CODE (pat) == PARALLEL)
11450 : {
11451 : int pos;
11452 :
11453 50337491 : for (pos = 0, i = 0; i < XVECLEN (pat, 0); i++)
11454 34687680 : if (GET_CODE (XVECEXP (pat, 0, i)) != CLOBBER)
11455 : {
11456 27986985 : if (i != pos)
11457 2232571 : SUBST (XVECEXP (pat, 0, pos), XVECEXP (pat, 0, i));
11458 27986985 : pos++;
11459 : }
11460 :
11461 15649811 : SUBST_INT (XVECLEN (pat, 0), pos);
11462 :
11463 15649811 : if (pos == 1)
11464 4530475 : pat = XVECEXP (pat, 0, 0);
11465 :
11466 15649811 : PATTERN (insn) = pat;
11467 15649811 : insn_code_number = recog (pat, insn, &num_clobbers_to_add);
11468 15649811 : if (dump_file && (dump_flags & TDF_DETAILS))
11469 : {
11470 82 : if (insn_code_number < 0)
11471 81 : fputs ("Failed to match this instruction:\n", dump_file);
11472 : else
11473 1 : fputs ("Successfully matched this instruction:\n", dump_file);
11474 82 : print_rtl_single (dump_file, pat);
11475 : }
11476 : }
11477 :
11478 48060301 : pat_without_clobbers = pat;
11479 :
11480 48060301 : PATTERN (insn) = old_pat;
11481 48060301 : REG_NOTES (insn) = old_notes;
11482 :
11483 : /* Recognize all noop sets, these will be killed by followup pass. */
11484 48060301 : if (insn_code_number < 0 && GET_CODE (pat) == SET && set_noop_p (pat))
11485 209456 : insn_code_number = NOOP_MOVE_INSN_CODE, num_clobbers_to_add = 0;
11486 :
11487 : /* If we had any clobbers to add, make a new pattern than contains
11488 : them. Then check to make sure that all of them are dead. */
11489 48060301 : if (num_clobbers_to_add)
11490 : {
11491 1534594 : rtx newpat = gen_rtx_PARALLEL (VOIDmode,
11492 : rtvec_alloc (GET_CODE (pat) == PARALLEL
11493 : ? (XVECLEN (pat, 0)
11494 : + num_clobbers_to_add)
11495 : : num_clobbers_to_add + 1));
11496 :
11497 1534594 : if (GET_CODE (pat) == PARALLEL)
11498 1473 : for (i = 0; i < XVECLEN (pat, 0); i++)
11499 982 : XVECEXP (newpat, 0, i) = XVECEXP (pat, 0, i);
11500 : else
11501 1534103 : XVECEXP (newpat, 0, 0) = pat;
11502 :
11503 1534594 : add_clobbers (newpat, insn_code_number);
11504 :
11505 2952291 : for (i = XVECLEN (newpat, 0) - num_clobbers_to_add;
11506 2952291 : i < XVECLEN (newpat, 0); i++)
11507 : {
11508 1554585 : if (REG_P (XEXP (XVECEXP (newpat, 0, i), 0))
11509 1554585 : && ! reg_dead_at_p (XEXP (XVECEXP (newpat, 0, i), 0), insn))
11510 : return -1;
11511 1417697 : if (GET_CODE (XEXP (XVECEXP (newpat, 0, i), 0)) != SCRATCH)
11512 : {
11513 1372042 : gcc_assert (REG_P (XEXP (XVECEXP (newpat, 0, i), 0)));
11514 1372042 : notes = alloc_reg_note (REG_UNUSED,
11515 : XEXP (XVECEXP (newpat, 0, i), 0), notes);
11516 : }
11517 : }
11518 : pat = newpat;
11519 : }
11520 :
11521 47923413 : if (insn_code_number >= 0
11522 47923413 : && insn_code_number != NOOP_MOVE_INSN_CODE)
11523 : {
11524 : /* Create the reg dead notes if needed for the regs that were created via split. */
11525 9997421 : for (; old_nregs < new_nregs; old_nregs++)
11526 2012 : notes = alloc_reg_note (REG_DEAD, regno_reg_rtx[old_nregs], notes);
11527 9995409 : old_pat = PATTERN (insn);
11528 9995409 : old_notes = REG_NOTES (insn);
11529 9995409 : old_icode = INSN_CODE (insn);
11530 9995409 : PATTERN (insn) = pat;
11531 9995409 : REG_NOTES (insn) = notes;
11532 9995409 : INSN_CODE (insn) = insn_code_number;
11533 :
11534 : /* Do not accept an insn if hard register constraints are used. For
11535 : example, assume that the first insn is combined into the last one:
11536 :
11537 : r100=...
11538 : %5=...
11539 : r101=exp(r100)
11540 :
11541 : If the resulting insn has an operand which is constrained to hard
11542 : register %5, then this introduces a conflict since register %5 is live
11543 : at this point. Therefore, skip for now. This is a sledge hammer
11544 : approach. Ideally we would skip based on the fact whether a
11545 : combination crosses a hard register assignment and the corresponding
11546 : hard register is also referred by a single register constraint of the
11547 : resulting insn. */
11548 9995409 : bool has_hard_reg_cstr = false;
11549 9995409 : extract_insn (insn);
11550 34152359 : for (int nop = recog_data.n_operands - 1; nop >= 0; --nop)
11551 24156950 : if (strchr (recog_data.constraints[nop], '{'))
11552 : {
11553 : has_hard_reg_cstr = true;
11554 : break;
11555 : }
11556 :
11557 : /* Don't accept hard register constraints. Allow targets to reject
11558 : combined insn. */
11559 9995409 : if (has_hard_reg_cstr || !targetm.legitimate_combined_insn (insn))
11560 : {
11561 3523 : if (dump_file && (dump_flags & TDF_DETAILS))
11562 : {
11563 0 : if (has_hard_reg_cstr)
11564 0 : fputs ("Instruction makes use of hard register constraints.",
11565 : dump_file);
11566 : else
11567 0 : fputs ("Instruction not appropriate for target.",
11568 : dump_file);
11569 : }
11570 :
11571 : /* Callers expect recog_for_combine to strip
11572 : clobbers from the pattern on failure. */
11573 : pat = pat_without_clobbers;
11574 : notes = NULL_RTX;
11575 :
11576 : insn_code_number = -1;
11577 : }
11578 :
11579 9995409 : PATTERN (insn) = old_pat;
11580 9995409 : REG_NOTES (insn) = old_notes;
11581 9995409 : INSN_CODE (insn) = old_icode;
11582 : }
11583 :
11584 47923413 : *pnewpat = pat;
11585 47923413 : *pnotes = notes;
11586 :
11587 47923413 : return insn_code_number;
11588 : }
11589 :
11590 : /* Change every ZERO_EXTRACT and ZERO_EXTEND of a SUBREG that can be
11591 : expressed as an AND and maybe an LSHIFTRT, to that formulation.
11592 : Return whether anything was so changed. */
11593 :
11594 : static bool
11595 48196876 : change_zero_ext (rtx pat)
11596 : {
11597 48196876 : bool changed = false;
11598 48196876 : rtx *src = &SET_SRC (pat);
11599 :
11600 48196876 : subrtx_ptr_iterator::array_type array;
11601 333480637 : FOR_EACH_SUBRTX_PTR (iter, array, src, NONCONST)
11602 : {
11603 285283761 : rtx x = **iter;
11604 285283761 : scalar_int_mode mode, inner_mode;
11605 285283761 : if (!is_a <scalar_int_mode> (GET_MODE (x), &mode))
11606 420141839 : continue;
11607 150425683 : int size;
11608 :
11609 150425683 : if (GET_CODE (x) == ZERO_EXTRACT
11610 715228 : && CONST_INT_P (XEXP (x, 1))
11611 715206 : && CONST_INT_P (XEXP (x, 2))
11612 672743 : && is_a <scalar_int_mode> (GET_MODE (XEXP (x, 0)), &inner_mode)
11613 151098426 : && GET_MODE_PRECISION (inner_mode) <= GET_MODE_PRECISION (mode))
11614 : {
11615 672735 : size = INTVAL (XEXP (x, 1));
11616 :
11617 672735 : int start = INTVAL (XEXP (x, 2));
11618 672735 : if (BITS_BIG_ENDIAN)
11619 : start = GET_MODE_PRECISION (inner_mode) - size - start;
11620 :
11621 672735 : if (start != 0)
11622 572725 : x = gen_rtx_LSHIFTRT (inner_mode, XEXP (x, 0),
11623 : gen_int_shift_amount (inner_mode, start));
11624 : else
11625 : x = XEXP (x, 0);
11626 :
11627 672735 : if (mode != inner_mode)
11628 : {
11629 163 : if (REG_P (x) && HARD_REGISTER_P (x)
11630 202351 : && !can_change_dest_mode (x, 0, mode))
11631 0 : continue;
11632 :
11633 202351 : x = gen_lowpart_SUBREG (mode, x);
11634 : }
11635 : }
11636 149752948 : else if (GET_CODE (x) == ZERO_EXTEND
11637 2285966 : && GET_CODE (XEXP (x, 0)) == SUBREG
11638 470670 : && SCALAR_INT_MODE_P (GET_MODE (SUBREG_REG (XEXP (x, 0))))
11639 469461 : && !paradoxical_subreg_p (XEXP (x, 0))
11640 150222409 : && subreg_lowpart_p (XEXP (x, 0)))
11641 : {
11642 363645 : inner_mode = as_a <scalar_int_mode> (GET_MODE (XEXP (x, 0)));
11643 363645 : size = GET_MODE_PRECISION (inner_mode);
11644 363645 : x = SUBREG_REG (XEXP (x, 0));
11645 363645 : if (GET_MODE (x) != mode)
11646 : {
11647 13914 : if (REG_P (x) && HARD_REGISTER_P (x)
11648 15605 : && !can_change_dest_mode (x, 0, mode))
11649 0 : continue;
11650 :
11651 15605 : x = gen_lowpart_SUBREG (mode, x);
11652 : }
11653 : }
11654 298778533 : else if (GET_CODE (x) == ZERO_EXTEND
11655 1922321 : && REG_P (XEXP (x, 0))
11656 1044213 : && HARD_REGISTER_P (XEXP (x, 0))
11657 149389376 : && can_change_dest_mode (XEXP (x, 0), 0, mode))
11658 : {
11659 73 : inner_mode = as_a <scalar_int_mode> (GET_MODE (XEXP (x, 0)));
11660 73 : size = GET_MODE_PRECISION (inner_mode);
11661 73 : x = gen_rtx_REG (mode, REGNO (XEXP (x, 0)));
11662 : }
11663 : else
11664 149389230 : continue;
11665 :
11666 1433510 : if (!(GET_CODE (x) == LSHIFTRT
11667 397057 : && CONST_INT_P (XEXP (x, 1))
11668 397057 : && size + INTVAL (XEXP (x, 1)) == GET_MODE_PRECISION (mode)))
11669 : {
11670 886084 : wide_int mask = wi::mask (size, false, GET_MODE_PRECISION (mode));
11671 886084 : x = gen_rtx_AND (mode, x, immed_wide_int_const (mask, mode));
11672 886084 : }
11673 :
11674 1036453 : SUBST (**iter, x);
11675 1036453 : changed = true;
11676 : }
11677 :
11678 48196876 : if (changed)
11679 9293475 : FOR_EACH_SUBRTX_PTR (iter, array, src, NONCONST)
11680 8271293 : maybe_swap_commutative_operands (**iter);
11681 :
11682 48196876 : rtx *dst = &SET_DEST (pat);
11683 48196876 : scalar_int_mode mode;
11684 48196876 : if (GET_CODE (*dst) == ZERO_EXTRACT
11685 9345 : && REG_P (XEXP (*dst, 0))
11686 379 : && is_a <scalar_int_mode> (GET_MODE (XEXP (*dst, 0)), &mode)
11687 379 : && CONST_INT_P (XEXP (*dst, 1))
11688 48197255 : && CONST_INT_P (XEXP (*dst, 2)))
11689 : {
11690 252 : rtx reg = XEXP (*dst, 0);
11691 252 : int width = INTVAL (XEXP (*dst, 1));
11692 252 : int offset = INTVAL (XEXP (*dst, 2));
11693 252 : int reg_width = GET_MODE_PRECISION (mode);
11694 252 : if (BITS_BIG_ENDIAN)
11695 : offset = reg_width - width - offset;
11696 :
11697 252 : rtx x, y, z, w;
11698 252 : wide_int mask = wi::shifted_mask (offset, width, true, reg_width);
11699 252 : wide_int mask2 = wi::shifted_mask (offset, width, false, reg_width);
11700 252 : x = gen_rtx_AND (mode, reg, immed_wide_int_const (mask, mode));
11701 252 : if (offset)
11702 200 : y = gen_rtx_ASHIFT (mode, SET_SRC (pat), GEN_INT (offset));
11703 : else
11704 52 : y = SET_SRC (pat);
11705 252 : z = gen_rtx_AND (mode, y, immed_wide_int_const (mask2, mode));
11706 252 : w = gen_rtx_IOR (mode, x, z);
11707 252 : SUBST (SET_DEST (pat), reg);
11708 252 : SUBST (SET_SRC (pat), w);
11709 :
11710 252 : changed = true;
11711 252 : }
11712 :
11713 48196876 : return changed;
11714 48196876 : }
11715 :
11716 : /* Like recog, but we receive the address of a pointer to a new pattern.
11717 : We try to match the rtx that the pointer points to.
11718 : If that fails, we may try to modify or replace the pattern,
11719 : storing the replacement into the same pointer object.
11720 :
11721 : Modifications include deletion or addition of CLOBBERs. If the
11722 : instruction will still not match, we change ZERO_EXTEND and ZERO_EXTRACT
11723 : to the equivalent AND and perhaps LSHIFTRT patterns, and try with that
11724 : (and undo if that fails).
11725 :
11726 : PNOTES is a pointer to a location where any REG_UNUSED notes added for
11727 : the CLOBBERs are placed.
11728 : If OLD_NREGS != NEW_NREGS, then PNOTES also includes REG_DEAD notes added.
11729 :
11730 : The value is the final insn code from the pattern ultimately matched,
11731 : or -1. */
11732 :
11733 : static int
11734 46805532 : recog_for_combine (rtx *pnewpat, rtx_insn *insn, rtx *pnotes,
11735 : unsigned int old_nregs, unsigned int new_nregs)
11736 : {
11737 46805532 : rtx pat = *pnewpat;
11738 46805532 : int insn_code_number = recog_for_combine_1 (pnewpat, insn, pnotes,
11739 : old_nregs, new_nregs);
11740 46805532 : if (insn_code_number >= 0 || check_asm_operands (pat))
11741 10053672 : return insn_code_number;
11742 :
11743 36751860 : void *marker = get_undo_marker ();
11744 36751860 : bool changed = false;
11745 :
11746 36751860 : if (GET_CODE (pat) == SET)
11747 : {
11748 : /* For an unrecognized single set of a constant, try placing it in
11749 : the constant pool, if this function already uses one. */
11750 21703966 : rtx src = SET_SRC (pat);
11751 21703966 : if (CONSTANT_P (src)
11752 457585 : && !CONST_INT_P (src)
11753 411748 : && crtl->uses_const_pool
11754 362271 : && SET_DEST (pat) != pc_rtx)
11755 : {
11756 362269 : machine_mode mode = GET_MODE (src);
11757 362269 : if (mode == VOIDmode)
11758 1386 : mode = GET_MODE (SET_DEST (pat));
11759 362269 : src = force_const_mem (mode, src);
11760 362269 : if (src)
11761 : {
11762 362259 : SUBST (SET_SRC (pat), src);
11763 362259 : changed = true;
11764 : }
11765 : }
11766 : else
11767 21341697 : changed = change_zero_ext (pat);
11768 : }
11769 15047894 : else if (GET_CODE (pat) == PARALLEL)
11770 : {
11771 : int i;
11772 42120704 : for (i = 0; i < XVECLEN (pat, 0); i++)
11773 : {
11774 27087905 : rtx set = XVECEXP (pat, 0, i);
11775 27087905 : if (GET_CODE (set) == SET)
11776 26855179 : changed |= change_zero_ext (set);
11777 : }
11778 : }
11779 :
11780 36736755 : if (changed)
11781 : {
11782 1256747 : insn_code_number = recog_for_combine_1 (pnewpat, insn, pnotes,
11783 : old_nregs, new_nregs);
11784 :
11785 1256747 : if (insn_code_number < 0)
11786 1107276 : undo_to_marker (marker);
11787 : }
11788 :
11789 : return insn_code_number;
11790 : }
11791 :
11792 : /* Like gen_lowpart_general but for use by combine. In combine it
11793 : is not possible to create any new pseudoregs. However, it is
11794 : safe to create invalid memory addresses, because combine will
11795 : try to recognize them and all they will do is make the combine
11796 : attempt fail.
11797 :
11798 : If for some reason this cannot do its job, an rtx
11799 : (clobber (const_int 0)) is returned.
11800 : An insn containing that will not be recognized. */
11801 :
11802 : static rtx
11803 149668027 : gen_lowpart_for_combine (machine_mode omode, rtx x)
11804 : {
11805 149668027 : machine_mode imode = GET_MODE (x);
11806 149668027 : rtx result;
11807 :
11808 149668027 : if (omode == imode)
11809 : return x;
11810 :
11811 : /* We can only support MODE being wider than a word if X is a
11812 : constant integer or has a mode the same size. */
11813 49859037 : if (maybe_gt (GET_MODE_SIZE (omode), UNITS_PER_WORD)
11814 23568094 : && ! (CONST_SCALAR_INT_P (x)
11815 9756226 : || known_eq (GET_MODE_SIZE (imode), GET_MODE_SIZE (omode))))
11816 2907450 : goto fail;
11817 :
11818 : /* X might be a paradoxical (subreg (mem)). In that case, gen_lowpart
11819 : won't know what to do. So we will strip off the SUBREG here and
11820 : process normally. */
11821 20660644 : if (GET_CODE (x) == SUBREG && MEM_P (SUBREG_REG (x)))
11822 : {
11823 14051 : x = SUBREG_REG (x);
11824 :
11825 : /* For use in case we fall down into the address adjustments
11826 : further below, we need to adjust the known mode and size of
11827 : x; imode and isize, since we just adjusted x. */
11828 14051 : imode = GET_MODE (x);
11829 :
11830 14051 : if (imode == omode)
11831 : return x;
11832 : }
11833 :
11834 20651701 : result = gen_lowpart_common (omode, x);
11835 :
11836 20651701 : if (result)
11837 : return result;
11838 :
11839 8166129 : if (MEM_P (x))
11840 : {
11841 : /* Refuse to work on a volatile memory ref or one with a mode-dependent
11842 : address. */
11843 2019318 : if (MEM_VOLATILE_P (x)
11844 3994205 : || mode_dependent_address_p (XEXP (x, 0), MEM_ADDR_SPACE (x)))
11845 44462 : goto fail;
11846 :
11847 : /* If we want to refer to something bigger than the original memref,
11848 : generate a paradoxical subreg instead. That will force a reload
11849 : of the original memref X. */
11850 1974856 : if (paradoxical_subreg_p (omode, imode)
11851 1974856 : && validate_subreg (omode, GET_MODE (x), x, 0))
11852 1757843 : return gen_rtx_SUBREG (omode, x, 0);
11853 :
11854 217013 : poly_int64 offset = byte_lowpart_offset (omode, imode);
11855 217013 : return adjust_address_nv (x, omode, offset);
11856 : }
11857 :
11858 : /* If X is a comparison operator, rewrite it in a new mode. This
11859 : probably won't match, but may allow further simplifications. */
11860 6146811 : else if (COMPARISON_P (x)
11861 142612 : && SCALAR_INT_MODE_P (imode)
11862 52503 : && SCALAR_INT_MODE_P (omode))
11863 52492 : return gen_rtx_fmt_ee (GET_CODE (x), omode, XEXP (x, 0), XEXP (x, 1));
11864 :
11865 : /* If we couldn't simplify X any other way, just enclose it in a
11866 : SUBREG. Normally, this SUBREG won't match, but some patterns may
11867 : include an explicit SUBREG or we may simplify it further in combine. */
11868 : else
11869 : {
11870 6094319 : rtx res;
11871 :
11872 6094319 : if (imode == VOIDmode)
11873 : {
11874 8 : imode = int_mode_for_mode (omode).require ();
11875 8 : x = gen_lowpart_common (imode, x);
11876 8 : if (x == NULL)
11877 0 : goto fail;
11878 : }
11879 6094319 : res = lowpart_subreg (omode, x, imode);
11880 6094319 : if (res)
11881 : return res;
11882 : }
11883 :
11884 19625 : fail:
11885 2971537 : return gen_rtx_CLOBBER (omode, const0_rtx);
11886 : }
11887 :
11888 : /* Like gen_lowpart_for_combine but returns NULL_RTX
11889 : for an error instead of CLOBBER.
11890 : Note no_emit is not called directly from combine but rather from
11891 : simplify_rtx and is expecting a NULL on failure rather than
11892 : a CLOBBER. */
11893 :
11894 : static rtx
11895 748697 : gen_lowpart_for_combine_no_emit (machine_mode omode, rtx x)
11896 : {
11897 748697 : rtx tem = gen_lowpart_for_combine (omode, x);
11898 748697 : if (!tem || GET_CODE (tem) == CLOBBER)
11899 15936 : return NULL_RTX;
11900 : return tem;
11901 : }
11902 :
11903 :
11904 : /* Try to simplify a comparison between OP0 and a constant OP1,
11905 : where CODE is the comparison code that will be tested, into a
11906 : (CODE OP0 const0_rtx) form.
11907 :
11908 : The result is a possibly different comparison code to use.
11909 : *POP0 and *POP1 may be updated. */
11910 :
11911 : static enum rtx_code
11912 15184008 : simplify_compare_const (enum rtx_code code, machine_mode mode,
11913 : rtx *pop0, rtx *pop1)
11914 : {
11915 15184008 : scalar_int_mode int_mode;
11916 15184008 : rtx op0 = *pop0;
11917 15184008 : HOST_WIDE_INT const_op = INTVAL (*pop1);
11918 :
11919 : /* Get the constant we are comparing against and turn off all bits
11920 : not on in our mode. */
11921 15184008 : if (mode != VOIDmode)
11922 14888966 : const_op = trunc_int_for_mode (const_op, mode);
11923 :
11924 : /* If we are comparing against a constant power of two and the value
11925 : being compared can only have that single bit nonzero (e.g., it was
11926 : `and'ed with that bit), we can replace this with a comparison
11927 : with zero. */
11928 15184008 : if (const_op
11929 4028175 : && (code == EQ || code == NE || code == GEU || code == LTU
11930 : /* This optimization is incorrect for signed >= INT_MIN or
11931 : < INT_MIN, those are always true or always false. */
11932 24272 : || ((code == GE || code == LT) && const_op > 0))
11933 2717391 : && is_a <scalar_int_mode> (mode, &int_mode)
11934 2717391 : && GET_MODE_PRECISION (int_mode) - 1 < HOST_BITS_PER_WIDE_INT
11935 2698213 : && pow2p_hwi (const_op & GET_MODE_MASK (int_mode))
11936 16073206 : && (nonzero_bits (op0, int_mode)
11937 889198 : == (unsigned HOST_WIDE_INT) (const_op & GET_MODE_MASK (int_mode))))
11938 : {
11939 4694 : code = (code == EQ || code == GE || code == GEU ? NE : EQ);
11940 : const_op = 0;
11941 : }
11942 :
11943 : /* Similarly, if we are comparing a value known to be either -1 or
11944 : 0 with -1, change it to the opposite comparison against zero. */
11945 15179314 : if (const_op == -1
11946 211891 : && (code == EQ || code == NE || code == GT || code == LE
11947 : || code == GEU || code == LTU)
11948 15382399 : && is_a <scalar_int_mode> (mode, &int_mode)
11949 15388659 : && num_sign_bit_copies (op0, int_mode) == GET_MODE_PRECISION (int_mode))
11950 : {
11951 12241 : code = (code == EQ || code == LE || code == GEU ? NE : EQ);
11952 : const_op = 0;
11953 : }
11954 :
11955 : /* Do some canonicalizations based on the comparison code. We prefer
11956 : comparisons against zero and then prefer equality comparisons.
11957 : If we can reduce the size of a constant, we will do that too. */
11958 15173054 : switch (code)
11959 : {
11960 270168 : case LT:
11961 : /* < C is equivalent to <= (C - 1) */
11962 270168 : if (const_op > 0)
11963 : {
11964 4947 : const_op -= 1;
11965 4947 : code = LE;
11966 : /* ... fall through to LE case below. */
11967 405572 : gcc_fallthrough ();
11968 : }
11969 : else
11970 : break;
11971 :
11972 405572 : case LE:
11973 : /* <= C is equivalent to < (C + 1); we do this for C < 0 */
11974 405572 : if (const_op < 0)
11975 : {
11976 52 : const_op += 1;
11977 52 : code = LT;
11978 : }
11979 :
11980 : /* If we are doing a <= 0 comparison on a value known to have
11981 : a zero sign bit, we can replace this with == 0. */
11982 405520 : else if (const_op == 0
11983 278583 : && is_a <scalar_int_mode> (mode, &int_mode)
11984 278583 : && GET_MODE_PRECISION (int_mode) - 1 < HOST_BITS_PER_WIDE_INT
11985 684103 : && (nonzero_bits (op0, int_mode)
11986 278583 : & (HOST_WIDE_INT_1U << (GET_MODE_PRECISION (int_mode) - 1)))
11987 278583 : == 0)
11988 : code = EQ;
11989 : break;
11990 :
11991 235260 : case GE:
11992 : /* >= C is equivalent to > (C - 1). */
11993 235260 : if (const_op > 0)
11994 : {
11995 1895 : const_op -= 1;
11996 1895 : code = GT;
11997 : /* ... fall through to GT below. */
11998 248010 : gcc_fallthrough ();
11999 : }
12000 : else
12001 : break;
12002 :
12003 248010 : case GT:
12004 : /* > C is equivalent to >= (C + 1); we do this for C < 0. */
12005 248010 : if (const_op < 0)
12006 : {
12007 155 : const_op += 1;
12008 155 : code = GE;
12009 : }
12010 :
12011 : /* If we are doing a > 0 comparison on a value known to have
12012 : a zero sign bit, we can replace this with != 0. */
12013 247855 : else if (const_op == 0
12014 125166 : && is_a <scalar_int_mode> (mode, &int_mode)
12015 125166 : && GET_MODE_PRECISION (int_mode) - 1 < HOST_BITS_PER_WIDE_INT
12016 373021 : && (nonzero_bits (op0, int_mode)
12017 125166 : & (HOST_WIDE_INT_1U << (GET_MODE_PRECISION (int_mode) - 1)))
12018 125166 : == 0)
12019 : code = NE;
12020 : break;
12021 :
12022 98824 : case LTU:
12023 : /* < C is equivalent to <= (C - 1). */
12024 98824 : if (const_op > 0)
12025 : {
12026 89570 : const_op -= 1;
12027 89570 : code = LEU;
12028 : /* ... fall through ... */
12029 89570 : gcc_fallthrough ();
12030 : }
12031 : /* (unsigned) < 0x80000000 is equivalent to >= 0. */
12032 9254 : else if (is_a <scalar_int_mode> (mode, &int_mode)
12033 9254 : && GET_MODE_PRECISION (int_mode) - 1 < HOST_BITS_PER_WIDE_INT
12034 8484 : && (((unsigned HOST_WIDE_INT) const_op & GET_MODE_MASK (int_mode))
12035 8484 : == HOST_WIDE_INT_1U << (GET_MODE_PRECISION (int_mode) - 1)))
12036 : {
12037 : const_op = 0;
12038 : code = GE;
12039 : break;
12040 : }
12041 : else
12042 : break;
12043 :
12044 672861 : case LEU:
12045 : /* unsigned <= 0 is equivalent to == 0 */
12046 672861 : if (const_op == 0)
12047 : code = EQ;
12048 : /* (unsigned) <= 0x7fffffff is equivalent to >= 0. */
12049 672518 : else if (is_a <scalar_int_mode> (mode, &int_mode)
12050 672518 : && GET_MODE_PRECISION (int_mode) - 1 < HOST_BITS_PER_WIDE_INT
12051 670577 : && ((unsigned HOST_WIDE_INT) const_op
12052 : == ((HOST_WIDE_INT_1U
12053 670577 : << (GET_MODE_PRECISION (int_mode) - 1)) - 1)))
12054 : {
12055 : const_op = 0;
12056 : code = GE;
12057 : }
12058 : break;
12059 :
12060 31878 : case GEU:
12061 : /* >= C is equivalent to > (C - 1). */
12062 31878 : if (const_op > 1)
12063 : {
12064 23474 : const_op -= 1;
12065 23474 : code = GTU;
12066 : /* ... fall through ... */
12067 23474 : gcc_fallthrough ();
12068 : }
12069 :
12070 : /* (unsigned) >= 0x80000000 is equivalent to < 0. */
12071 8404 : else if (is_a <scalar_int_mode> (mode, &int_mode)
12072 8404 : && GET_MODE_PRECISION (int_mode) - 1 < HOST_BITS_PER_WIDE_INT
12073 7148 : && (((unsigned HOST_WIDE_INT) const_op & GET_MODE_MASK (int_mode))
12074 7148 : == HOST_WIDE_INT_1U << (GET_MODE_PRECISION (int_mode) - 1)))
12075 : {
12076 : const_op = 0;
12077 : code = LT;
12078 : break;
12079 : }
12080 : else
12081 : break;
12082 :
12083 490546 : case GTU:
12084 : /* unsigned > 0 is equivalent to != 0 */
12085 490546 : if (const_op == 0)
12086 : code = NE;
12087 : /* (unsigned) > 0x7fffffff is equivalent to < 0. */
12088 490546 : else if (is_a <scalar_int_mode> (mode, &int_mode)
12089 490546 : && GET_MODE_PRECISION (int_mode) - 1 < HOST_BITS_PER_WIDE_INT
12090 489447 : && ((unsigned HOST_WIDE_INT) const_op
12091 : == (HOST_WIDE_INT_1U
12092 489447 : << (GET_MODE_PRECISION (int_mode) - 1)) - 1))
12093 : {
12094 : const_op = 0;
12095 : code = LT;
12096 : }
12097 : break;
12098 :
12099 : default:
12100 : break;
12101 : }
12102 :
12103 : /* Narrow non-symmetric comparison of memory and constant as e.g.
12104 : x0...x7 <= 0x3fffffffffffffff into x0 <= 0x3f where x0 is the most
12105 : significant byte. Likewise, transform x0...x7 >= 0x4000000000000000 into
12106 : x0 >= 0x40. */
12107 14494812 : if ((code == LEU || code == LTU || code == GEU || code == GTU)
12108 1177339 : && is_a <scalar_int_mode> (GET_MODE (op0), &int_mode)
12109 1177318 : && HWI_COMPUTABLE_MODE_P (int_mode)
12110 1172252 : && MEM_P (op0)
12111 79789 : && !MEM_VOLATILE_P (op0)
12112 : /* The optimization makes only sense for constants which are big enough
12113 : so that we have a chance to chop off something at all. */
12114 78927 : && ((unsigned HOST_WIDE_INT) const_op & GET_MODE_MASK (int_mode)) > 0xff
12115 : /* Ensure that we do not overflow during normalization. */
12116 21713 : && (code != GTU
12117 3599 : || ((unsigned HOST_WIDE_INT) const_op & GET_MODE_MASK (int_mode))
12118 : < HOST_WIDE_INT_M1U)
12119 15205721 : && trunc_int_for_mode (const_op, int_mode) == const_op)
12120 : {
12121 21713 : unsigned HOST_WIDE_INT n
12122 21713 : = (unsigned HOST_WIDE_INT) const_op & GET_MODE_MASK (int_mode);
12123 21713 : enum rtx_code adjusted_code;
12124 :
12125 : /* Normalize code to either LEU or GEU. */
12126 21713 : if (code == LTU)
12127 : {
12128 109 : --n;
12129 109 : adjusted_code = LEU;
12130 : }
12131 21604 : else if (code == GTU)
12132 : {
12133 3599 : ++n;
12134 3599 : adjusted_code = GEU;
12135 : }
12136 : else
12137 : adjusted_code = code;
12138 :
12139 21713 : scalar_int_mode narrow_mode_iter;
12140 67850 : FOR_EACH_MODE_UNTIL (narrow_mode_iter, int_mode)
12141 : {
12142 46777 : unsigned nbits = GET_MODE_PRECISION (int_mode)
12143 46777 : - GET_MODE_PRECISION (narrow_mode_iter);
12144 46777 : unsigned HOST_WIDE_INT mask = (HOST_WIDE_INT_1U << nbits) - 1;
12145 46777 : unsigned HOST_WIDE_INT lower_bits = n & mask;
12146 46777 : if ((adjusted_code == LEU && lower_bits == mask)
12147 46535 : || (adjusted_code == GEU && lower_bits == 0))
12148 : {
12149 640 : n >>= nbits;
12150 640 : break;
12151 : }
12152 : }
12153 :
12154 21713 : if (narrow_mode_iter < int_mode)
12155 : {
12156 640 : if (dump_file && (dump_flags & TDF_DETAILS))
12157 : {
12158 12 : fprintf (
12159 : dump_file, "narrow comparison from mode %s to %s: (MEM %s "
12160 : HOST_WIDE_INT_PRINT_HEX ") to (MEM %s "
12161 12 : HOST_WIDE_INT_PRINT_HEX ").\n", GET_MODE_NAME (int_mode),
12162 12 : GET_MODE_NAME (narrow_mode_iter), GET_RTX_NAME (code),
12163 12 : (unsigned HOST_WIDE_INT) const_op & GET_MODE_MASK (int_mode),
12164 12 : GET_RTX_NAME (adjusted_code), n);
12165 : }
12166 640 : poly_int64 offset = (BYTES_BIG_ENDIAN
12167 640 : ? 0
12168 640 : : (GET_MODE_SIZE (int_mode)
12169 640 : - GET_MODE_SIZE (narrow_mode_iter)));
12170 640 : *pop0 = adjust_address_nv (op0, narrow_mode_iter, offset);
12171 640 : *pop1 = gen_int_mode (n, narrow_mode_iter);
12172 640 : return adjusted_code;
12173 : }
12174 : }
12175 :
12176 15183368 : *pop1 = GEN_INT (const_op);
12177 15183368 : return code;
12178 : }
12179 :
12180 : /* Simplify a comparison between *POP0 and *POP1 where CODE is the
12181 : comparison code that will be tested.
12182 :
12183 : The result is a possibly different comparison code to use. *POP0 and
12184 : *POP1 may be updated.
12185 :
12186 : It is possible that we might detect that a comparison is either always
12187 : true or always false. However, we do not perform general constant
12188 : folding in combine, so this knowledge isn't useful. Such tautologies
12189 : should have been detected earlier. Hence we ignore all such cases. */
12190 :
12191 : static enum rtx_code
12192 23019274 : simplify_comparison (enum rtx_code code, rtx *pop0, rtx *pop1)
12193 : {
12194 23019274 : rtx op0 = *pop0;
12195 23019274 : rtx op1 = *pop1;
12196 23019274 : rtx tem, tem1;
12197 23019274 : int i;
12198 23019274 : scalar_int_mode mode, inner_mode, tmode;
12199 23019274 : opt_scalar_int_mode tmode_iter;
12200 :
12201 : /* Try a few ways of applying the same transformation to both operands. */
12202 23019578 : while (1)
12203 : {
12204 : /* The test below this one won't handle SIGN_EXTENDs on these machines,
12205 : so check specially. */
12206 23019578 : if (!WORD_REGISTER_OPERATIONS
12207 23019578 : && code != GTU && code != GEU && code != LTU && code != LEU
12208 19940296 : && GET_CODE (op0) == ASHIFTRT && GET_CODE (op1) == ASHIFTRT
12209 1599 : && GET_CODE (XEXP (op0, 0)) == ASHIFT
12210 1158 : && GET_CODE (XEXP (op1, 0)) == ASHIFT
12211 724 : && GET_CODE (XEXP (XEXP (op0, 0), 0)) == SUBREG
12212 724 : && GET_CODE (XEXP (XEXP (op1, 0), 0)) == SUBREG
12213 724 : && is_a <scalar_int_mode> (GET_MODE (op0), &mode)
12214 : && (is_a <scalar_int_mode>
12215 724 : (GET_MODE (SUBREG_REG (XEXP (XEXP (op0, 0), 0))), &inner_mode))
12216 724 : && inner_mode == GET_MODE (SUBREG_REG (XEXP (XEXP (op1, 0), 0)))
12217 724 : && CONST_INT_P (XEXP (op0, 1))
12218 724 : && XEXP (op0, 1) == XEXP (op1, 1)
12219 91 : && XEXP (op0, 1) == XEXP (XEXP (op0, 0), 1)
12220 91 : && XEXP (op0, 1) == XEXP (XEXP (op1, 0), 1)
12221 91 : && (INTVAL (XEXP (op0, 1))
12222 91 : == (GET_MODE_PRECISION (mode)
12223 91 : - GET_MODE_PRECISION (inner_mode))))
12224 : {
12225 91 : op0 = SUBREG_REG (XEXP (XEXP (op0, 0), 0));
12226 91 : op1 = SUBREG_REG (XEXP (XEXP (op1, 0), 0));
12227 : }
12228 :
12229 : /* If both operands are the same constant shift, see if we can ignore the
12230 : shift. We can if the shift is a rotate or if the bits shifted out of
12231 : this shift are known to be zero for both inputs and if the type of
12232 : comparison is compatible with the shift. */
12233 23019578 : if (GET_CODE (op0) == GET_CODE (op1)
12234 3472539 : && HWI_COMPUTABLE_MODE_P (GET_MODE (op0))
12235 3149732 : && ((GET_CODE (op0) == ROTATE && (code == NE || code == EQ))
12236 3149732 : || ((GET_CODE (op0) == LSHIFTRT || GET_CODE (op0) == ASHIFT)
12237 926 : && (code != GT && code != LT && code != GE && code != LE))
12238 3148856 : || (GET_CODE (op0) == ASHIFTRT
12239 1520 : && (code != GTU && code != LTU
12240 1512 : && code != GEU && code != LEU)))
12241 2384 : && CONST_INT_P (XEXP (op0, 1))
12242 2349 : && INTVAL (XEXP (op0, 1)) >= 0
12243 2349 : && INTVAL (XEXP (op0, 1)) < HOST_BITS_PER_WIDE_INT
12244 23021927 : && XEXP (op0, 1) == XEXP (op1, 1))
12245 : {
12246 1201 : machine_mode mode = GET_MODE (op0);
12247 1201 : unsigned HOST_WIDE_INT mask = GET_MODE_MASK (mode);
12248 1201 : int shift_count = INTVAL (XEXP (op0, 1));
12249 :
12250 1201 : if (GET_CODE (op0) == LSHIFTRT || GET_CODE (op0) == ASHIFTRT)
12251 570 : mask &= (mask >> shift_count) << shift_count;
12252 631 : else if (GET_CODE (op0) == ASHIFT)
12253 631 : mask = (mask & (mask << shift_count)) >> shift_count;
12254 :
12255 1201 : if ((nonzero_bits (XEXP (op0, 0), mode) & ~mask) == 0
12256 1201 : && (nonzero_bits (XEXP (op1, 0), mode) & ~mask) == 0)
12257 145 : op0 = XEXP (op0, 0), op1 = XEXP (op1, 0);
12258 : else
12259 : break;
12260 : }
12261 :
12262 : /* If both operands are AND's of a paradoxical SUBREG by constant, the
12263 : SUBREGs are of the same mode, and, in both cases, the AND would
12264 : be redundant if the comparison was done in the narrower mode,
12265 : do the comparison in the narrower mode (e.g., we are AND'ing with 1
12266 : and the operand's possibly nonzero bits are 0xffffff01; in that case
12267 : if we only care about QImode, we don't need the AND). This case
12268 : occurs if the output mode of an scc insn is not SImode and
12269 : STORE_FLAG_VALUE == 1 (e.g., the 386).
12270 :
12271 : Similarly, check for a case where the AND's are ZERO_EXTEND
12272 : operations from some narrower mode even though a SUBREG is not
12273 : present. */
12274 :
12275 23018377 : else if (GET_CODE (op0) == AND && GET_CODE (op1) == AND
12276 2661 : && CONST_INT_P (XEXP (op0, 1))
12277 2566 : && CONST_INT_P (XEXP (op1, 1)))
12278 : {
12279 2550 : rtx inner_op0 = XEXP (op0, 0);
12280 2550 : rtx inner_op1 = XEXP (op1, 0);
12281 2550 : HOST_WIDE_INT c0 = INTVAL (XEXP (op0, 1));
12282 2550 : HOST_WIDE_INT c1 = INTVAL (XEXP (op1, 1));
12283 2550 : bool changed = false;
12284 :
12285 2550 : if (paradoxical_subreg_p (inner_op0)
12286 1010 : && GET_CODE (inner_op1) == SUBREG
12287 472 : && HWI_COMPUTABLE_MODE_P (GET_MODE (SUBREG_REG (inner_op0)))
12288 472 : && (GET_MODE (SUBREG_REG (inner_op0))
12289 472 : == GET_MODE (SUBREG_REG (inner_op1)))
12290 198 : && ((~c0) & nonzero_bits (SUBREG_REG (inner_op0),
12291 : GET_MODE (SUBREG_REG (inner_op0)))) == 0
12292 1761 : && ((~c1) & nonzero_bits (SUBREG_REG (inner_op1),
12293 128 : GET_MODE (SUBREG_REG (inner_op1)))) == 0)
12294 : {
12295 112 : op0 = SUBREG_REG (inner_op0);
12296 112 : op1 = SUBREG_REG (inner_op1);
12297 :
12298 : /* The resulting comparison is always unsigned since we masked
12299 : off the original sign bit. */
12300 112 : code = unsigned_condition (code);
12301 :
12302 112 : changed = true;
12303 : }
12304 :
12305 2438 : else if (c0 == c1)
12306 5182 : FOR_EACH_MODE_UNTIL (tmode,
12307 : as_a <scalar_int_mode> (GET_MODE (op0)))
12308 3126 : if ((unsigned HOST_WIDE_INT) c0 == GET_MODE_MASK (tmode))
12309 : {
12310 35 : op0 = gen_lowpart_or_truncate (tmode, inner_op0);
12311 35 : op1 = gen_lowpart_or_truncate (tmode, inner_op1);
12312 35 : code = unsigned_condition (code);
12313 35 : changed = true;
12314 35 : break;
12315 : }
12316 :
12317 2203 : if (! changed)
12318 : break;
12319 : }
12320 :
12321 : /* If both operands are NOT, we can strip off the outer operation
12322 : and adjust the comparison code for swapped operands; similarly for
12323 : NEG, except that this must be an equality comparison. */
12324 23015827 : else if ((GET_CODE (op0) == NOT && GET_CODE (op1) == NOT)
12325 23015826 : || (GET_CODE (op0) == NEG && GET_CODE (op1) == NEG
12326 11 : && (code == EQ || code == NE)))
12327 12 : op0 = XEXP (op0, 0), op1 = XEXP (op1, 0), code = swap_condition (code);
12328 :
12329 : else
12330 : break;
12331 : }
12332 :
12333 : /* If the first operand is a constant, swap the operands and adjust the
12334 : comparison code appropriately, but don't do this if the second operand
12335 : is already a constant integer. */
12336 23019274 : if (swap_commutative_operands_p (op0, op1))
12337 : {
12338 1476045 : std::swap (op0, op1);
12339 1476045 : code = swap_condition (code);
12340 : }
12341 :
12342 : /* We now enter a loop during which we will try to simplify the comparison.
12343 : For the most part, we only are concerned with comparisons with zero,
12344 : but some things may really be comparisons with zero but not start
12345 : out looking that way. */
12346 :
12347 24163776 : while (CONST_INT_P (op1))
12348 : {
12349 15578034 : machine_mode raw_mode = GET_MODE (op0);
12350 15578034 : scalar_int_mode int_mode;
12351 15578034 : int equality_comparison_p;
12352 15578034 : int sign_bit_comparison_p;
12353 15578034 : int unsigned_comparison_p;
12354 15578034 : HOST_WIDE_INT const_op;
12355 :
12356 : /* We only want to handle integral modes. This catches VOIDmode,
12357 : CCmode, and the floating-point modes. An exception is that we
12358 : can handle VOIDmode if OP0 is a COMPARE or a comparison
12359 : operation. */
12360 :
12361 15578034 : if (GET_MODE_CLASS (raw_mode) != MODE_INT
12362 1488831 : && ! (raw_mode == VOIDmode
12363 295082 : && (GET_CODE (op0) == COMPARE || COMPARISON_P (op0))))
12364 : break;
12365 :
12366 : /* Try to simplify the compare to constant, possibly changing the
12367 : comparison op, and/or changing op1 to zero. */
12368 14384245 : code = simplify_compare_const (code, raw_mode, &op0, &op1);
12369 14384245 : const_op = INTVAL (op1);
12370 :
12371 : /* Compute some predicates to simplify code below. */
12372 :
12373 14384245 : equality_comparison_p = (code == EQ || code == NE);
12374 14384245 : sign_bit_comparison_p = ((code == LT || code == GE) && const_op == 0);
12375 14384245 : unsigned_comparison_p = (code == LTU || code == LEU || code == GTU
12376 14384245 : || code == GEU);
12377 :
12378 : /* If this is a sign bit comparison and we can do arithmetic in
12379 : MODE, say that we will only be needing the sign bit of OP0. */
12380 14384245 : if (sign_bit_comparison_p
12381 454981 : && is_a <scalar_int_mode> (raw_mode, &int_mode)
12382 14839226 : && HWI_COMPUTABLE_MODE_P (int_mode))
12383 454589 : op0 = force_to_mode (op0, int_mode,
12384 : HOST_WIDE_INT_1U
12385 454589 : << (GET_MODE_PRECISION (int_mode) - 1), false);
12386 :
12387 14384245 : if (COMPARISON_P (op0))
12388 : {
12389 : /* We can't do anything if OP0 is a condition code value, rather
12390 : than an actual data value. */
12391 603606 : if (const_op != 0
12392 603606 : || GET_MODE_CLASS (GET_MODE (XEXP (op0, 0))) == MODE_CC)
12393 : break;
12394 :
12395 : /* Get the two operands being compared. */
12396 103715 : if (GET_CODE (XEXP (op0, 0)) == COMPARE)
12397 0 : tem = XEXP (XEXP (op0, 0), 0), tem1 = XEXP (XEXP (op0, 0), 1);
12398 : else
12399 103715 : tem = XEXP (op0, 0), tem1 = XEXP (op0, 1);
12400 :
12401 : /* Check for the cases where we simply want the result of the
12402 : earlier test or the opposite of that result. */
12403 103715 : if (code == NE || code == EQ
12404 103715 : || (val_signbit_known_set_p (raw_mode, STORE_FLAG_VALUE)
12405 0 : && (code == LT || code == GE)))
12406 : {
12407 103715 : enum rtx_code new_code;
12408 103715 : if (code == LT || code == NE)
12409 103715 : new_code = GET_CODE (op0);
12410 : else
12411 0 : new_code = reversed_comparison_code (op0, NULL);
12412 :
12413 103715 : if (new_code != UNKNOWN)
12414 : {
12415 103715 : code = new_code;
12416 103715 : op0 = tem;
12417 103715 : op1 = tem1;
12418 24267491 : continue;
12419 : }
12420 : }
12421 : break;
12422 : }
12423 :
12424 13780639 : if (raw_mode == VOIDmode)
12425 : break;
12426 13780639 : scalar_int_mode mode = as_a <scalar_int_mode> (raw_mode);
12427 :
12428 : /* Now try cases based on the opcode of OP0. If none of the cases
12429 : does a "continue", we exit this loop immediately after the
12430 : switch. */
12431 :
12432 13780639 : unsigned int mode_width = GET_MODE_PRECISION (mode);
12433 13780639 : unsigned HOST_WIDE_INT mask = GET_MODE_MASK (mode);
12434 13780639 : switch (GET_CODE (op0))
12435 : {
12436 368871 : case ZERO_EXTRACT:
12437 : /* If we are extracting a single bit from a variable position in
12438 : a constant that has only a single bit set and are comparing it
12439 : with zero, we can convert this into an equality comparison
12440 : between the position and the location of the single bit. */
12441 : /* Except we can't if SHIFT_COUNT_TRUNCATED is set, since we might
12442 : have already reduced the shift count modulo the word size. */
12443 368871 : if (!SHIFT_COUNT_TRUNCATED
12444 368871 : && CONST_INT_P (XEXP (op0, 0))
12445 9249 : && XEXP (op0, 1) == const1_rtx
12446 9231 : && equality_comparison_p && const_op == 0
12447 378102 : && (i = exact_log2 (UINTVAL (XEXP (op0, 0)))) >= 0)
12448 : {
12449 0 : if (BITS_BIG_ENDIAN)
12450 : i = BITS_PER_WORD - 1 - i;
12451 :
12452 0 : op0 = XEXP (op0, 2);
12453 0 : op1 = GEN_INT (i);
12454 0 : const_op = i;
12455 :
12456 : /* Result is nonzero iff shift count is equal to I. */
12457 0 : code = reverse_condition (code);
12458 0 : continue;
12459 : }
12460 :
12461 : /* fall through */
12462 :
12463 368875 : case SIGN_EXTRACT:
12464 368875 : tem = expand_compound_operation (op0);
12465 368875 : if (tem != op0)
12466 : {
12467 334677 : op0 = tem;
12468 334677 : continue;
12469 : }
12470 : break;
12471 :
12472 27450 : case NOT:
12473 : /* If testing for equality, we can take the NOT of the constant. */
12474 38426 : if (equality_comparison_p
12475 27450 : && (tem = simplify_unary_operation (NOT, mode, op1, mode)) != 0)
12476 : {
12477 10976 : op0 = XEXP (op0, 0);
12478 10976 : op1 = tem;
12479 10976 : continue;
12480 : }
12481 :
12482 : /* If just looking at the sign bit, reverse the sense of the
12483 : comparison. */
12484 16474 : if (sign_bit_comparison_p)
12485 : {
12486 16097 : op0 = XEXP (op0, 0);
12487 16097 : code = (code == GE ? LT : GE);
12488 16097 : continue;
12489 : }
12490 : break;
12491 :
12492 237769 : case NEG:
12493 : /* If testing for equality, we can take the NEG of the constant. */
12494 472068 : if (equality_comparison_p
12495 237769 : && (tem = simplify_unary_operation (NEG, mode, op1, mode)) != 0)
12496 : {
12497 234299 : op0 = XEXP (op0, 0);
12498 234299 : op1 = tem;
12499 234299 : continue;
12500 : }
12501 :
12502 : /* The remaining cases only apply to comparisons with zero. */
12503 3470 : if (const_op != 0)
12504 : break;
12505 :
12506 : /* When X is ABS or is known positive,
12507 : (neg X) is < 0 if and only if X != 0. */
12508 :
12509 2946 : if (sign_bit_comparison_p
12510 2908 : && (GET_CODE (XEXP (op0, 0)) == ABS
12511 2907 : || (mode_width <= HOST_BITS_PER_WIDE_INT
12512 2907 : && (nonzero_bits (XEXP (op0, 0), mode)
12513 2907 : & (HOST_WIDE_INT_1U << (mode_width - 1)))
12514 2907 : == 0)))
12515 : {
12516 38 : op0 = XEXP (op0, 0);
12517 38 : code = (code == LT ? NE : EQ);
12518 38 : continue;
12519 : }
12520 :
12521 : /* If we have NEG of something whose two high-order bits are the
12522 : same, we know that "(-a) < 0" is equivalent to "a > 0". */
12523 2870 : if (num_sign_bit_copies (op0, mode) >= 2)
12524 : {
12525 22 : op0 = XEXP (op0, 0);
12526 22 : code = swap_condition (code);
12527 22 : continue;
12528 : }
12529 : break;
12530 :
12531 146 : case ROTATE:
12532 : /* If we are testing equality and our count is a constant, we
12533 : can perform the inverse operation on our RHS. */
12534 146 : if (equality_comparison_p && CONST_INT_P (XEXP (op0, 1))
12535 146 : && (tem = simplify_binary_operation (ROTATERT, mode,
12536 : op1, XEXP (op0, 1))) != 0)
12537 : {
12538 0 : op0 = XEXP (op0, 0);
12539 0 : op1 = tem;
12540 0 : continue;
12541 : }
12542 :
12543 : /* If we are doing a < 0 or >= 0 comparison, it means we are testing
12544 : a particular bit. Convert it to an AND of a constant of that
12545 : bit. This will be converted into a ZERO_EXTRACT. */
12546 146 : if (const_op == 0 && sign_bit_comparison_p
12547 0 : && CONST_INT_P (XEXP (op0, 1))
12548 0 : && mode_width <= HOST_BITS_PER_WIDE_INT
12549 0 : && UINTVAL (XEXP (op0, 1)) < mode_width)
12550 : {
12551 0 : op0 = simplify_and_const_int (NULL_RTX, mode, XEXP (op0, 0),
12552 : (HOST_WIDE_INT_1U
12553 : << (mode_width - 1
12554 0 : - INTVAL (XEXP (op0, 1)))));
12555 0 : code = (code == LT ? NE : EQ);
12556 0 : continue;
12557 : }
12558 :
12559 : /* Fall through. */
12560 :
12561 2190 : case ABS:
12562 : /* ABS is ignorable inside an equality comparison with zero. */
12563 2190 : if (const_op == 0 && equality_comparison_p)
12564 : {
12565 1 : op0 = XEXP (op0, 0);
12566 1 : continue;
12567 : }
12568 : break;
12569 :
12570 1722 : case SIGN_EXTEND:
12571 : /* Can simplify (compare (zero/sign_extend FOO) CONST) to
12572 : (compare FOO CONST) if CONST fits in FOO's mode and we
12573 : are either testing inequality or have an unsigned
12574 : comparison with ZERO_EXTEND or a signed comparison with
12575 : SIGN_EXTEND. But don't do it if we don't have a compare
12576 : insn of the given mode, since we'd have to revert it
12577 : later on, and then we wouldn't know whether to sign- or
12578 : zero-extend. */
12579 1722 : if (is_int_mode (GET_MODE (XEXP (op0, 0)), &mode)
12580 1722 : && ! unsigned_comparison_p
12581 954 : && HWI_COMPUTABLE_MODE_P (mode)
12582 954 : && trunc_int_for_mode (const_op, mode) == const_op
12583 954 : && have_insn_for (COMPARE, mode))
12584 : {
12585 954 : op0 = XEXP (op0, 0);
12586 954 : continue;
12587 : }
12588 : break;
12589 :
12590 387438 : case SUBREG:
12591 : /* Check for the case where we are comparing A - C1 with C2, that is
12592 :
12593 : (subreg:MODE (plus (A) (-C1))) op (C2)
12594 :
12595 : with C1 a constant, and try to lift the SUBREG, i.e. to do the
12596 : comparison in the wider mode. One of the following two conditions
12597 : must be true in order for this to be valid:
12598 :
12599 : 1. The mode extension results in the same bit pattern being added
12600 : on both sides and the comparison is equality or unsigned. As
12601 : C2 has been truncated to fit in MODE, the pattern can only be
12602 : all 0s or all 1s.
12603 :
12604 : 2. The mode extension results in the sign bit being copied on
12605 : each side.
12606 :
12607 : The difficulty here is that we have predicates for A but not for
12608 : (A - C1) so we need to check that C1 is within proper bounds so
12609 : as to perturbate A as little as possible. */
12610 :
12611 387438 : if (mode_width <= HOST_BITS_PER_WIDE_INT
12612 387334 : && subreg_lowpart_p (op0)
12613 356397 : && is_a <scalar_int_mode> (GET_MODE (SUBREG_REG (op0)),
12614 : &inner_mode)
12615 354687 : && GET_MODE_PRECISION (inner_mode) > mode_width
12616 354687 : && GET_CODE (SUBREG_REG (op0)) == PLUS
12617 387438 : && CONST_INT_P (XEXP (SUBREG_REG (op0), 1)))
12618 : {
12619 0 : rtx a = XEXP (SUBREG_REG (op0), 0);
12620 0 : HOST_WIDE_INT c1 = -INTVAL (XEXP (SUBREG_REG (op0), 1));
12621 :
12622 0 : if ((c1 > 0
12623 0 : && (unsigned HOST_WIDE_INT) c1
12624 0 : < HOST_WIDE_INT_1U << (mode_width - 1)
12625 0 : && (equality_comparison_p || unsigned_comparison_p)
12626 : /* (A - C1) zero-extends if it is positive and sign-extends
12627 : if it is negative, C2 both zero- and sign-extends. */
12628 0 : && (((nonzero_bits (a, inner_mode)
12629 0 : & ~GET_MODE_MASK (mode)) == 0
12630 0 : && const_op >= 0)
12631 : /* (A - C1) sign-extends if it is positive and 1-extends
12632 : if it is negative, C2 both sign- and 1-extends. */
12633 0 : || (num_sign_bit_copies (a, inner_mode)
12634 0 : > (unsigned int) (GET_MODE_PRECISION (inner_mode)
12635 0 : - mode_width)
12636 0 : && const_op < 0)))
12637 0 : || ((unsigned HOST_WIDE_INT) c1
12638 0 : < HOST_WIDE_INT_1U << (mode_width - 2)
12639 : /* (A - C1) always sign-extends, like C2. */
12640 0 : && num_sign_bit_copies (a, inner_mode)
12641 0 : > (unsigned int) (GET_MODE_PRECISION (inner_mode)
12642 0 : - (mode_width - 1))))
12643 : {
12644 0 : op0 = SUBREG_REG (op0);
12645 0 : continue;
12646 : }
12647 : }
12648 :
12649 : /* If the inner mode is narrower and we are extracting the low part,
12650 : we can treat the SUBREG as if it were a ZERO_EXTEND ... */
12651 387438 : if (paradoxical_subreg_p (op0))
12652 : {
12653 : if (WORD_REGISTER_OPERATIONS
12654 : && is_a <scalar_int_mode> (GET_MODE (SUBREG_REG (op0)),
12655 : &inner_mode)
12656 : && GET_MODE_PRECISION (inner_mode) < BITS_PER_WORD
12657 : /* On WORD_REGISTER_OPERATIONS targets the bits
12658 : beyond sub_mode aren't considered undefined,
12659 : so optimize only if it is a MEM load when MEM loads
12660 : zero extend, because then the upper bits are all zero. */
12661 : && !(MEM_P (SUBREG_REG (op0))
12662 : && load_extend_op (inner_mode) == ZERO_EXTEND))
12663 : break;
12664 : /* FALLTHROUGH to case ZERO_EXTEND */
12665 : }
12666 387438 : else if (subreg_lowpart_p (op0)
12667 356501 : && GET_MODE_CLASS (mode) == MODE_INT
12668 356501 : && is_int_mode (GET_MODE (SUBREG_REG (op0)), &inner_mode)
12669 354687 : && (code == NE || code == EQ)
12670 272731 : && GET_MODE_PRECISION (inner_mode) <= HOST_BITS_PER_WIDE_INT
12671 266901 : && !paradoxical_subreg_p (op0)
12672 654339 : && (nonzero_bits (SUBREG_REG (op0), inner_mode)
12673 266901 : & ~GET_MODE_MASK (mode)) == 0)
12674 : {
12675 : /* Remove outer subregs that don't do anything. */
12676 108214 : tem = gen_lowpart (inner_mode, op1);
12677 :
12678 108214 : if ((nonzero_bits (tem, inner_mode)
12679 108214 : & ~GET_MODE_MASK (mode)) == 0)
12680 : {
12681 107593 : op0 = SUBREG_REG (op0);
12682 107593 : op1 = tem;
12683 107593 : continue;
12684 : }
12685 : break;
12686 : }
12687 : else
12688 : break;
12689 :
12690 : /* FALLTHROUGH */
12691 :
12692 40381 : case ZERO_EXTEND:
12693 40381 : if (is_int_mode (GET_MODE (XEXP (op0, 0)), &mode)
12694 40381 : && (unsigned_comparison_p || equality_comparison_p)
12695 40339 : && HWI_COMPUTABLE_MODE_P (mode)
12696 40339 : && (unsigned HOST_WIDE_INT) const_op <= GET_MODE_MASK (mode)
12697 40339 : && const_op >= 0
12698 40330 : && have_insn_for (COMPARE, mode))
12699 : {
12700 40330 : op0 = XEXP (op0, 0);
12701 40330 : continue;
12702 : }
12703 : break;
12704 :
12705 452014 : case PLUS:
12706 : /* (eq (plus X A) B) -> (eq X (minus B A)). We can only do
12707 : this for equality comparisons due to pathological cases involving
12708 : overflows. */
12709 506946 : if (equality_comparison_p
12710 452014 : && (tem = simplify_binary_operation (MINUS, mode,
12711 : op1, XEXP (op0, 1))) != 0)
12712 : {
12713 54932 : op0 = XEXP (op0, 0);
12714 54932 : op1 = tem;
12715 54932 : continue;
12716 : }
12717 :
12718 : /* (plus (abs X) (const_int -1)) is < 0 if and only if X == 0. */
12719 397082 : if (const_op == 0 && XEXP (op0, 1) == constm1_rtx
12720 12550 : && GET_CODE (XEXP (op0, 0)) == ABS && sign_bit_comparison_p)
12721 : {
12722 0 : op0 = XEXP (XEXP (op0, 0), 0);
12723 0 : code = (code == LT ? EQ : NE);
12724 0 : continue;
12725 : }
12726 : break;
12727 :
12728 173131 : case MINUS:
12729 : /* We used to optimize signed comparisons against zero, but that
12730 : was incorrect. Unsigned comparisons against zero (GTU, LEU)
12731 : arrive here as equality comparisons, or (GEU, LTU) are
12732 : optimized away. No need to special-case them. */
12733 :
12734 : /* (eq (minus A B) C) -> (eq A (plus B C)) or
12735 : (eq B (minus A C)), whichever simplifies. We can only do
12736 : this for equality comparisons due to pathological cases involving
12737 : overflows. */
12738 206016 : if (equality_comparison_p
12739 173131 : && (tem = simplify_binary_operation (PLUS, mode,
12740 : XEXP (op0, 1), op1)) != 0)
12741 : {
12742 32885 : op0 = XEXP (op0, 0);
12743 32885 : op1 = tem;
12744 32885 : continue;
12745 : }
12746 :
12747 171127 : if (equality_comparison_p
12748 140246 : && (tem = simplify_binary_operation (MINUS, mode,
12749 : XEXP (op0, 0), op1)) != 0)
12750 : {
12751 30881 : op0 = XEXP (op0, 1);
12752 30881 : op1 = tem;
12753 30881 : continue;
12754 : }
12755 :
12756 : /* The sign bit of (minus (ashiftrt X C) X), where C is the number
12757 : of bits in X minus 1, is one iff X > 0. */
12758 16793 : if (sign_bit_comparison_p && GET_CODE (XEXP (op0, 0)) == ASHIFTRT
12759 458 : && CONST_INT_P (XEXP (XEXP (op0, 0), 1))
12760 458 : && UINTVAL (XEXP (XEXP (op0, 0), 1)) == mode_width - 1
12761 109389 : && rtx_equal_p (XEXP (XEXP (op0, 0), 0), XEXP (op0, 1)))
12762 : {
12763 0 : op0 = XEXP (op0, 1);
12764 0 : code = (code == GE ? LE : GT);
12765 0 : continue;
12766 : }
12767 : break;
12768 :
12769 8945 : case XOR:
12770 : /* (eq (xor A B) C) -> (eq A (xor B C)). This is a simplification
12771 : if C is zero or B is a constant. */
12772 9092 : if (equality_comparison_p
12773 8945 : && (tem = simplify_binary_operation (XOR, mode,
12774 : XEXP (op0, 1), op1)) != 0)
12775 : {
12776 147 : op0 = XEXP (op0, 0);
12777 147 : op1 = tem;
12778 147 : continue;
12779 : }
12780 : break;
12781 :
12782 :
12783 376048 : case IOR:
12784 : /* The sign bit of (ior (plus X (const_int -1)) X) is nonzero
12785 : iff X <= 0. */
12786 7221 : if (sign_bit_comparison_p && GET_CODE (XEXP (op0, 0)) == PLUS
12787 1270 : && XEXP (XEXP (op0, 0), 1) == constm1_rtx
12788 376096 : && rtx_equal_p (XEXP (XEXP (op0, 0), 0), XEXP (op0, 1)))
12789 : {
12790 48 : op0 = XEXP (op0, 1);
12791 48 : code = (code == GE ? GT : LE);
12792 48 : continue;
12793 : }
12794 : break;
12795 :
12796 1678918 : case AND:
12797 : /* Convert (and (xshift 1 X) Y) to (and (lshiftrt Y X) 1). This
12798 : will be converted to a ZERO_EXTRACT later. */
12799 1678918 : if (const_op == 0 && equality_comparison_p
12800 1583366 : && GET_CODE (XEXP (op0, 0)) == ASHIFT
12801 60396 : && XEXP (XEXP (op0, 0), 0) == const1_rtx)
12802 : {
12803 6813 : op0 = gen_rtx_LSHIFTRT (mode, XEXP (op0, 1),
12804 : XEXP (XEXP (op0, 0), 1));
12805 6813 : op0 = simplify_and_const_int (NULL_RTX, mode, op0, 1);
12806 6813 : continue;
12807 : }
12808 :
12809 : /* If we are comparing (and (lshiftrt X C1) C2) for equality with
12810 : zero and X is a comparison and C1 and C2 describe only bits set
12811 : in STORE_FLAG_VALUE, we can compare with X. */
12812 1672105 : if (const_op == 0 && equality_comparison_p
12813 1576553 : && mode_width <= HOST_BITS_PER_WIDE_INT
12814 1572662 : && CONST_INT_P (XEXP (op0, 1))
12815 1242036 : && GET_CODE (XEXP (op0, 0)) == LSHIFTRT
12816 515472 : && CONST_INT_P (XEXP (XEXP (op0, 0), 1))
12817 502065 : && INTVAL (XEXP (XEXP (op0, 0), 1)) >= 0
12818 502065 : && INTVAL (XEXP (XEXP (op0, 0), 1)) < HOST_BITS_PER_WIDE_INT)
12819 : {
12820 502065 : mask = ((INTVAL (XEXP (op0, 1)) & GET_MODE_MASK (mode))
12821 502065 : << INTVAL (XEXP (XEXP (op0, 0), 1)));
12822 502065 : if ((~STORE_FLAG_VALUE & mask) == 0
12823 502065 : && (COMPARISON_P (XEXP (XEXP (op0, 0), 0))
12824 0 : || ((tem = get_last_value (XEXP (XEXP (op0, 0), 0))) != 0
12825 0 : && COMPARISON_P (tem))))
12826 : {
12827 0 : op0 = XEXP (XEXP (op0, 0), 0);
12828 0 : continue;
12829 : }
12830 : }
12831 :
12832 : /* If we are doing an equality comparison of an AND of a bit equal
12833 : to the sign bit, replace this with a LT or GE comparison of
12834 : the underlying value. */
12835 1672563 : if (equality_comparison_p
12836 : && const_op == 0
12837 1576553 : && CONST_INT_P (XEXP (op0, 1))
12838 1242349 : && mode_width <= HOST_BITS_PER_WIDE_INT
12839 1672105 : && ((INTVAL (XEXP (op0, 1)) & GET_MODE_MASK (mode))
12840 1242036 : == HOST_WIDE_INT_1U << (mode_width - 1)))
12841 : {
12842 458 : op0 = XEXP (op0, 0);
12843 458 : code = (code == EQ ? GE : LT);
12844 458 : continue;
12845 : }
12846 :
12847 : /* If this AND operation is really a ZERO_EXTEND from a narrower
12848 : mode, the constant fits within that mode, and this is either an
12849 : equality or unsigned comparison, try to do this comparison in
12850 : the narrower mode.
12851 :
12852 : Note that in:
12853 :
12854 : (ne:DI (and:DI (reg:DI 4) (const_int 0xffffffff)) (const_int 0))
12855 : -> (ne:DI (reg:SI 4) (const_int 0))
12856 :
12857 : unless TARGET_TRULY_NOOP_TRUNCATION allows it or the register is
12858 : known to hold a value of the required mode the
12859 : transformation is invalid. */
12860 1687183 : if ((equality_comparison_p || unsigned_comparison_p)
12861 1656102 : && CONST_INT_P (XEXP (op0, 1))
12862 3854949 : && (i = exact_log2 ((UINTVAL (XEXP (op0, 1))
12863 1317312 : & GET_MODE_MASK (mode))
12864 : + 1)) >= 0
12865 881526 : && const_op >> i == 0
12866 4193739 : && int_mode_for_size (i, 1).exists (&tmode))
12867 : {
12868 15536 : op0 = gen_lowpart_or_truncate (tmode, XEXP (op0, 0));
12869 15536 : continue;
12870 : }
12871 :
12872 : /* If this is (and:M1 (subreg:M1 X:M2 0) (const_int C1)) where C1
12873 : fits in both M1 and M2 and the SUBREG is either paradoxical
12874 : or represents the low part, permute the SUBREG and the AND
12875 : and try again. */
12876 1656111 : if (GET_CODE (XEXP (op0, 0)) == SUBREG
12877 113963 : && CONST_INT_P (XEXP (op0, 1)))
12878 : {
12879 108618 : unsigned HOST_WIDE_INT c1 = INTVAL (XEXP (op0, 1));
12880 : /* Require an integral mode, to avoid creating something like
12881 : (AND:SF ...). */
12882 155692 : if ((is_a <scalar_int_mode>
12883 108618 : (GET_MODE (SUBREG_REG (XEXP (op0, 0))), &tmode))
12884 : /* It is unsafe to commute the AND into the SUBREG if the
12885 : SUBREG is paradoxical and WORD_REGISTER_OPERATIONS is
12886 : not defined. As originally written the upper bits
12887 : have a defined value due to the AND operation.
12888 : However, if we commute the AND inside the SUBREG then
12889 : they no longer have defined values and the meaning of
12890 : the code has been changed.
12891 : Also C1 should not change value in the smaller mode,
12892 : see PR67028 (a positive C1 can become negative in the
12893 : smaller mode, so that the AND does no longer mask the
12894 : upper bits). */
12895 108585 : && ((WORD_REGISTER_OPERATIONS
12896 : && mode_width > GET_MODE_PRECISION (tmode)
12897 : && mode_width <= BITS_PER_WORD
12898 : && trunc_int_for_mode (c1, tmode) == (HOST_WIDE_INT) c1)
12899 108585 : || (mode_width <= GET_MODE_PRECISION (tmode)
12900 48768 : && subreg_lowpart_p (XEXP (op0, 0))))
12901 48742 : && mode_width <= HOST_BITS_PER_WIDE_INT
12902 48742 : && HWI_COMPUTABLE_MODE_P (tmode)
12903 48625 : && (c1 & ~mask) == 0
12904 47074 : && (c1 & ~GET_MODE_MASK (tmode)) == 0
12905 47074 : && c1 != mask
12906 47074 : && c1 != GET_MODE_MASK (tmode))
12907 : {
12908 47074 : op0 = simplify_gen_binary (AND, tmode,
12909 47074 : SUBREG_REG (XEXP (op0, 0)),
12910 47074 : gen_int_mode (c1, tmode));
12911 47074 : op0 = gen_lowpart (mode, op0);
12912 47074 : continue;
12913 : }
12914 : }
12915 :
12916 : /* Convert (ne (and (not X) 1) 0) to (eq (and X 1) 0). */
12917 1609037 : if (const_op == 0 && equality_comparison_p
12918 1520930 : && XEXP (op0, 1) == const1_rtx
12919 661968 : && GET_CODE (XEXP (op0, 0)) == NOT)
12920 : {
12921 4678 : op0 = simplify_and_const_int (NULL_RTX, mode,
12922 : XEXP (XEXP (op0, 0), 0), 1);
12923 4678 : code = (code == NE ? EQ : NE);
12924 4678 : continue;
12925 : }
12926 :
12927 : /* Convert (ne (and (lshiftrt (not X)) 1) 0) to
12928 : (eq (and (lshiftrt X) 1) 0).
12929 : Also handle the case where (not X) is expressed using xor. */
12930 1604359 : if (const_op == 0 && equality_comparison_p
12931 1516252 : && XEXP (op0, 1) == const1_rtx
12932 657290 : && GET_CODE (XEXP (op0, 0)) == LSHIFTRT)
12933 : {
12934 507751 : rtx shift_op = XEXP (XEXP (op0, 0), 0);
12935 507751 : rtx shift_count = XEXP (XEXP (op0, 0), 1);
12936 :
12937 510442 : if (GET_CODE (shift_op) == NOT
12938 507751 : || (GET_CODE (shift_op) == XOR
12939 4548 : && CONST_INT_P (XEXP (shift_op, 1))
12940 2691 : && CONST_INT_P (shift_count)
12941 2691 : && HWI_COMPUTABLE_MODE_P (mode)
12942 2691 : && (UINTVAL (XEXP (shift_op, 1))
12943 : == HOST_WIDE_INT_1U
12944 2691 : << INTVAL (shift_count))))
12945 : {
12946 2691 : op0
12947 2691 : = gen_rtx_LSHIFTRT (mode, XEXP (shift_op, 0), shift_count);
12948 2691 : op0 = simplify_and_const_int (NULL_RTX, mode, op0, 1);
12949 2691 : code = (code == NE ? EQ : NE);
12950 2691 : continue;
12951 : }
12952 : }
12953 : break;
12954 :
12955 52028 : case ASHIFT:
12956 : /* If we have (compare (ashift FOO N) (const_int C)) and
12957 : the high order N bits of FOO (N+1 if an inequality comparison)
12958 : are known to be zero, we can do this by comparing FOO with C
12959 : shifted right N bits so long as the low-order N bits of C are
12960 : zero. */
12961 52028 : if (CONST_INT_P (XEXP (op0, 1))
12962 48478 : && INTVAL (XEXP (op0, 1)) >= 0
12963 48478 : && ((INTVAL (XEXP (op0, 1)) + ! equality_comparison_p)
12964 : < HOST_BITS_PER_WIDE_INT)
12965 48478 : && (((unsigned HOST_WIDE_INT) const_op
12966 48478 : & ((HOST_WIDE_INT_1U << INTVAL (XEXP (op0, 1)))
12967 : - 1)) == 0)
12968 37509 : && mode_width <= HOST_BITS_PER_WIDE_INT
12969 89507 : && (nonzero_bits (XEXP (op0, 0), mode)
12970 37479 : & ~(mask >> (INTVAL (XEXP (op0, 1))
12971 37479 : + ! equality_comparison_p))) == 0)
12972 : {
12973 : /* We must perform a logical shift, not an arithmetic one,
12974 : as we want the top N bits of C to be zero. */
12975 472 : unsigned HOST_WIDE_INT temp = const_op & GET_MODE_MASK (mode);
12976 :
12977 472 : temp >>= INTVAL (XEXP (op0, 1));
12978 472 : op1 = gen_int_mode (temp, mode);
12979 472 : op0 = XEXP (op0, 0);
12980 472 : continue;
12981 472 : }
12982 :
12983 : /* If we are doing a sign bit comparison, it means we are testing
12984 : a particular bit. Convert it to the appropriate AND. */
12985 51556 : if (sign_bit_comparison_p && CONST_INT_P (XEXP (op0, 1))
12986 1614 : && mode_width <= HOST_BITS_PER_WIDE_INT)
12987 : {
12988 3228 : op0 = simplify_and_const_int (NULL_RTX, mode, XEXP (op0, 0),
12989 : (HOST_WIDE_INT_1U
12990 : << (mode_width - 1
12991 1614 : - INTVAL (XEXP (op0, 1)))));
12992 1614 : code = (code == LT ? NE : EQ);
12993 1614 : continue;
12994 : }
12995 :
12996 : /* If this an equality comparison with zero and we are shifting
12997 : the low bit to the sign bit, we can convert this to an AND of the
12998 : low-order bit. */
12999 49942 : if (const_op == 0 && equality_comparison_p
13000 13028 : && CONST_INT_P (XEXP (op0, 1))
13001 10645 : && UINTVAL (XEXP (op0, 1)) == mode_width - 1)
13002 : {
13003 332 : op0 = simplify_and_const_int (NULL_RTX, mode, XEXP (op0, 0), 1);
13004 332 : continue;
13005 : }
13006 : break;
13007 :
13008 45469 : case ASHIFTRT:
13009 : /* If this is an equality comparison with zero, we can do this
13010 : as a logical shift, which might be much simpler. */
13011 45469 : if (equality_comparison_p && const_op == 0
13012 24116 : && CONST_INT_P (XEXP (op0, 1)))
13013 : {
13014 46892 : op0 = simplify_shift_const (NULL_RTX, LSHIFTRT, mode,
13015 : XEXP (op0, 0),
13016 23446 : INTVAL (XEXP (op0, 1)));
13017 23446 : continue;
13018 : }
13019 :
13020 : /* If OP0 is a sign extension and CODE is not an unsigned comparison,
13021 : do the comparison in a narrower mode. */
13022 27081 : if (! unsigned_comparison_p
13023 19494 : && CONST_INT_P (XEXP (op0, 1))
13024 18788 : && GET_CODE (XEXP (op0, 0)) == ASHIFT
13025 5764 : && XEXP (op0, 1) == XEXP (XEXP (op0, 0), 1)
13026 5546 : && (int_mode_for_size (mode_width - INTVAL (XEXP (op0, 1)), 1)
13027 22023 : .exists (&tmode))
13028 22023 : && (((unsigned HOST_WIDE_INT) const_op
13029 5058 : + (GET_MODE_MASK (tmode) >> 1) + 1)
13030 5058 : <= GET_MODE_MASK (tmode)))
13031 : {
13032 5058 : op0 = gen_lowpart (tmode, XEXP (XEXP (op0, 0), 0));
13033 5058 : continue;
13034 : }
13035 :
13036 : /* Likewise if OP0 is a PLUS of a sign extension with a
13037 : constant, which is usually represented with the PLUS
13038 : between the shifts. */
13039 16965 : if (! unsigned_comparison_p
13040 14436 : && CONST_INT_P (XEXP (op0, 1))
13041 13730 : && GET_CODE (XEXP (op0, 0)) == PLUS
13042 54 : && CONST_INT_P (XEXP (XEXP (op0, 0), 1))
13043 22 : && GET_CODE (XEXP (XEXP (op0, 0), 0)) == ASHIFT
13044 2 : && XEXP (op0, 1) == XEXP (XEXP (XEXP (op0, 0), 0), 1)
13045 0 : && (int_mode_for_size (mode_width - INTVAL (XEXP (op0, 1)), 1)
13046 16965 : .exists (&tmode))
13047 16965 : && (((unsigned HOST_WIDE_INT) const_op
13048 0 : + (GET_MODE_MASK (tmode) >> 1) + 1)
13049 0 : <= GET_MODE_MASK (tmode)))
13050 : {
13051 0 : rtx inner = XEXP (XEXP (XEXP (op0, 0), 0), 0);
13052 0 : rtx add_const = XEXP (XEXP (op0, 0), 1);
13053 0 : rtx new_const = simplify_gen_binary (ASHIFTRT, mode,
13054 : add_const, XEXP (op0, 1));
13055 :
13056 0 : op0 = simplify_gen_binary (PLUS, tmode,
13057 0 : gen_lowpart (tmode, inner),
13058 : new_const);
13059 0 : continue;
13060 0 : }
13061 :
13062 : /* FALLTHROUGH */
13063 129379 : case LSHIFTRT:
13064 : /* If we have (compare (xshiftrt FOO N) (const_int C)) and
13065 : the low order N bits of FOO are known to be zero, we can do this
13066 : by comparing FOO with C shifted left N bits so long as no
13067 : overflow occurs. Even if the low order N bits of FOO aren't known
13068 : to be zero, if the comparison is >= or < we can use the same
13069 : optimization and for > or <= by setting all the low
13070 : order N bits in the comparison constant. */
13071 129379 : if (CONST_INT_P (XEXP (op0, 1))
13072 124860 : && INTVAL (XEXP (op0, 1)) > 0
13073 124860 : && INTVAL (XEXP (op0, 1)) < HOST_BITS_PER_WIDE_INT
13074 124500 : && mode_width <= HOST_BITS_PER_WIDE_INT
13075 129379 : && (((unsigned HOST_WIDE_INT) const_op
13076 247478 : + (GET_CODE (op0) != LSHIFTRT
13077 123739 : ? ((GET_MODE_MASK (mode) >> INTVAL (XEXP (op0, 1)) >> 1)
13078 : + 1)
13079 : : 0))
13080 123739 : <= GET_MODE_MASK (mode) >> INTVAL (XEXP (op0, 1))))
13081 : {
13082 123589 : unsigned HOST_WIDE_INT low_bits
13083 123589 : = (nonzero_bits (XEXP (op0, 0), mode)
13084 123589 : & ((HOST_WIDE_INT_1U
13085 123589 : << INTVAL (XEXP (op0, 1))) - 1));
13086 123589 : if (low_bits == 0 || !equality_comparison_p)
13087 : {
13088 : /* If the shift was logical, then we must make the condition
13089 : unsigned. */
13090 22022 : if (GET_CODE (op0) == LSHIFTRT)
13091 17958 : code = unsigned_condition (code);
13092 :
13093 22022 : const_op = (unsigned HOST_WIDE_INT) const_op
13094 22022 : << INTVAL (XEXP (op0, 1));
13095 22022 : if (low_bits != 0
13096 2752 : && (code == GT || code == GTU
13097 953 : || code == LE || code == LEU))
13098 2684 : const_op
13099 2684 : |= ((HOST_WIDE_INT_1 << INTVAL (XEXP (op0, 1))) - 1);
13100 22022 : op1 = GEN_INT (const_op);
13101 22022 : op0 = XEXP (op0, 0);
13102 22022 : continue;
13103 : }
13104 : }
13105 :
13106 : /* If we are using this shift to extract just the sign bit, we
13107 : can replace this with an LT or GE comparison. */
13108 107357 : if (const_op == 0
13109 90080 : && (equality_comparison_p || sign_bit_comparison_p)
13110 90044 : && CONST_INT_P (XEXP (op0, 1))
13111 85740 : && UINTVAL (XEXP (op0, 1)) == mode_width - 1)
13112 : {
13113 46713 : op0 = XEXP (op0, 0);
13114 46713 : code = (code == NE || code == GT ? LT : GE);
13115 46713 : continue;
13116 : }
13117 : break;
13118 :
13119 : default:
13120 : break;
13121 : }
13122 :
13123 : break;
13124 : }
13125 :
13126 : /* Now make any compound operations involved in this comparison. Then,
13127 : check for an outermost SUBREG on OP0 that is not doing anything or is
13128 : paradoxical. The latter transformation must only be performed when
13129 : it is known that the "extra" bits will be the same in op0 and op1 or
13130 : that they don't matter. There are three cases to consider:
13131 :
13132 : 1. SUBREG_REG (op0) is a register. In this case the bits are don't
13133 : care bits and we can assume they have any convenient value. So
13134 : making the transformation is safe.
13135 :
13136 : 2. SUBREG_REG (op0) is a memory and LOAD_EXTEND_OP is UNKNOWN.
13137 : In this case the upper bits of op0 are undefined. We should not make
13138 : the simplification in that case as we do not know the contents of
13139 : those bits.
13140 :
13141 : 3. SUBREG_REG (op0) is a memory and LOAD_EXTEND_OP is not UNKNOWN.
13142 : In that case we know those bits are zeros or ones. We must also be
13143 : sure that they are the same as the upper bits of op1.
13144 :
13145 : We can never remove a SUBREG for a non-equality comparison because
13146 : the sign bit is in a different place in the underlying object. */
13147 :
13148 23019274 : rtx_code op0_mco_code = SET;
13149 23019274 : if (op1 == const0_rtx)
13150 10773686 : op0_mco_code = code == NE || code == EQ ? EQ : COMPARE;
13151 :
13152 23019274 : op0 = make_compound_operation (op0, op0_mco_code);
13153 23019274 : op1 = make_compound_operation (op1, SET);
13154 :
13155 433122 : if (GET_CODE (op0) == SUBREG && subreg_lowpart_p (op0)
13156 401023 : && is_int_mode (GET_MODE (op0), &mode)
13157 373464 : && is_int_mode (GET_MODE (SUBREG_REG (op0)), &inner_mode)
13158 23389365 : && (code == NE || code == EQ))
13159 : {
13160 208141 : if (paradoxical_subreg_p (op0))
13161 : {
13162 : /* For paradoxical subregs, allow case 1 as above. Case 3 isn't
13163 : implemented. */
13164 0 : if (REG_P (SUBREG_REG (op0)))
13165 : {
13166 0 : op0 = SUBREG_REG (op0);
13167 0 : op1 = gen_lowpart (inner_mode, op1);
13168 : }
13169 : }
13170 208141 : else if (GET_MODE_PRECISION (inner_mode) <= HOST_BITS_PER_WIDE_INT
13171 208141 : && (nonzero_bits (SUBREG_REG (op0), inner_mode)
13172 201284 : & ~GET_MODE_MASK (mode)) == 0)
13173 : {
13174 28097 : tem = gen_lowpart (inner_mode, op1);
13175 :
13176 28097 : if ((nonzero_bits (tem, inner_mode) & ~GET_MODE_MASK (mode)) == 0)
13177 19396 : op0 = SUBREG_REG (op0), op1 = tem;
13178 : }
13179 : }
13180 :
13181 : /* We now do the opposite procedure: Some machines don't have compare
13182 : insns in all modes. If OP0's mode is an integer mode smaller than a
13183 : word and we can't do a compare in that mode, see if there is a larger
13184 : mode for which we can do the compare. There are a number of cases in
13185 : which we can use the wider mode. */
13186 :
13187 23019274 : if (is_int_mode (GET_MODE (op0), &mode)
13188 23842900 : && GET_MODE_SIZE (mode) < UNITS_PER_WORD
13189 8359803 : && ! have_insn_for (COMPARE, mode))
13190 0 : FOR_EACH_WIDER_MODE (tmode_iter, mode)
13191 : {
13192 0 : tmode = tmode_iter.require ();
13193 0 : if (!HWI_COMPUTABLE_MODE_P (tmode))
13194 : break;
13195 0 : if (have_insn_for (COMPARE, tmode))
13196 : {
13197 0 : int zero_extended;
13198 :
13199 : /* If this is a test for negative, we can make an explicit
13200 : test of the sign bit. Test this first so we can use
13201 : a paradoxical subreg to extend OP0. */
13202 :
13203 0 : if (op1 == const0_rtx && (code == LT || code == GE)
13204 0 : && HWI_COMPUTABLE_MODE_P (mode))
13205 : {
13206 0 : unsigned HOST_WIDE_INT sign
13207 0 : = HOST_WIDE_INT_1U << (GET_MODE_BITSIZE (mode) - 1);
13208 0 : op0 = simplify_gen_binary (AND, tmode,
13209 0 : gen_lowpart (tmode, op0),
13210 0 : gen_int_mode (sign, tmode));
13211 0 : code = (code == LT) ? NE : EQ;
13212 : break;
13213 : }
13214 :
13215 : /* If the only nonzero bits in OP0 and OP1 are those in the
13216 : narrower mode and this is an equality or unsigned comparison,
13217 : we can use the wider mode. Similarly for sign-extended
13218 : values, in which case it is true for all comparisons. */
13219 0 : zero_extended = ((code == EQ || code == NE
13220 0 : || code == GEU || code == GTU
13221 0 : || code == LEU || code == LTU)
13222 0 : && (nonzero_bits (op0, tmode)
13223 0 : & ~GET_MODE_MASK (mode)) == 0
13224 0 : && ((CONST_INT_P (op1)
13225 0 : || (nonzero_bits (op1, tmode)
13226 0 : & ~GET_MODE_MASK (mode)) == 0)));
13227 :
13228 0 : if (zero_extended
13229 0 : || ((num_sign_bit_copies (op0, tmode)
13230 0 : > (unsigned int) (GET_MODE_PRECISION (tmode)
13231 0 : - GET_MODE_PRECISION (mode)))
13232 0 : && (num_sign_bit_copies (op1, tmode)
13233 0 : > (unsigned int) (GET_MODE_PRECISION (tmode)
13234 0 : - GET_MODE_PRECISION (mode)))))
13235 : {
13236 : /* If OP0 is an AND and we don't have an AND in MODE either,
13237 : make a new AND in the proper mode. */
13238 0 : if (GET_CODE (op0) == AND
13239 0 : && !have_insn_for (AND, mode))
13240 0 : op0 = simplify_gen_binary (AND, tmode,
13241 0 : gen_lowpart (tmode,
13242 : XEXP (op0, 0)),
13243 0 : gen_lowpart (tmode,
13244 : XEXP (op0, 1)));
13245 : else
13246 : {
13247 0 : if (zero_extended)
13248 : {
13249 0 : op0 = simplify_gen_unary (ZERO_EXTEND, tmode,
13250 : op0, mode);
13251 0 : op1 = simplify_gen_unary (ZERO_EXTEND, tmode,
13252 : op1, mode);
13253 : }
13254 : else
13255 : {
13256 0 : op0 = simplify_gen_unary (SIGN_EXTEND, tmode,
13257 : op0, mode);
13258 0 : op1 = simplify_gen_unary (SIGN_EXTEND, tmode,
13259 : op1, mode);
13260 : }
13261 : break;
13262 : }
13263 : }
13264 : }
13265 : }
13266 :
13267 : /* We may have changed the comparison operands. Re-canonicalize. */
13268 23019274 : if (swap_commutative_operands_p (op0, op1))
13269 : {
13270 58309 : std::swap (op0, op1);
13271 58309 : code = swap_condition (code);
13272 : }
13273 :
13274 : /* If this machine only supports a subset of valid comparisons, see if we
13275 : can convert an unsupported one into a supported one. */
13276 23019274 : target_canonicalize_comparison (&code, &op0, &op1, 0);
13277 :
13278 23019274 : *pop0 = op0;
13279 23019274 : *pop1 = op1;
13280 :
13281 23019274 : return code;
13282 : }
13283 :
13284 : /* Utility function for record_value_for_reg. Count number of
13285 : rtxs in X. */
13286 : static int
13287 1963 : count_rtxs (rtx x)
13288 : {
13289 1963 : enum rtx_code code = GET_CODE (x);
13290 1963 : const char *fmt;
13291 1963 : int i, j, ret = 1;
13292 :
13293 1963 : if (GET_RTX_CLASS (code) == RTX_BIN_ARITH
13294 1963 : || GET_RTX_CLASS (code) == RTX_COMM_ARITH)
13295 : {
13296 82 : rtx x0 = XEXP (x, 0);
13297 82 : rtx x1 = XEXP (x, 1);
13298 :
13299 82 : if (x0 == x1)
13300 0 : return 1 + 2 * count_rtxs (x0);
13301 :
13302 82 : if ((GET_RTX_CLASS (GET_CODE (x1)) == RTX_BIN_ARITH
13303 82 : || GET_RTX_CLASS (GET_CODE (x1)) == RTX_COMM_ARITH)
13304 0 : && (x0 == XEXP (x1, 0) || x0 == XEXP (x1, 1)))
13305 0 : return 2 + 2 * count_rtxs (x0)
13306 0 : + count_rtxs (x == XEXP (x1, 0)
13307 0 : ? XEXP (x1, 1) : XEXP (x1, 0));
13308 :
13309 82 : if ((GET_RTX_CLASS (GET_CODE (x0)) == RTX_BIN_ARITH
13310 82 : || GET_RTX_CLASS (GET_CODE (x0)) == RTX_COMM_ARITH)
13311 0 : && (x1 == XEXP (x0, 0) || x1 == XEXP (x0, 1)))
13312 0 : return 2 + 2 * count_rtxs (x1)
13313 0 : + count_rtxs (x == XEXP (x0, 0)
13314 0 : ? XEXP (x0, 1) : XEXP (x0, 0));
13315 : }
13316 :
13317 1963 : fmt = GET_RTX_FORMAT (code);
13318 4737 : for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
13319 2774 : if (fmt[i] == 'e')
13320 1120 : ret += count_rtxs (XEXP (x, i));
13321 1654 : else if (fmt[i] == 'E')
13322 304 : for (j = 0; j < XVECLEN (x, i); j++)
13323 236 : ret += count_rtxs (XVECEXP (x, i, j));
13324 :
13325 : return ret;
13326 : }
13327 :
13328 : /* Utility function for following routine. Called when X is part of a value
13329 : being stored into last_set_value. Sets last_set_table_tick
13330 : for each register mentioned. Similar to mention_regs in cse.cc */
13331 :
13332 : static void
13333 240824245 : update_table_tick (rtx x)
13334 : {
13335 241492685 : enum rtx_code code = GET_CODE (x);
13336 241492685 : const char *fmt = GET_RTX_FORMAT (code);
13337 241492685 : int i, j;
13338 :
13339 241492685 : if (code == REG)
13340 : {
13341 81000128 : unsigned int regno = REGNO (x);
13342 81000128 : unsigned int endregno = END_REGNO (x);
13343 81000128 : unsigned int r;
13344 :
13345 162111468 : for (r = regno; r < endregno; r++)
13346 : {
13347 81111340 : reg_stat_type *rsp = ®_stat[r];
13348 81111340 : rsp->last_set_table_tick = label_tick;
13349 : }
13350 :
13351 : return;
13352 : }
13353 :
13354 413865936 : for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
13355 254162553 : if (fmt[i] == 'e')
13356 : {
13357 : /* Check for identical subexpressions. If x contains
13358 : identical subexpression we only have to traverse one of
13359 : them. */
13360 149604644 : if (i == 0 && ARITHMETIC_P (x))
13361 : {
13362 : /* Note that at this point x1 has already been
13363 : processed. */
13364 58320866 : rtx x0 = XEXP (x, 0);
13365 58320866 : rtx x1 = XEXP (x, 1);
13366 :
13367 : /* If x0 and x1 are identical then there is no need to
13368 : process x0. */
13369 58320866 : if (x0 == x1)
13370 : break;
13371 :
13372 : /* If x0 is identical to a subexpression of x1 then while
13373 : processing x1, x0 has already been processed. Thus we
13374 : are done with x. */
13375 58200271 : if (ARITHMETIC_P (x1)
13376 403128 : && (x0 == XEXP (x1, 0) || x0 == XEXP (x1, 1)))
13377 : break;
13378 :
13379 : /* If x1 is identical to a subexpression of x0 then we
13380 : still have to process the rest of x0. */
13381 58200132 : if (ARITHMETIC_P (x0)
13382 16131419 : && (x1 == XEXP (x0, 0) || x1 == XEXP (x0, 1)))
13383 : {
13384 668440 : update_table_tick (XEXP (x0, x1 == XEXP (x0, 0) ? 1 : 0));
13385 668440 : break;
13386 : }
13387 : }
13388 :
13389 148815470 : update_table_tick (XEXP (x, i));
13390 : }
13391 104557909 : else if (fmt[i] == 'E')
13392 9653944 : for (j = 0; j < XVECLEN (x, i); j++)
13393 7066601 : update_table_tick (XVECEXP (x, i, j));
13394 : }
13395 :
13396 : /* Record that REG is set to VALUE in insn INSN. If VALUE is zero, we
13397 : are saying that the register is clobbered and we no longer know its
13398 : value. If INSN is zero, don't update reg_stat[].last_set; this is
13399 : only permitted with VALUE also zero and is used to invalidate the
13400 : register. */
13401 :
13402 : static void
13403 112199613 : record_value_for_reg (rtx reg, rtx_insn *insn, rtx value)
13404 : {
13405 112199613 : unsigned int regno = REGNO (reg);
13406 112199613 : unsigned int endregno = END_REGNO (reg);
13407 112199613 : unsigned int i;
13408 112199613 : reg_stat_type *rsp;
13409 :
13410 : /* If VALUE contains REG and we have a previous value for REG, substitute
13411 : the previous value. */
13412 112199613 : if (value && insn && reg_overlap_mentioned_p (reg, value))
13413 : {
13414 6158421 : rtx tem;
13415 :
13416 : /* Set things up so get_last_value is allowed to see anything set up to
13417 : our insn. */
13418 6158421 : subst_low_luid = DF_INSN_LUID (insn);
13419 6158421 : tem = get_last_value (reg);
13420 :
13421 : /* If TEM is simply a binary operation with two CLOBBERs as operands,
13422 : it isn't going to be useful and will take a lot of time to process,
13423 : so just use the CLOBBER. */
13424 :
13425 6158421 : if (tem)
13426 : {
13427 2463779 : if (ARITHMETIC_P (tem)
13428 2244407 : && GET_CODE (XEXP (tem, 0)) == CLOBBER
13429 1066825 : && GET_CODE (XEXP (tem, 1)) == CLOBBER)
13430 : tem = XEXP (tem, 0);
13431 2462624 : else if (count_occurrences (value, reg, 1) >= 2)
13432 : {
13433 : /* If there are two or more occurrences of REG in VALUE,
13434 : prevent the value from growing too much. */
13435 607 : if (count_rtxs (tem) > param_max_last_value_rtl)
13436 0 : tem = gen_rtx_CLOBBER (GET_MODE (tem), const0_rtx);
13437 : }
13438 :
13439 2463779 : value = replace_rtx (copy_rtx (value), reg, tem);
13440 : }
13441 : }
13442 :
13443 : /* For each register modified, show we don't know its value, that
13444 : we don't know about its bitwise content, that its value has been
13445 : updated, and that we don't know the location of the death of the
13446 : register. */
13447 224743751 : for (i = regno; i < endregno; i++)
13448 : {
13449 112544138 : rsp = ®_stat[i];
13450 :
13451 112544138 : if (insn)
13452 102461249 : rsp->last_set = insn;
13453 :
13454 112544138 : rsp->last_set_value = 0;
13455 112544138 : rsp->last_set_mode = VOIDmode;
13456 112544138 : rsp->last_set_nonzero_bits = 0;
13457 112544138 : rsp->last_set_sign_bit_copies = 0;
13458 112544138 : rsp->last_death = 0;
13459 112544138 : rsp->truncated_to_mode = VOIDmode;
13460 : }
13461 :
13462 : /* Mark registers that are being referenced in this value. */
13463 112199613 : if (value)
13464 84942174 : update_table_tick (value);
13465 :
13466 : /* Now update the status of each register being set.
13467 : If someone is using this register in this block, set this register
13468 : to invalid since we will get confused between the two lives in this
13469 : basic block. This makes using this register always invalid. In cse, we
13470 : scan the table to invalidate all entries using this register, but this
13471 : is too much work for us. */
13472 :
13473 224743751 : for (i = regno; i < endregno; i++)
13474 : {
13475 112544138 : rsp = ®_stat[i];
13476 112544138 : rsp->last_set_label = label_tick;
13477 112544138 : if (!insn
13478 102461249 : || (value && rsp->last_set_table_tick >= label_tick_ebb_start))
13479 20371578 : rsp->last_set_invalid = true;
13480 : else
13481 92172560 : rsp->last_set_invalid = false;
13482 : }
13483 :
13484 : /* The value being assigned might refer to X (like in "x++;"). In that
13485 : case, we must replace it with (clobber (const_int 0)) to prevent
13486 : infinite loops. */
13487 112199613 : rsp = ®_stat[regno];
13488 112199613 : if (value && !get_last_value_validate (&value, insn, label_tick, false))
13489 : {
13490 10961584 : value = copy_rtx (value);
13491 10961584 : if (!get_last_value_validate (&value, insn, label_tick, true))
13492 0 : value = 0;
13493 : }
13494 :
13495 : /* For the main register being modified, update the value, the mode, the
13496 : nonzero bits, and the number of sign bit copies. */
13497 :
13498 112199613 : rsp->last_set_value = value;
13499 :
13500 112199613 : if (value)
13501 : {
13502 84942174 : machine_mode mode = GET_MODE (reg);
13503 84942174 : subst_low_luid = DF_INSN_LUID (insn);
13504 84942174 : rsp->last_set_mode = mode;
13505 84942174 : if (GET_MODE_CLASS (mode) == MODE_INT
13506 84942174 : && HWI_COMPUTABLE_MODE_P (mode))
13507 63763924 : mode = nonzero_bits_mode;
13508 84942174 : rsp->last_set_nonzero_bits = nonzero_bits (value, mode);
13509 84942174 : rsp->last_set_sign_bit_copies
13510 84942174 : = num_sign_bit_copies (value, GET_MODE (reg));
13511 : }
13512 112199613 : }
13513 :
13514 : /* Called via note_stores from record_dead_and_set_regs to handle one
13515 : SET or CLOBBER in an insn. DATA is the instruction in which the
13516 : set is occurring. */
13517 :
13518 : static void
13519 134267397 : record_dead_and_set_regs_1 (rtx dest, const_rtx setter, void *data)
13520 : {
13521 134267397 : rtx_insn *record_dead_insn = (rtx_insn *) data;
13522 :
13523 134267397 : if (GET_CODE (dest) == SUBREG)
13524 5 : dest = SUBREG_REG (dest);
13525 :
13526 134267397 : if (!record_dead_insn)
13527 : {
13528 4993110 : if (REG_P (dest))
13529 4993110 : record_value_for_reg (dest, NULL, NULL_RTX);
13530 4993110 : return;
13531 : }
13532 :
13533 129274287 : if (REG_P (dest))
13534 : {
13535 : /* If we are setting the whole register, we know its value. */
13536 102288991 : if (GET_CODE (setter) == SET && dest == SET_DEST (setter))
13537 84788654 : record_value_for_reg (dest, record_dead_insn, SET_SRC (setter));
13538 : /* We can handle a SUBREG if it's the low part, but we must be
13539 : careful with paradoxical SUBREGs on RISC architectures because
13540 : we cannot strip e.g. an extension around a load and record the
13541 : naked load since the RTL middle-end considers that the upper bits
13542 : are defined according to LOAD_EXTEND_OP. */
13543 17500337 : else if (GET_CODE (setter) == SET
13544 587137 : && GET_CODE (SET_DEST (setter)) == SUBREG
13545 573644 : && SUBREG_REG (SET_DEST (setter)) == dest
13546 929636 : && known_le (GET_MODE_PRECISION (GET_MODE (dest)),
13547 : BITS_PER_WORD)
13548 17605058 : && subreg_lowpart_p (SET_DEST (setter)))
13549 : {
13550 104721 : if (WORD_REGISTER_OPERATIONS
13551 : && word_register_operation_p (SET_SRC (setter))
13552 : && paradoxical_subreg_p (SET_DEST (setter)))
13553 : record_value_for_reg (dest, record_dead_insn, SET_SRC (setter));
13554 104721 : else if (!partial_subreg_p (SET_DEST (setter)))
13555 92739 : record_value_for_reg (dest, record_dead_insn,
13556 92739 : gen_lowpart (GET_MODE (dest),
13557 92739 : SET_SRC (setter)));
13558 : else
13559 : {
13560 11982 : record_value_for_reg (dest, record_dead_insn,
13561 11982 : gen_lowpart (GET_MODE (dest),
13562 11982 : SET_SRC (setter)));
13563 :
13564 11982 : unsigned HOST_WIDE_INT mask;
13565 11982 : reg_stat_type *rsp = ®_stat[REGNO (dest)];
13566 11982 : mask = GET_MODE_MASK (GET_MODE (SET_DEST (setter)));
13567 11982 : rsp->last_set_nonzero_bits |= ~mask;
13568 11982 : rsp->last_set_sign_bit_copies = 1;
13569 : }
13570 : }
13571 : /* Otherwise show that we don't know the value. */
13572 : else
13573 17395616 : record_value_for_reg (dest, record_dead_insn, NULL_RTX);
13574 : }
13575 26985296 : else if (MEM_P (dest)
13576 : /* Ignore pushes, they clobber nothing. */
13577 26985296 : && ! push_operand (dest, GET_MODE (dest)))
13578 13879696 : mem_last_set = DF_INSN_LUID (record_dead_insn);
13579 : }
13580 :
13581 : /* Update the records of when each REG was most recently set or killed
13582 : for the things done by INSN. This is the last thing done in processing
13583 : INSN in the combiner loop.
13584 :
13585 : We update reg_stat[], in particular fields last_set, last_set_value,
13586 : last_set_mode, last_set_nonzero_bits, last_set_sign_bit_copies,
13587 : last_death, and also the similar information mem_last_set (which insn
13588 : most recently modified memory) and last_call_luid (which insn was the
13589 : most recent subroutine call). */
13590 :
13591 : static void
13592 170524010 : record_dead_and_set_regs (rtx_insn *insn)
13593 : {
13594 170524010 : rtx link;
13595 170524010 : unsigned int i;
13596 :
13597 304914656 : for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
13598 : {
13599 134390646 : if (REG_NOTE_KIND (link) == REG_DEAD
13600 76756599 : && REG_P (XEXP (link, 0)))
13601 : {
13602 76756599 : unsigned int regno = REGNO (XEXP (link, 0));
13603 76756599 : unsigned int endregno = END_REGNO (XEXP (link, 0));
13604 :
13605 153711097 : for (i = regno; i < endregno; i++)
13606 : {
13607 76954498 : reg_stat_type *rsp;
13608 :
13609 76954498 : rsp = ®_stat[i];
13610 76954498 : rsp->last_death = insn;
13611 : }
13612 : }
13613 57634047 : else if (REG_NOTE_KIND (link) == REG_INC)
13614 0 : record_value_for_reg (XEXP (link, 0), insn, NULL_RTX);
13615 : }
13616 :
13617 170524010 : if (CALL_P (insn))
13618 : {
13619 9242557 : HARD_REG_SET callee_clobbers
13620 9242557 : = insn_callee_abi (insn).full_and_partial_reg_clobbers ();
13621 9242557 : hard_reg_set_iterator hrsi;
13622 763029098 : EXECUTE_IF_SET_IN_HARD_REG_SET (callee_clobbers, 0, i, hrsi)
13623 : {
13624 753786541 : reg_stat_type *rsp;
13625 :
13626 : /* ??? We could try to preserve some information from the last
13627 : set of register I if the call doesn't actually clobber
13628 : (reg:last_set_mode I), which might be true for ABIs with
13629 : partial clobbers. However, it would be difficult to
13630 : update last_set_nonzero_bits and last_sign_bit_copies
13631 : to account for the part of I that actually was clobbered.
13632 : It wouldn't help much anyway, since we rarely see this
13633 : situation before RA. */
13634 753786541 : rsp = ®_stat[i];
13635 753786541 : rsp->last_set_invalid = true;
13636 753786541 : rsp->last_set = insn;
13637 753786541 : rsp->last_set_value = 0;
13638 753786541 : rsp->last_set_mode = VOIDmode;
13639 753786541 : rsp->last_set_nonzero_bits = 0;
13640 753786541 : rsp->last_set_sign_bit_copies = 0;
13641 753786541 : rsp->last_death = 0;
13642 753786541 : rsp->truncated_to_mode = VOIDmode;
13643 : }
13644 :
13645 9242557 : last_call_luid = mem_last_set = DF_INSN_LUID (insn);
13646 :
13647 : /* We can't combine into a call pattern. Remember, though, that
13648 : the return value register is set at this LUID. We could
13649 : still replace a register with the return value from the
13650 : wrong subroutine call! */
13651 9242557 : note_stores (insn, record_dead_and_set_regs_1, NULL_RTX);
13652 : }
13653 : else
13654 161281453 : note_stores (insn, record_dead_and_set_regs_1, insn);
13655 170524010 : }
13656 :
13657 : /* If a SUBREG has the promoted bit set, it is in fact a property of the
13658 : register present in the SUBREG, so for each such SUBREG go back and
13659 : adjust nonzero and sign bit information of the registers that are
13660 : known to have some zero/sign bits set.
13661 :
13662 : This is needed because when combine blows the SUBREGs away, the
13663 : information on zero/sign bits is lost and further combines can be
13664 : missed because of that. */
13665 :
13666 : static void
13667 5971 : record_promoted_value (rtx_insn *insn, rtx subreg)
13668 : {
13669 5971 : struct insn_link *links;
13670 5971 : rtx set;
13671 5971 : unsigned int regno = REGNO (SUBREG_REG (subreg));
13672 5971 : machine_mode mode = GET_MODE (subreg);
13673 :
13674 5971 : if (!HWI_COMPUTABLE_MODE_P (mode))
13675 : return;
13676 :
13677 6640 : for (links = LOG_LINKS (insn); links;)
13678 : {
13679 5837 : reg_stat_type *rsp;
13680 :
13681 5837 : insn = links->insn;
13682 5837 : set = single_set (insn);
13683 :
13684 5837 : if (! set || !REG_P (SET_DEST (set))
13685 5833 : || REGNO (SET_DEST (set)) != regno
13686 11141 : || GET_MODE (SET_DEST (set)) != GET_MODE (SUBREG_REG (subreg)))
13687 : {
13688 533 : links = links->next;
13689 533 : continue;
13690 : }
13691 :
13692 5304 : rsp = ®_stat[regno];
13693 5304 : if (rsp->last_set == insn)
13694 : {
13695 5304 : if (SUBREG_PROMOTED_UNSIGNED_P (subreg))
13696 5304 : rsp->last_set_nonzero_bits &= GET_MODE_MASK (mode);
13697 : }
13698 :
13699 5304 : if (REG_P (SET_SRC (set)))
13700 : {
13701 136 : regno = REGNO (SET_SRC (set));
13702 136 : links = LOG_LINKS (insn);
13703 : }
13704 : else
13705 : break;
13706 : }
13707 : }
13708 :
13709 : /* Check if X, a register, is known to contain a value already
13710 : truncated to MODE. In this case we can use a subreg to refer to
13711 : the truncated value even though in the generic case we would need
13712 : an explicit truncation. */
13713 :
13714 : static bool
13715 0 : reg_truncated_to_mode (machine_mode mode, const_rtx x)
13716 : {
13717 0 : reg_stat_type *rsp = ®_stat[REGNO (x)];
13718 0 : machine_mode truncated = rsp->truncated_to_mode;
13719 :
13720 0 : if (truncated == 0
13721 0 : || rsp->truncation_label < label_tick_ebb_start)
13722 : return false;
13723 0 : if (!partial_subreg_p (mode, truncated))
13724 : return true;
13725 0 : if (TRULY_NOOP_TRUNCATION_MODES_P (mode, truncated))
13726 : return true;
13727 : return false;
13728 : }
13729 :
13730 : /* If X is a hard reg or a subreg record the mode that the register is
13731 : accessed in. For non-TARGET_TRULY_NOOP_TRUNCATION targets we might be
13732 : able to turn a truncate into a subreg using this information. Return true
13733 : if traversing X is complete. */
13734 :
13735 : static bool
13736 196687706 : record_truncated_value (rtx x)
13737 : {
13738 196687706 : machine_mode truncated_mode;
13739 196687706 : reg_stat_type *rsp;
13740 :
13741 196687706 : if (GET_CODE (x) == SUBREG && REG_P (SUBREG_REG (x)))
13742 : {
13743 1762947 : machine_mode original_mode = GET_MODE (SUBREG_REG (x));
13744 1762947 : truncated_mode = GET_MODE (x);
13745 :
13746 1762947 : if (!partial_subreg_p (truncated_mode, original_mode))
13747 : return true;
13748 :
13749 1051995 : truncated_mode = GET_MODE (x);
13750 1051995 : if (TRULY_NOOP_TRUNCATION_MODES_P (truncated_mode, original_mode))
13751 : return true;
13752 :
13753 0 : x = SUBREG_REG (x);
13754 0 : }
13755 : /* ??? For hard-regs we now record everything. We might be able to
13756 : optimize this using last_set_mode. */
13757 194924759 : else if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
13758 20674344 : truncated_mode = GET_MODE (x);
13759 : else
13760 : return false;
13761 :
13762 20674344 : rsp = ®_stat[REGNO (x)];
13763 20674344 : if (rsp->truncated_to_mode == 0
13764 9582594 : || rsp->truncation_label < label_tick_ebb_start
13765 29034411 : || partial_subreg_p (truncated_mode, rsp->truncated_to_mode))
13766 : {
13767 12314896 : rsp->truncated_to_mode = truncated_mode;
13768 12314896 : rsp->truncation_label = label_tick;
13769 : }
13770 :
13771 : return true;
13772 : }
13773 :
13774 : /* Callback for note_uses. Find hardregs and subregs of pseudos and
13775 : the modes they are used in. This can help turning TRUNCATEs into
13776 : SUBREGs. */
13777 :
13778 : static void
13779 75107555 : record_truncated_values (rtx *loc, void *data ATTRIBUTE_UNUSED)
13780 : {
13781 75107555 : subrtx_var_iterator::array_type array;
13782 271795261 : FOR_EACH_SUBRTX_VAR (iter, array, *loc, NONCONST)
13783 196687706 : if (record_truncated_value (*iter))
13784 22437291 : iter.skip_subrtxes ();
13785 75107555 : }
13786 :
13787 : /* Scan X for promoted SUBREGs. For each one found,
13788 : note what it implies to the registers used in it. */
13789 :
13790 : static void
13791 357513709 : check_promoted_subreg (rtx_insn *insn, rtx x)
13792 : {
13793 357513709 : if (GET_CODE (x) == SUBREG
13794 2129581 : && SUBREG_PROMOTED_VAR_P (x)
13795 357519680 : && REG_P (SUBREG_REG (x)))
13796 5971 : record_promoted_value (insn, x);
13797 : else
13798 : {
13799 357507738 : const char *format = GET_RTX_FORMAT (GET_CODE (x));
13800 357507738 : int i, j;
13801 :
13802 860574057 : for (i = 0; i < GET_RTX_LENGTH (GET_CODE (x)); i++)
13803 503066319 : switch (format[i])
13804 : {
13805 267699050 : case 'e':
13806 267699050 : check_promoted_subreg (insn, XEXP (x, i));
13807 267699050 : break;
13808 11622008 : case 'V':
13809 11622008 : case 'E':
13810 11622008 : if (XVEC (x, i) != 0)
13811 35937648 : for (j = 0; j < XVECLEN (x, i); j++)
13812 24315640 : check_promoted_subreg (insn, XVECEXP (x, i, j));
13813 : break;
13814 : }
13815 : }
13816 357513709 : }
13817 :
13818 : /* Verify that all the registers and memory references mentioned in *LOC are
13819 : still valid. *LOC was part of a value set in INSN when label_tick was
13820 : equal to TICK. Return false if some are not. If REPLACE is true, replace
13821 : the invalid references with (clobber (const_int 0)) and return true. This
13822 : replacement is useful because we often can get useful information about
13823 : the form of a value (e.g., if it was produced by a shift that always
13824 : produces -1 or 0) even though we don't know exactly what registers it
13825 : was produced from. */
13826 :
13827 : static bool
13828 485737943 : get_last_value_validate (rtx *loc, rtx_insn *insn, int tick, bool replace)
13829 : {
13830 485737943 : rtx x = *loc;
13831 485737943 : const char *fmt = GET_RTX_FORMAT (GET_CODE (x));
13832 485737943 : int len = GET_RTX_LENGTH (GET_CODE (x));
13833 485737943 : int i, j;
13834 :
13835 485737943 : if (REG_P (x))
13836 : {
13837 154764857 : unsigned int regno = REGNO (x);
13838 154764857 : unsigned int endregno = END_REGNO (x);
13839 154764857 : unsigned int j;
13840 :
13841 285896407 : for (j = regno; j < endregno; j++)
13842 : {
13843 154790401 : reg_stat_type *rsp = ®_stat[j];
13844 154790401 : if (rsp->last_set_invalid
13845 : /* If this is a pseudo-register that was only set once and not
13846 : live at the beginning of the function, it is always valid. */
13847 256041281 : || (! (regno >= FIRST_PSEUDO_REGISTER
13848 116579525 : && regno < reg_n_sets_max
13849 116559056 : && REG_N_SETS (regno) == 1
13850 202501760 : && (!REGNO_REG_SET_P
13851 : (DF_LR_IN (ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb),
13852 : regno)))
13853 30188383 : && rsp->last_set_label > tick))
13854 : {
13855 23658851 : if (replace)
13856 12221668 : *loc = gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);
13857 23658851 : return replace;
13858 : }
13859 : }
13860 :
13861 : return true;
13862 : }
13863 : /* If this is a memory reference, make sure that there were no stores after
13864 : it that might have clobbered the value. We don't have alias info, so we
13865 : assume any store invalidates it. Moreover, we only have local UIDs, so
13866 : we also assume that there were stores in the intervening basic blocks. */
13867 34125158 : else if (MEM_P (x) && !MEM_READONLY_P (x)
13868 363017632 : && (tick != label_tick || DF_INSN_LUID (insn) <= mem_last_set))
13869 : {
13870 7541599 : if (replace)
13871 3773337 : *loc = gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);
13872 7541599 : return replace;
13873 : }
13874 :
13875 807814600 : for (i = 0; i < len; i++)
13876 : {
13877 496071987 : if (fmt[i] == 'e')
13878 : {
13879 : /* Check for identical subexpressions. If x contains
13880 : identical subexpression we only have to traverse one of
13881 : them. */
13882 307909366 : if (i == 1 && ARITHMETIC_P (x))
13883 : {
13884 : /* Note that at this point x0 has already been checked
13885 : and found valid. */
13886 114085066 : rtx x0 = XEXP (x, 0);
13887 114085066 : rtx x1 = XEXP (x, 1);
13888 :
13889 : /* If x0 and x1 are identical then x is also valid. */
13890 114085066 : if (x0 == x1)
13891 : return true;
13892 :
13893 : /* If x1 is identical to a subexpression of x0 then
13894 : while checking x0, x1 has already been checked. Thus
13895 : it is valid and so as x. */
13896 113705000 : if (ARITHMETIC_P (x0)
13897 32634411 : && (x1 == XEXP (x0, 0) || x1 == XEXP (x0, 1)))
13898 : return true;
13899 :
13900 : /* If x0 is identical to a subexpression of x1 then x is
13901 : valid iff the rest of x1 is valid. */
13902 111690787 : if (ARITHMETIC_P (x1)
13903 1249759 : && (x0 == XEXP (x1, 0) || x0 == XEXP (x1, 1)))
13904 475 : return
13905 489 : get_last_value_validate (&XEXP (x1,
13906 : x0 == XEXP (x1, 0) ? 1 : 0),
13907 475 : insn, tick, replace);
13908 : }
13909 :
13910 305514612 : if (!get_last_value_validate (&XEXP (x, i), insn, tick, replace))
13911 : return false;
13912 : }
13913 188162621 : else if (fmt[i] == 'E')
13914 29311127 : for (j = 0; j < XVECLEN (x, i); j++)
13915 23184188 : if (!get_last_value_validate (&XVECEXP (x, i, j),
13916 : insn, tick, replace))
13917 : return false;
13918 : }
13919 :
13920 : /* If we haven't found a reason for it to be invalid, it is valid. */
13921 : return true;
13922 : }
13923 :
13924 : /* Get the last value assigned to X, if known. Some registers
13925 : in the value may be replaced with (clobber (const_int 0)) if their value
13926 : is known longer known reliably. */
13927 :
13928 : static rtx
13929 218935854 : get_last_value (const_rtx x)
13930 : {
13931 218935854 : unsigned int regno;
13932 218935854 : rtx value;
13933 218935854 : reg_stat_type *rsp;
13934 :
13935 : /* If this is a non-paradoxical SUBREG, get the value of its operand and
13936 : then convert it to the desired mode. If this is a paradoxical SUBREG,
13937 : we cannot predict what values the "extra" bits might have. */
13938 218935854 : if (GET_CODE (x) == SUBREG
13939 11871726 : && subreg_lowpart_p (x)
13940 11418581 : && !paradoxical_subreg_p (x)
13941 225621392 : && (value = get_last_value (SUBREG_REG (x))) != 0)
13942 3428439 : return gen_lowpart (GET_MODE (x), value);
13943 :
13944 215507415 : if (!REG_P (x))
13945 : return 0;
13946 :
13947 187913039 : regno = REGNO (x);
13948 187913039 : rsp = ®_stat[regno];
13949 187913039 : value = rsp->last_set_value;
13950 :
13951 : /* If we don't have a value, or if it isn't for this basic block and
13952 : it's either a hard register, set more than once, or it's a live
13953 : at the beginning of the function, return 0.
13954 :
13955 : Because if it's not live at the beginning of the function then the reg
13956 : is always set before being used (is never used without being set).
13957 : And, if it's set only once, and it's always set before use, then all
13958 : uses must have the same last value, even if it's not from this basic
13959 : block. */
13960 :
13961 187913039 : if (value == 0
13962 187913039 : || (rsp->last_set_label < label_tick_ebb_start
13963 76107399 : && (regno < FIRST_PSEUDO_REGISTER
13964 75238635 : || regno >= reg_n_sets_max
13965 75238635 : || REG_N_SETS (regno) != 1
13966 16555778 : || REGNO_REG_SET_P
13967 : (DF_LR_IN (ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb), regno))))
13968 111350390 : return 0;
13969 :
13970 : /* If the value was set in a later insn than the ones we are processing,
13971 : we can't use it even if the register was only set once. */
13972 76562649 : if (rsp->last_set_label == label_tick
13973 76562649 : && DF_INSN_LUID (rsp->last_set) >= subst_low_luid)
13974 : return 0;
13975 :
13976 : /* If fewer bits were set than what we are asked for now, we cannot use
13977 : the value. */
13978 56892471 : if (maybe_lt (GET_MODE_PRECISION (rsp->last_set_mode),
13979 56892471 : GET_MODE_PRECISION (GET_MODE (x))))
13980 : return 0;
13981 :
13982 : /* If the value has all its registers valid, return it. */
13983 56891049 : if (get_last_value_validate (&value, rsp->last_set,
13984 : rsp->last_set_label, false))
13985 52647188 : return value;
13986 :
13987 : /* Otherwise, make a copy and replace any invalid register with
13988 : (clobber (const_int 0)). If that fails for some reason, return 0. */
13989 :
13990 4243861 : value = copy_rtx (value);
13991 4243861 : if (get_last_value_validate (&value, rsp->last_set,
13992 : rsp->last_set_label, true))
13993 4243861 : return value;
13994 :
13995 : return 0;
13996 : }
13997 :
13998 : /* Define three variables used for communication between the following
13999 : routines. */
14000 :
14001 : static unsigned int reg_dead_regno, reg_dead_endregno;
14002 : static int reg_dead_flag;
14003 : rtx reg_dead_reg;
14004 :
14005 : /* Function called via note_stores from reg_dead_at_p.
14006 :
14007 : If DEST is within [reg_dead_regno, reg_dead_endregno), set
14008 : reg_dead_flag to 1 if X is a CLOBBER and to -1 it is a SET. */
14009 :
14010 : static void
14011 604128 : reg_dead_at_p_1 (rtx dest, const_rtx x, void *data ATTRIBUTE_UNUSED)
14012 : {
14013 604128 : unsigned int regno, endregno;
14014 :
14015 604128 : if (!REG_P (dest))
14016 : return;
14017 :
14018 551754 : regno = REGNO (dest);
14019 551754 : endregno = END_REGNO (dest);
14020 551754 : if (reg_dead_endregno > regno && reg_dead_regno < endregno)
14021 273776 : reg_dead_flag = (GET_CODE (x) == CLOBBER) ? 1 : -1;
14022 : }
14023 :
14024 : /* Return true if REG is known to be dead at INSN.
14025 :
14026 : We scan backwards from INSN. If we hit a REG_DEAD note or a CLOBBER
14027 : referencing REG, it is dead. If we hit a SET referencing REG, it is
14028 : live. Otherwise, see if it is live or dead at the start of the basic
14029 : block we are in. Hard regs marked as being live in NEWPAT_USED_REGS
14030 : must be assumed to be always live. */
14031 :
14032 : static bool
14033 1508930 : reg_dead_at_p (rtx reg, rtx_insn *insn)
14034 : {
14035 1508930 : basic_block block;
14036 1508930 : unsigned int i;
14037 :
14038 : /* Set variables for reg_dead_at_p_1. */
14039 1508930 : reg_dead_regno = REGNO (reg);
14040 1508930 : reg_dead_endregno = END_REGNO (reg);
14041 1508930 : reg_dead_reg = reg;
14042 :
14043 1508930 : reg_dead_flag = 0;
14044 :
14045 : /* Check that reg isn't mentioned in NEWPAT_USED_REGS. For fixed registers
14046 : we allow the machine description to decide whether use-and-clobber
14047 : patterns are OK. */
14048 1508930 : if (reg_dead_regno < FIRST_PSEUDO_REGISTER)
14049 : {
14050 3017860 : for (i = reg_dead_regno; i < reg_dead_endregno; i++)
14051 1508930 : if (!fixed_regs[i] && TEST_HARD_REG_BIT (newpat_used_regs, i))
14052 : return false;
14053 : }
14054 :
14055 : /* Scan backwards until we find a REG_DEAD note, SET, CLOBBER, or
14056 : beginning of basic block. */
14057 1508930 : block = BLOCK_FOR_INSN (insn);
14058 727395 : for (;;)
14059 : {
14060 2236325 : if (INSN_P (insn))
14061 : {
14062 2084082 : if (find_regno_note (insn, REG_UNUSED, reg_dead_regno))
14063 : return true;
14064 :
14065 768583 : note_stores (insn, reg_dead_at_p_1, NULL);
14066 768583 : if (reg_dead_flag)
14067 136888 : return reg_dead_flag == 1 ? 1 : 0;
14068 :
14069 631695 : if (find_regno_note (insn, REG_DEAD, reg_dead_regno))
14070 : return true;
14071 : }
14072 :
14073 756712 : if (insn == BB_HEAD (block))
14074 : break;
14075 :
14076 727395 : insn = PREV_INSN (insn);
14077 : }
14078 :
14079 : /* Look at live-in sets for the basic block that we were in. */
14080 58634 : for (i = reg_dead_regno; i < reg_dead_endregno; i++)
14081 29317 : if (REGNO_REG_SET_P (df_get_live_in (block), i))
14082 : return false;
14083 :
14084 : return true;
14085 : }
14086 :
14087 : /* Note hard registers in X that are used. */
14088 :
14089 : static void
14090 283113215 : mark_used_regs_combine (rtx x)
14091 : {
14092 327348341 : RTX_CODE code = GET_CODE (x);
14093 327348341 : unsigned int regno;
14094 327348341 : int i;
14095 :
14096 327348341 : switch (code)
14097 : {
14098 : case LABEL_REF:
14099 : case SYMBOL_REF:
14100 : case CONST:
14101 : CASE_CONST_ANY:
14102 : case PC:
14103 : case ADDR_VEC:
14104 : case ADDR_DIFF_VEC:
14105 : case ASM_INPUT:
14106 : return;
14107 :
14108 7097979 : case CLOBBER:
14109 : /* If we are clobbering a MEM, mark any hard registers inside the
14110 : address as used. */
14111 7097979 : if (MEM_P (XEXP (x, 0)))
14112 5574 : mark_used_regs_combine (XEXP (XEXP (x, 0), 0));
14113 : return;
14114 :
14115 74855005 : case REG:
14116 74855005 : regno = REGNO (x);
14117 : /* A hard reg in a wide mode may really be multiple registers.
14118 : If so, mark all of them just like the first. */
14119 74855005 : if (regno < FIRST_PSEUDO_REGISTER)
14120 : {
14121 : /* None of this applies to the stack, frame or arg pointers. */
14122 8852529 : if (regno == STACK_POINTER_REGNUM
14123 8852529 : || (!HARD_FRAME_POINTER_IS_FRAME_POINTER
14124 : && regno == HARD_FRAME_POINTER_REGNUM)
14125 7918569 : || (FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
14126 1095619 : && regno == ARG_POINTER_REGNUM && fixed_regs[regno])
14127 6822950 : || regno == FRAME_POINTER_REGNUM)
14128 : return;
14129 :
14130 1593168 : add_to_hard_reg_set (&newpat_used_regs, GET_MODE (x), regno);
14131 : }
14132 : return;
14133 :
14134 44229552 : case SET:
14135 44229552 : {
14136 : /* If setting a MEM, or a SUBREG of a MEM, then note any hard regs in
14137 : the address. */
14138 44229552 : rtx testreg = SET_DEST (x);
14139 :
14140 44229552 : while (GET_CODE (testreg) == SUBREG
14141 44245071 : || GET_CODE (testreg) == ZERO_EXTRACT
14142 88803268 : || GET_CODE (testreg) == STRICT_LOW_PART)
14143 336573 : testreg = XEXP (testreg, 0);
14144 :
14145 44229552 : if (MEM_P (testreg))
14146 4852037 : mark_used_regs_combine (XEXP (testreg, 0));
14147 :
14148 44229552 : mark_used_regs_combine (SET_SRC (x));
14149 : }
14150 44229552 : return;
14151 :
14152 130736163 : default:
14153 130736163 : break;
14154 : }
14155 :
14156 : /* Recursively scan the operands of this expression. */
14157 :
14158 130736163 : {
14159 130736163 : const char *fmt = GET_RTX_FORMAT (code);
14160 :
14161 379212757 : for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
14162 : {
14163 248476594 : if (fmt[i] == 'e')
14164 201496326 : mark_used_regs_combine (XEXP (x, i));
14165 46980268 : else if (fmt[i] == 'E')
14166 : {
14167 : int j;
14168 :
14169 64133988 : for (j = 0; j < XVECLEN (x, i); j++)
14170 44415095 : mark_used_regs_combine (XVECEXP (x, i, j));
14171 : }
14172 : }
14173 : }
14174 : }
14175 :
14176 : /* Remove register number REGNO from the dead registers list of INSN.
14177 :
14178 : Return the note used to record the death, if there was one. */
14179 :
14180 : rtx
14181 3079706 : remove_death (unsigned int regno, rtx_insn *insn)
14182 : {
14183 3079706 : rtx note = find_regno_note (insn, REG_DEAD, regno);
14184 :
14185 3079706 : if (note)
14186 468421 : remove_note (insn, note);
14187 :
14188 3079706 : return note;
14189 : }
14190 :
14191 : /* For each register (hardware or pseudo) used within expression X, if its
14192 : death is in an instruction with luid between FROM_LUID (inclusive) and
14193 : TO_INSN (exclusive), put a REG_DEAD note for that register in the
14194 : list headed by PNOTES.
14195 :
14196 : That said, don't move registers killed by maybe_kill_insn.
14197 :
14198 : This is done when X is being merged by combination into TO_INSN. These
14199 : notes will then be distributed as needed. */
14200 :
14201 : static void
14202 24270304 : move_deaths (rtx x, rtx maybe_kill_insn, int from_luid, rtx_insn *to_insn,
14203 : rtx *pnotes)
14204 : {
14205 24797737 : const char *fmt;
14206 24797737 : int len, i;
14207 24797737 : enum rtx_code code = GET_CODE (x);
14208 :
14209 24797737 : if (code == REG)
14210 : {
14211 6116744 : unsigned int regno = REGNO (x);
14212 6116744 : rtx_insn *where_dead = reg_stat[regno].last_death;
14213 :
14214 : /* If we do not know where the register died, it may still die between
14215 : FROM_LUID and TO_INSN. If so, find it. This is PR83304. */
14216 6116744 : if (!where_dead || DF_INSN_LUID (where_dead) >= DF_INSN_LUID (to_insn))
14217 : {
14218 3309041 : rtx_insn *insn = prev_real_nondebug_insn (to_insn);
14219 3309041 : while (insn
14220 4911036 : && BLOCK_FOR_INSN (insn) == BLOCK_FOR_INSN (to_insn)
14221 9084356 : && DF_INSN_LUID (insn) >= from_luid)
14222 : {
14223 2218847 : if (dead_or_set_regno_p (insn, regno))
14224 : {
14225 585228 : if (find_regno_note (insn, REG_DEAD, regno))
14226 6116744 : where_dead = insn;
14227 : break;
14228 : }
14229 :
14230 1633619 : insn = prev_real_nondebug_insn (insn);
14231 : }
14232 : }
14233 :
14234 : /* Don't move the register if it gets killed in between from and to. */
14235 139034 : if (maybe_kill_insn && reg_set_p (x, maybe_kill_insn)
14236 6158065 : && ! reg_referenced_p (x, maybe_kill_insn))
14237 : return;
14238 :
14239 6075423 : if (where_dead
14240 3169937 : && BLOCK_FOR_INSN (where_dead) == BLOCK_FOR_INSN (to_insn)
14241 2985660 : && DF_INSN_LUID (where_dead) >= from_luid
14242 9060862 : && DF_INSN_LUID (where_dead) < DF_INSN_LUID (to_insn))
14243 : {
14244 2702094 : rtx note = remove_death (regno, where_dead);
14245 :
14246 : /* It is possible for the call above to return 0. This can occur
14247 : when last_death points to I2 or I1 that we combined with.
14248 : In that case make a new note.
14249 :
14250 : We must also check for the case where X is a hard register
14251 : and NOTE is a death note for a range of hard registers
14252 : including X. In that case, we must put REG_DEAD notes for
14253 : the remaining registers in place of NOTE. */
14254 :
14255 2702094 : if (note != 0 && regno < FIRST_PSEUDO_REGISTER
14256 2702094 : && partial_subreg_p (GET_MODE (x), GET_MODE (XEXP (note, 0))))
14257 : {
14258 0 : unsigned int deadregno = REGNO (XEXP (note, 0));
14259 0 : unsigned int deadend = END_REGNO (XEXP (note, 0));
14260 0 : unsigned int ourend = END_REGNO (x);
14261 0 : unsigned int i;
14262 :
14263 0 : for (i = deadregno; i < deadend; i++)
14264 0 : if (i < regno || i >= ourend)
14265 0 : add_reg_note (where_dead, REG_DEAD, regno_reg_rtx[i]);
14266 : }
14267 :
14268 : /* If we didn't find any note, or if we found a REG_DEAD note that
14269 : covers only part of the given reg, and we have a multi-reg hard
14270 : register, then to be safe we must check for REG_DEAD notes
14271 : for each register other than the first. They could have
14272 : their own REG_DEAD notes lying around. */
14273 2702094 : else if ((note == 0
14274 : || (note != 0
14275 90847 : && partial_subreg_p (GET_MODE (XEXP (note, 0)),
14276 90847 : GET_MODE (x))))
14277 2611247 : && regno < FIRST_PSEUDO_REGISTER
14278 3001044 : && REG_NREGS (x) > 1)
14279 : {
14280 0 : unsigned int ourend = END_REGNO (x);
14281 0 : unsigned int i, offset;
14282 0 : rtx oldnotes = 0;
14283 :
14284 0 : if (note)
14285 0 : offset = hard_regno_nregs (regno, GET_MODE (XEXP (note, 0)));
14286 : else
14287 : offset = 1;
14288 :
14289 0 : for (i = regno + offset; i < ourend; i++)
14290 0 : move_deaths (regno_reg_rtx[i],
14291 : maybe_kill_insn, from_luid, to_insn, &oldnotes);
14292 : }
14293 :
14294 2702094 : if (note != 0 && GET_MODE (XEXP (note, 0)) == GET_MODE (x))
14295 : {
14296 90823 : XEXP (note, 1) = *pnotes;
14297 90823 : *pnotes = note;
14298 : }
14299 : else
14300 2611271 : *pnotes = alloc_reg_note (REG_DEAD, x, *pnotes);
14301 : }
14302 :
14303 6075423 : return;
14304 : }
14305 :
14306 18680993 : else if (GET_CODE (x) == SET)
14307 : {
14308 4165392 : rtx dest = SET_DEST (x);
14309 :
14310 4165392 : move_deaths (SET_SRC (x), maybe_kill_insn, from_luid, to_insn, pnotes);
14311 :
14312 : /* In the case of a ZERO_EXTRACT, a STRICT_LOW_PART, or a SUBREG
14313 : that accesses one word of a multi-word item, some
14314 : piece of everything register in the expression is used by
14315 : this insn, so remove any old death. */
14316 : /* ??? So why do we test for equality of the sizes? */
14317 :
14318 4165392 : if (GET_CODE (dest) == ZERO_EXTRACT
14319 4164959 : || GET_CODE (dest) == STRICT_LOW_PART
14320 8328616 : || (GET_CODE (dest) == SUBREG
14321 74676 : && !read_modify_subreg_p (dest)))
14322 : {
14323 61606 : move_deaths (dest, maybe_kill_insn, from_luid, to_insn, pnotes);
14324 61606 : return;
14325 : }
14326 :
14327 : /* If this is some other SUBREG, we know it replaces the entire
14328 : value, so use that as the destination. */
14329 4103786 : if (GET_CODE (dest) == SUBREG)
14330 15238 : dest = SUBREG_REG (dest);
14331 :
14332 : /* If this is a MEM, adjust deaths of anything used in the address.
14333 : For a REG (the only other possibility), the entire value is
14334 : being replaced so the old value is not used in this insn. */
14335 :
14336 4103786 : if (MEM_P (dest))
14337 465827 : move_deaths (XEXP (dest, 0), maybe_kill_insn, from_luid,
14338 : to_insn, pnotes);
14339 : return;
14340 : }
14341 :
14342 14515601 : else if (GET_CODE (x) == CLOBBER)
14343 : return;
14344 :
14345 13921080 : len = GET_RTX_LENGTH (code);
14346 13921080 : fmt = GET_RTX_FORMAT (code);
14347 :
14348 36292316 : for (i = 0; i < len; i++)
14349 : {
14350 22371236 : if (fmt[i] == 'E')
14351 : {
14352 969063 : int j;
14353 3447375 : for (j = XVECLEN (x, i) - 1; j >= 0; j--)
14354 2478312 : move_deaths (XVECEXP (x, i, j), maybe_kill_insn, from_luid,
14355 : to_insn, pnotes);
14356 : }
14357 21402173 : else if (fmt[i] == 'e')
14358 13528206 : move_deaths (XEXP (x, i), maybe_kill_insn, from_luid, to_insn, pnotes);
14359 : }
14360 : }
14361 :
14362 : /* Return true if X is the target of a bit-field assignment in BODY, the
14363 : pattern of an insn. X must be a REG. */
14364 :
14365 : static bool
14366 4673103 : reg_bitfield_target_p (rtx x, rtx body)
14367 : {
14368 4673103 : int i;
14369 :
14370 4673103 : if (GET_CODE (body) == SET)
14371 : {
14372 3435307 : rtx dest = SET_DEST (body);
14373 3435307 : rtx target;
14374 3435307 : unsigned int regno, tregno, endregno, endtregno;
14375 :
14376 3435307 : if (GET_CODE (dest) == ZERO_EXTRACT)
14377 434 : target = XEXP (dest, 0);
14378 3434873 : else if (GET_CODE (dest) == STRICT_LOW_PART)
14379 1984 : target = SUBREG_REG (XEXP (dest, 0));
14380 : else
14381 : return false;
14382 :
14383 2418 : if (GET_CODE (target) == SUBREG)
14384 227 : target = SUBREG_REG (target);
14385 :
14386 2418 : if (!REG_P (target))
14387 : return false;
14388 :
14389 2339 : tregno = REGNO (target), regno = REGNO (x);
14390 2339 : if (tregno >= FIRST_PSEUDO_REGISTER || regno >= FIRST_PSEUDO_REGISTER)
14391 2329 : return target == x;
14392 :
14393 10 : endtregno = end_hard_regno (GET_MODE (target), tregno);
14394 10 : endregno = end_hard_regno (GET_MODE (x), regno);
14395 :
14396 10 : return endregno > tregno && regno < endtregno;
14397 : }
14398 :
14399 1237796 : else if (GET_CODE (body) == PARALLEL)
14400 1853141 : for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
14401 1246092 : if (reg_bitfield_target_p (x, XVECEXP (body, 0, i)))
14402 : return true;
14403 :
14404 : return false;
14405 : }
14406 :
14407 : /* Given a chain of REG_NOTES originally from FROM_INSN, try to place them
14408 : as appropriate. I3 and I2 are the insns resulting from the combination
14409 : insns including FROM (I2 may be zero).
14410 :
14411 : ELIM_I2 and ELIM_I1 are either zero or registers that we know will
14412 : not need REG_DEAD notes because they are being substituted for. This
14413 : saves searching in the most common cases.
14414 :
14415 : Each note in the list is either ignored or placed on some insns, depending
14416 : on the type of note. */
14417 :
14418 : static void
14419 9779127 : distribute_notes (rtx notes, rtx_insn *from_insn, rtx_insn *i3, rtx_insn *i2,
14420 : rtx elim_i2, rtx elim_i1, rtx elim_i0)
14421 : {
14422 9779127 : rtx note, next_note;
14423 9779127 : rtx tem_note;
14424 9779127 : rtx_insn *tem_insn;
14425 :
14426 22732799 : for (note = notes; note; note = next_note)
14427 : {
14428 12953672 : rtx_insn *place = 0, *place2 = 0;
14429 :
14430 12953672 : next_note = XEXP (note, 1);
14431 12953672 : switch (REG_NOTE_KIND (note))
14432 : {
14433 : case REG_BR_PROB:
14434 : case REG_BR_PRED:
14435 : /* Doesn't matter much where we put this, as long as it's somewhere.
14436 : It is preferable to keep these notes on branches, which is most
14437 : likely to be i3. */
14438 : place = i3;
14439 : break;
14440 :
14441 0 : case REG_NON_LOCAL_GOTO:
14442 0 : if (JUMP_P (i3))
14443 : place = i3;
14444 : else
14445 : {
14446 0 : gcc_assert (i2 && JUMP_P (i2));
14447 : place = i2;
14448 : }
14449 : break;
14450 :
14451 20826 : case REG_EH_REGION:
14452 20826 : {
14453 : /* The landing pad handling needs to be kept in sync with the
14454 : prerequisite checking in try_combine. */
14455 20826 : int lp_nr = INTVAL (XEXP (note, 0));
14456 : /* A REG_EH_REGION note transfering control can only ever come
14457 : from i3. */
14458 20826 : if (lp_nr > 0)
14459 11573 : gcc_assert (from_insn == i3);
14460 : /* We are making sure there is a single effective REG_EH_REGION
14461 : note and it's valid to put it on i3. */
14462 20826 : if (!insn_could_throw_p (from_insn)
14463 20826 : && !(lp_nr == INT_MIN && can_nonlocal_goto (from_insn)))
14464 : /* Throw away stray notes on insns that can never throw or
14465 : make a nonlocal goto. */
14466 : ;
14467 : else
14468 : {
14469 20745 : if (CALL_P (i3))
14470 : place = i3;
14471 : else
14472 : {
14473 2087 : gcc_assert (cfun->can_throw_non_call_exceptions);
14474 : /* If i3 can still trap preserve the note, otherwise we've
14475 : combined things such that we can now prove that the
14476 : instructions can't trap. Drop the note in this case. */
14477 2087 : if (may_trap_p (i3))
14478 : place = i3;
14479 : }
14480 : }
14481 : break;
14482 : }
14483 :
14484 126166 : case REG_ARGS_SIZE:
14485 : /* ??? How to distribute between i3-i1. Assume i3 contains the
14486 : entire adjustment. Assert i3 contains at least some adjust. */
14487 126166 : if (!noop_move_p (i3))
14488 : {
14489 126165 : poly_int64 old_size, args_size = get_args_size (note);
14490 : /* fixup_args_size_notes looks at REG_NORETURN note,
14491 : so ensure the note is placed there first. */
14492 126165 : if (CALL_P (i3))
14493 : {
14494 : rtx *np;
14495 1641 : for (np = &next_note; *np; np = &XEXP (*np, 1))
14496 20 : if (REG_NOTE_KIND (*np) == REG_NORETURN)
14497 : {
14498 9 : rtx n = *np;
14499 9 : *np = XEXP (n, 1);
14500 9 : XEXP (n, 1) = REG_NOTES (i3);
14501 9 : REG_NOTES (i3) = n;
14502 9 : break;
14503 : }
14504 : }
14505 126165 : old_size = fixup_args_size_notes (PREV_INSN (i3), i3, args_size);
14506 : /* emit_call_1 adds for !ACCUMULATE_OUTGOING_ARGS
14507 : REG_ARGS_SIZE note to all noreturn calls, allow that here. */
14508 126165 : gcc_assert (maybe_ne (old_size, args_size)
14509 : || (CALL_P (i3)
14510 : && !ACCUMULATE_OUTGOING_ARGS
14511 : && find_reg_note (i3, REG_NORETURN, NULL_RTX)));
14512 : }
14513 : break;
14514 :
14515 79172 : case REG_NORETURN:
14516 79172 : case REG_SETJMP:
14517 79172 : case REG_TM:
14518 79172 : case REG_CALL_DECL:
14519 79172 : case REG_UNTYPED_CALL:
14520 79172 : case REG_CALL_NOCF_CHECK:
14521 : /* These notes must remain with the call. It should not be
14522 : possible for both I2 and I3 to be a call. */
14523 79172 : if (CALL_P (i3))
14524 : place = i3;
14525 : else
14526 : {
14527 0 : gcc_assert (i2 && CALL_P (i2));
14528 : place = i2;
14529 : }
14530 : break;
14531 :
14532 1994849 : case REG_UNUSED:
14533 : /* Any clobbers for i3 may still exist, and so we must process
14534 : REG_UNUSED notes from that insn.
14535 :
14536 : Any clobbers from i2 or i1 can only exist if they were added by
14537 : recog_for_combine. In that case, recog_for_combine created the
14538 : necessary REG_UNUSED notes. Trying to keep any original
14539 : REG_UNUSED notes from these insns can cause incorrect output
14540 : if it is for the same register as the original i3 dest.
14541 : In that case, we will notice that the register is set in i3,
14542 : and then add a REG_UNUSED note for the destination of i3, which
14543 : is wrong. However, it is possible to have REG_UNUSED notes from
14544 : i2 or i1 for register which were both used and clobbered, so
14545 : we keep notes from i2 or i1 if they will turn into REG_DEAD
14546 : notes. */
14547 :
14548 : /* If this register is set or clobbered between FROM_INSN and I3,
14549 : we should not create a note for it. */
14550 1994849 : if (reg_set_between_p (XEXP (note, 0), from_insn, i3))
14551 : break;
14552 :
14553 : /* If this register is set or clobbered in I3, put the note there
14554 : unless there is one already. */
14555 1911310 : if (reg_set_p (XEXP (note, 0), PATTERN (i3)))
14556 : {
14557 1100328 : if (from_insn != i3)
14558 : break;
14559 :
14560 617137 : if (! (REG_P (XEXP (note, 0))
14561 617137 : ? find_regno_note (i3, REG_UNUSED, REGNO (XEXP (note, 0)))
14562 0 : : find_reg_note (i3, REG_UNUSED, XEXP (note, 0))))
14563 : place = i3;
14564 : }
14565 : /* Otherwise, if this register is used by I3, then this register
14566 : now dies here, so we must put a REG_DEAD note here unless there
14567 : is one already. */
14568 810982 : else if (reg_referenced_p (XEXP (note, 0), PATTERN (i3)))
14569 : {
14570 7660 : if (! (REG_P (XEXP (note, 0))
14571 7660 : ? find_regno_note (i3, REG_DEAD, REGNO (XEXP (note, 0)))
14572 0 : : find_reg_note (i3, REG_DEAD, XEXP (note, 0))))
14573 : {
14574 7429 : PUT_REG_NOTE_KIND (note, REG_DEAD);
14575 7429 : place = i3;
14576 : }
14577 : }
14578 :
14579 : /* A SET or CLOBBER of the REG_UNUSED reg has been removed,
14580 : but we can't tell which at this point. We must reset any
14581 : expectations we had about the value that was previously
14582 : stored in the reg. ??? Ideally, we'd adjust REG_N_SETS
14583 : and, if appropriate, restore its previous value, but we
14584 : don't have enough information for that at this point. */
14585 : else
14586 : {
14587 803322 : record_value_for_reg (XEXP (note, 0), NULL, NULL_RTX);
14588 :
14589 : /* Otherwise, if this register is now referenced in i2
14590 : then the register used to be modified in one of the
14591 : original insns. If it was i3 (say, in an unused
14592 : parallel), it's now completely gone, so the note can
14593 : be discarded. But if it was modified in i2, i1 or i0
14594 : and we still reference it in i2, then we're
14595 : referencing the previous value, and since the
14596 : register was modified and REG_UNUSED, we know that
14597 : the previous value is now dead. So, if we only
14598 : reference the register in i2, we change the note to
14599 : REG_DEAD, to reflect the previous value. However, if
14600 : we're also setting or clobbering the register as
14601 : scratch, we know (because the register was not
14602 : referenced in i3) that it's unused, just as it was
14603 : unused before, and we place the note in i2. */
14604 17772 : if (from_insn != i3 && i2 && INSN_P (i2)
14605 821094 : && reg_referenced_p (XEXP (note, 0), PATTERN (i2)))
14606 : {
14607 13 : if (!reg_set_p (XEXP (note, 0), PATTERN (i2)))
14608 13 : PUT_REG_NOTE_KIND (note, REG_DEAD);
14609 13 : if (! (REG_P (XEXP (note, 0))
14610 13 : ? find_regno_note (i2, REG_NOTE_KIND (note),
14611 13 : REGNO (XEXP (note, 0)))
14612 0 : : find_reg_note (i2, REG_NOTE_KIND (note),
14613 : XEXP (note, 0))))
14614 : place = i2;
14615 : }
14616 : }
14617 :
14618 : break;
14619 :
14620 375924 : case REG_EQUAL:
14621 375924 : case REG_EQUIV:
14622 375924 : case REG_NOALIAS:
14623 : /* These notes say something about results of an insn. We can
14624 : only support them if they used to be on I3 in which case they
14625 : remain on I3. Otherwise they are ignored.
14626 :
14627 : If the note refers to an expression that is not a constant, we
14628 : must also ignore the note since we cannot tell whether the
14629 : equivalence is still true. It might be possible to do
14630 : slightly better than this (we only have a problem if I2DEST
14631 : or I1DEST is present in the expression), but it doesn't
14632 : seem worth the trouble. */
14633 :
14634 375924 : if (from_insn == i3
14635 179662 : && (XEXP (note, 0) == 0 || CONSTANT_P (XEXP (note, 0))))
14636 : place = i3;
14637 : break;
14638 :
14639 0 : case REG_INC:
14640 : /* These notes say something about how a register is used. They must
14641 : be present on any use of the register in I2 or I3. */
14642 0 : if (reg_mentioned_p (XEXP (note, 0), PATTERN (i3)))
14643 0 : place = i3;
14644 :
14645 0 : if (i2 && reg_mentioned_p (XEXP (note, 0), PATTERN (i2)))
14646 : {
14647 0 : if (place)
14648 : place2 = i2;
14649 : else
14650 : place = i2;
14651 : }
14652 : break;
14653 :
14654 6059 : case REG_LABEL_TARGET:
14655 6059 : case REG_LABEL_OPERAND:
14656 : /* This can show up in several ways -- either directly in the
14657 : pattern, or hidden off in the constant pool with (or without?)
14658 : a REG_EQUAL note. */
14659 : /* ??? Ignore the without-reg_equal-note problem for now. */
14660 6059 : if (reg_mentioned_p (XEXP (note, 0), PATTERN (i3))
14661 6059 : || ((tem_note = find_reg_note (i3, REG_EQUAL, NULL_RTX))
14662 0 : && GET_CODE (XEXP (tem_note, 0)) == LABEL_REF
14663 0 : && label_ref_label (XEXP (tem_note, 0)) == XEXP (note, 0)))
14664 : place = i3;
14665 :
14666 6059 : if (i2
14667 6059 : && (reg_mentioned_p (XEXP (note, 0), PATTERN (i2))
14668 0 : || ((tem_note = find_reg_note (i2, REG_EQUAL, NULL_RTX))
14669 0 : && GET_CODE (XEXP (tem_note, 0)) == LABEL_REF
14670 0 : && label_ref_label (XEXP (tem_note, 0)) == XEXP (note, 0))))
14671 : {
14672 0 : if (place)
14673 : place2 = i2;
14674 : else
14675 : place = i2;
14676 : }
14677 :
14678 : /* For REG_LABEL_TARGET on a JUMP_P, we prefer to put the note
14679 : as a JUMP_LABEL or decrement LABEL_NUSES if it's already
14680 : there. */
14681 6059 : if (place && JUMP_P (place)
14682 4868 : && REG_NOTE_KIND (note) == REG_LABEL_TARGET
14683 0 : && (JUMP_LABEL (place) == NULL
14684 0 : || JUMP_LABEL (place) == XEXP (note, 0)))
14685 : {
14686 0 : rtx label = JUMP_LABEL (place);
14687 :
14688 0 : if (!label)
14689 0 : JUMP_LABEL (place) = XEXP (note, 0);
14690 0 : else if (LABEL_P (label))
14691 0 : LABEL_NUSES (label)--;
14692 : }
14693 :
14694 6059 : if (place2 && JUMP_P (place2)
14695 0 : && REG_NOTE_KIND (note) == REG_LABEL_TARGET
14696 0 : && (JUMP_LABEL (place2) == NULL
14697 0 : || JUMP_LABEL (place2) == XEXP (note, 0)))
14698 : {
14699 0 : rtx label = JUMP_LABEL (place2);
14700 :
14701 0 : if (!label)
14702 0 : JUMP_LABEL (place2) = XEXP (note, 0);
14703 0 : else if (LABEL_P (label))
14704 0 : LABEL_NUSES (label)--;
14705 : place2 = 0;
14706 : }
14707 : break;
14708 :
14709 : case REG_NONNEG:
14710 : /* This note says something about the value of a register prior
14711 : to the execution of an insn. It is too much trouble to see
14712 : if the note is still correct in all situations. It is better
14713 : to simply delete it. */
14714 : break;
14715 :
14716 10311421 : case REG_DEAD:
14717 : /* If we replaced the right hand side of FROM_INSN with a
14718 : REG_EQUAL note, the original use of the dying register
14719 : will not have been combined into I3 and I2. In such cases,
14720 : FROM_INSN is guaranteed to be the first of the combined
14721 : instructions, so we simply need to search back before
14722 : FROM_INSN for the previous use or set of this register,
14723 : then alter the notes there appropriately.
14724 :
14725 : If the register is used as an input in I3, it dies there.
14726 : Similarly for I2, if it is nonzero and adjacent to I3.
14727 :
14728 : If the register is not used as an input in either I3 or I2
14729 : and it is not one of the registers we were supposed to eliminate,
14730 : there are two possibilities. We might have a non-adjacent I2
14731 : or we might have somehow eliminated an additional register
14732 : from a computation. For example, we might have had A & B where
14733 : we discover that B will always be zero. In this case we will
14734 : eliminate the reference to A.
14735 :
14736 : In both cases, we must search to see if we can find a previous
14737 : use of A and put the death note there. */
14738 :
14739 10311421 : if (from_insn
14740 7225213 : && from_insn == i2mod
14741 10313037 : && !reg_overlap_mentioned_p (XEXP (note, 0), i2mod_new_rhs))
14742 : tem_insn = from_insn;
14743 : else
14744 : {
14745 10310108 : if (from_insn
14746 7223900 : && CALL_P (from_insn)
14747 10537038 : && find_reg_fusage (from_insn, USE, XEXP (note, 0)))
14748 : place = from_insn;
14749 10162343 : else if (i2 && reg_set_p (XEXP (note, 0), PATTERN (i2)))
14750 : {
14751 : /* If the new I2 sets the same register that is marked
14752 : dead in the note, we do not in general know where to
14753 : put the note. One important case we _can_ handle is
14754 : when the note comes from I3. */
14755 38828 : if (from_insn == i3)
14756 : place = i3;
14757 : else
14758 : break;
14759 : }
14760 10123515 : else if (reg_referenced_p (XEXP (note, 0), PATTERN (i3)))
14761 : place = i3;
14762 101791 : else if (i2 != 0 && next_nonnote_nondebug_insn (i2) == i3
14763 4042103 : && reg_referenced_p (XEXP (note, 0), PATTERN (i2)))
14764 : place = i2;
14765 3897496 : else if ((rtx_equal_p (XEXP (note, 0), elim_i2)
14766 3781561 : && !(i2mod
14767 25806 : && reg_overlap_mentioned_p (XEXP (note, 0),
14768 : i2mod_old_rhs)))
14769 141751 : || rtx_equal_p (XEXP (note, 0), elim_i1)
14770 3949721 : || rtx_equal_p (XEXP (note, 0), elim_i0))
14771 : break;
14772 231781 : tem_insn = i3;
14773 : }
14774 :
14775 231781 : if (place == 0)
14776 : {
14777 49558 : basic_block bb = this_basic_block;
14778 :
14779 2188925 : for (tem_insn = PREV_INSN (tem_insn); place == 0; tem_insn = PREV_INSN (tem_insn))
14780 : {
14781 2188925 : if (!NONDEBUG_INSN_P (tem_insn))
14782 : {
14783 1565158 : if (tem_insn == BB_HEAD (bb))
14784 : break;
14785 1531391 : continue;
14786 : }
14787 :
14788 : /* If the register is being set at TEM_INSN, see if that is all
14789 : TEM_INSN is doing. If so, delete TEM_INSN. Otherwise, make this
14790 : into a REG_UNUSED note instead. Don't delete sets to
14791 : global register vars. */
14792 623767 : if ((REGNO (XEXP (note, 0)) >= FIRST_PSEUDO_REGISTER
14793 1287 : || !global_regs[REGNO (XEXP (note, 0))])
14794 625054 : && reg_set_p (XEXP (note, 0), PATTERN (tem_insn)))
14795 : {
14796 15569 : rtx set = single_set (tem_insn);
14797 15569 : rtx inner_dest = 0;
14798 :
14799 15569 : if (set != 0)
14800 12183 : for (inner_dest = SET_DEST (set);
14801 12440 : (GET_CODE (inner_dest) == STRICT_LOW_PART
14802 12440 : || GET_CODE (inner_dest) == SUBREG
14803 12440 : || GET_CODE (inner_dest) == ZERO_EXTRACT);
14804 257 : inner_dest = XEXP (inner_dest, 0))
14805 : ;
14806 :
14807 : /* Verify that it was the set, and not a clobber that
14808 : modified the register.
14809 :
14810 : If we cannot delete the setter due to side
14811 : effects, mark the user with an UNUSED note instead
14812 : of deleting it. */
14813 :
14814 12183 : if (set != 0 && ! side_effects_p (SET_SRC (set))
14815 11811 : && rtx_equal_p (XEXP (note, 0), inner_dest))
14816 : {
14817 : /* Move the notes and links of TEM_INSN elsewhere.
14818 : This might delete other dead insns recursively.
14819 : First set the pattern to something that won't use
14820 : any register. */
14821 11696 : rtx old_notes = REG_NOTES (tem_insn);
14822 :
14823 11696 : PATTERN (tem_insn) = pc_rtx;
14824 11696 : REG_NOTES (tem_insn) = NULL;
14825 :
14826 11696 : distribute_notes (old_notes, tem_insn, tem_insn, NULL,
14827 : NULL_RTX, NULL_RTX, NULL_RTX);
14828 11696 : distribute_links (LOG_LINKS (tem_insn));
14829 :
14830 11696 : unsigned int regno = REGNO (XEXP (note, 0));
14831 11696 : reg_stat_type *rsp = ®_stat[regno];
14832 11696 : if (rsp->last_set == tem_insn)
14833 10288 : record_value_for_reg (XEXP (note, 0), NULL, NULL_RTX);
14834 :
14835 11696 : SET_INSN_DELETED (tem_insn);
14836 11696 : if (tem_insn == i2)
14837 607976 : i2 = NULL;
14838 : }
14839 : else
14840 : {
14841 3873 : PUT_REG_NOTE_KIND (note, REG_UNUSED);
14842 :
14843 : /* If there isn't already a REG_UNUSED note, put one
14844 : here. Do not place a REG_DEAD note, even if
14845 : the register is also used here; that would not
14846 : match the algorithm used in lifetime analysis
14847 : and can cause the consistency check in the
14848 : scheduler to fail. */
14849 3873 : if (! find_regno_note (tem_insn, REG_UNUSED,
14850 3873 : REGNO (XEXP (note, 0))))
14851 2142 : place = tem_insn;
14852 : break;
14853 : }
14854 : }
14855 608198 : else if (reg_referenced_p (XEXP (note, 0), PATTERN (tem_insn))
14856 608198 : || (CALL_P (tem_insn)
14857 16219 : && find_reg_fusage (tem_insn, USE, XEXP (note, 0))))
14858 : {
14859 11918 : place = tem_insn;
14860 :
14861 : /* If we are doing a 3->2 combination, and we have a
14862 : register which formerly died in i3 and was not used
14863 : by i2, which now no longer dies in i3 and is used in
14864 : i2 but does not die in i2, and place is between i2
14865 : and i3, then we may need to move a link from place to
14866 : i2. */
14867 3263 : if (i2 && DF_INSN_LUID (place) > DF_INSN_LUID (i2)
14868 79 : && from_insn
14869 79 : && DF_INSN_LUID (from_insn) > DF_INSN_LUID (i2)
14870 11997 : && reg_referenced_p (XEXP (note, 0), PATTERN (i2)))
14871 : {
14872 79 : struct insn_link *links = LOG_LINKS (place);
14873 79 : LOG_LINKS (place) = NULL;
14874 79 : distribute_links (links);
14875 : }
14876 : break;
14877 : }
14878 :
14879 607976 : if (tem_insn == BB_HEAD (bb))
14880 : break;
14881 : }
14882 :
14883 : }
14884 :
14885 : /* If the register is set or already dead at PLACE, we needn't do
14886 : anything with this note if it is still a REG_DEAD note.
14887 : We check here if it is set at all, not if is it totally replaced,
14888 : which is what `dead_or_set_p' checks, so also check for it being
14889 : set partially. */
14890 :
14891 6459101 : if (place && REG_NOTE_KIND (note) == REG_DEAD)
14892 : {
14893 6421461 : unsigned int regno = REGNO (XEXP (note, 0));
14894 6421461 : reg_stat_type *rsp = ®_stat[regno];
14895 :
14896 6421461 : if (dead_or_set_p (place, XEXP (note, 0))
14897 6421461 : || reg_bitfield_target_p (XEXP (note, 0), PATTERN (place)))
14898 : {
14899 : /* Unless the register previously died in PLACE, clear
14900 : last_death. [I no longer understand why this is
14901 : being done.] */
14902 2994476 : if (rsp->last_death != place)
14903 577726 : rsp->last_death = 0;
14904 : place = 0;
14905 : }
14906 : else
14907 3426985 : rsp->last_death = place;
14908 :
14909 : /* If this is a death note for a hard reg that is occupying
14910 : multiple registers, ensure that we are still using all
14911 : parts of the object. If we find a piece of the object
14912 : that is unused, we must arrange for an appropriate REG_DEAD
14913 : note to be added for it. However, we can't just emit a USE
14914 : and tag the note to it, since the register might actually
14915 : be dead; so we recurse, and the recursive call then finds
14916 : the previous insn that used this register. */
14917 :
14918 3426985 : if (place && REG_NREGS (XEXP (note, 0)) > 1)
14919 : {
14920 777 : unsigned int endregno = END_REGNO (XEXP (note, 0));
14921 777 : bool all_used = true;
14922 777 : unsigned int i;
14923 :
14924 2331 : for (i = regno; i < endregno; i++)
14925 1554 : if ((! refers_to_regno_p (i, PATTERN (place))
14926 1554 : && ! find_regno_fusage (place, USE, i))
14927 3108 : || dead_or_set_regno_p (place, i))
14928 : {
14929 : all_used = false;
14930 : break;
14931 : }
14932 :
14933 777 : if (! all_used)
14934 : {
14935 : /* Put only REG_DEAD notes for pieces that are
14936 : not already dead or set. */
14937 :
14938 0 : for (i = regno; i < endregno;
14939 0 : i += hard_regno_nregs (i, reg_raw_mode[i]))
14940 : {
14941 0 : rtx piece = regno_reg_rtx[i];
14942 0 : basic_block bb = this_basic_block;
14943 :
14944 0 : if (! dead_or_set_p (place, piece)
14945 0 : && ! reg_bitfield_target_p (piece,
14946 0 : PATTERN (place)))
14947 : {
14948 0 : rtx new_note = alloc_reg_note (REG_DEAD, piece,
14949 : NULL_RTX);
14950 :
14951 0 : distribute_notes (new_note, place, place,
14952 : NULL, NULL_RTX, NULL_RTX,
14953 : NULL_RTX);
14954 : }
14955 0 : else if (! refers_to_regno_p (i, PATTERN (place))
14956 0 : && ! find_regno_fusage (place, USE, i))
14957 0 : for (tem_insn = PREV_INSN (place); ;
14958 0 : tem_insn = PREV_INSN (tem_insn))
14959 : {
14960 0 : if (!NONDEBUG_INSN_P (tem_insn))
14961 : {
14962 0 : if (tem_insn == BB_HEAD (bb))
14963 : break;
14964 0 : continue;
14965 : }
14966 0 : if (dead_or_set_p (tem_insn, piece)
14967 0 : || reg_bitfield_target_p (piece,
14968 0 : PATTERN (tem_insn)))
14969 : {
14970 0 : add_reg_note (tem_insn, REG_UNUSED, piece);
14971 0 : break;
14972 : }
14973 : }
14974 : }
14975 :
14976 : place = 0;
14977 : }
14978 : }
14979 : }
14980 : break;
14981 :
14982 0 : default:
14983 : /* Any other notes should not be present at this point in the
14984 : compilation. */
14985 0 : gcc_unreachable ();
14986 : }
14987 :
14988 4208914 : if (place)
14989 : {
14990 4180498 : XEXP (note, 1) = REG_NOTES (place);
14991 4180498 : REG_NOTES (place) = note;
14992 :
14993 : /* Set added_notes_insn to the earliest insn we added a note to. */
14994 4180498 : if (added_notes_insn == 0
14995 4180498 : || DF_INSN_LUID (added_notes_insn) > DF_INSN_LUID (place))
14996 2791038 : added_notes_insn = place;
14997 : }
14998 :
14999 12953672 : if (place2)
15000 : {
15001 0 : add_shallow_copy_of_reg_note (place2, note);
15002 :
15003 : /* Set added_notes_insn to the earliest insn we added a note to. */
15004 0 : if (added_notes_insn == 0
15005 0 : || DF_INSN_LUID (added_notes_insn) > DF_INSN_LUID (place2))
15006 0 : added_notes_insn = place2;
15007 : }
15008 : }
15009 9779127 : }
15010 :
15011 : /* Similarly to above, distribute the LOG_LINKS that used to be present on
15012 : I3, I2, and I1 to new locations. This is also called to add a link
15013 : pointing at I3 when I3's destination is changed.
15014 :
15015 : If START is nonnull and an insn, we know that the next location for each
15016 : link is no earlier than START. LIMIT is the maximum number of nondebug
15017 : instructions that can be scanned when looking for the next use of a
15018 : definition. */
15019 :
15020 : static void
15021 15940737 : distribute_links (struct insn_link *links, rtx_insn *start, int limit)
15022 : {
15023 15940737 : struct insn_link *link, *next_link;
15024 :
15025 23452283 : for (link = links; link; link = next_link)
15026 : {
15027 7511546 : rtx_insn *place = 0;
15028 7511546 : rtx_insn *insn;
15029 7511546 : rtx set, reg;
15030 :
15031 7511546 : next_link = link->next;
15032 :
15033 : /* If the insn that this link points to is a NOTE, ignore it. */
15034 7511546 : if (NOTE_P (link->insn))
15035 4015445 : continue;
15036 :
15037 3496101 : set = 0;
15038 3496101 : rtx pat = PATTERN (link->insn);
15039 3496101 : if (GET_CODE (pat) == SET)
15040 : set = pat;
15041 615689 : else if (GET_CODE (pat) == PARALLEL)
15042 : {
15043 : int i;
15044 727642 : for (i = 0; i < XVECLEN (pat, 0); i++)
15045 : {
15046 724641 : set = XVECEXP (pat, 0, i);
15047 724641 : if (GET_CODE (set) != SET)
15048 3010 : continue;
15049 :
15050 721631 : reg = SET_DEST (set);
15051 721631 : while (GET_CODE (reg) == ZERO_EXTRACT
15052 730166 : || GET_CODE (reg) == STRICT_LOW_PART
15053 1460250 : || GET_CODE (reg) == SUBREG)
15054 8542 : reg = XEXP (reg, 0);
15055 :
15056 721631 : if (!REG_P (reg))
15057 44135 : continue;
15058 :
15059 677496 : if (REGNO (reg) == link->regno)
15060 : break;
15061 : }
15062 613561 : if (i == XVECLEN (pat, 0))
15063 3001 : continue;
15064 : }
15065 : else
15066 2128 : continue;
15067 :
15068 3490972 : reg = SET_DEST (set);
15069 :
15070 3490972 : while (GET_CODE (reg) == ZERO_EXTRACT
15071 3513809 : || GET_CODE (reg) == STRICT_LOW_PART
15072 7027765 : || GET_CODE (reg) == SUBREG)
15073 23415 : reg = XEXP (reg, 0);
15074 :
15075 3490972 : if (reg == pc_rtx)
15076 407 : continue;
15077 :
15078 : /* A LOG_LINK is defined as being placed on the first insn that uses
15079 : a register and points to the insn that sets the register. Start
15080 : searching at the next insn after the target of the link and stop
15081 : when we reach a set of the register or the end of the basic block.
15082 :
15083 : Note that this correctly handles the link that used to point from
15084 : I3 to I2. Also note that not much searching is typically done here
15085 : since most links don't point very far away. */
15086 :
15087 3490565 : int count = 0;
15088 3490565 : insn = start;
15089 3490565 : if (!insn || NOTE_P (insn))
15090 3442142 : insn = NEXT_INSN (link->insn);
15091 : else
15092 48423 : count = link->insn_count;
15093 11394954 : for (;
15094 14885519 : (insn && (this_basic_block->next_bb == EXIT_BLOCK_PTR_FOR_FN (cfun)
15095 10156758 : || BB_HEAD (this_basic_block->next_bb) != insn));
15096 11394954 : insn = NEXT_INSN (insn))
15097 14847710 : if (DEBUG_INSN_P (insn))
15098 2989452 : continue;
15099 11858258 : else if (INSN_P (insn) && reg_overlap_mentioned_p (reg, PATTERN (insn)))
15100 : {
15101 3302401 : if (reg_referenced_p (reg, PATTERN (insn)))
15102 3302401 : place = insn;
15103 : break;
15104 : }
15105 8555857 : else if (CALL_P (insn)
15106 8555857 : && find_reg_fusage (insn, USE, reg))
15107 : {
15108 : place = insn;
15109 : break;
15110 : }
15111 8405601 : else if (INSN_P (insn) && reg_set_p (reg, insn))
15112 : break;
15113 8405502 : else if (count >= limit)
15114 : break;
15115 : else
15116 8405502 : count += 1;
15117 3490565 : link->insn_count = count;
15118 :
15119 : /* If we found a place to put the link, place it there unless there
15120 : is already a link to the same insn as LINK at that point. */
15121 :
15122 3490565 : if (place)
15123 : {
15124 3452657 : struct insn_link *link2;
15125 :
15126 4450387 : FOR_EACH_LOG_LINK (link2, place)
15127 1016403 : if (link2->insn == link->insn && link2->regno == link->regno)
15128 : break;
15129 :
15130 3452657 : if (link2 == NULL)
15131 : {
15132 3433984 : link->next = LOG_LINKS (place);
15133 3433984 : LOG_LINKS (place) = link;
15134 :
15135 : /* Set added_links_insn to the earliest insn we added a
15136 : link to. */
15137 3433984 : if (added_links_insn == 0
15138 3433984 : || DF_INSN_LUID (added_links_insn) > DF_INSN_LUID (place))
15139 2724611 : added_links_insn = place;
15140 : }
15141 : }
15142 : }
15143 15940737 : }
15144 :
15145 : /* Check for any register or memory mentioned in EQUIV that is not
15146 : mentioned in EXPR. This is used to restrict EQUIV to "specializations"
15147 : of EXPR where some registers may have been replaced by constants. */
15148 :
15149 : static bool
15150 2528104 : unmentioned_reg_p (rtx equiv, rtx expr)
15151 : {
15152 2528104 : subrtx_iterator::array_type array;
15153 6707421 : FOR_EACH_SUBRTX (iter, array, equiv, NONCONST)
15154 : {
15155 5482118 : const_rtx x = *iter;
15156 3747545 : if ((REG_P (x) || MEM_P (x))
15157 5857968 : && !reg_mentioned_p (x, expr))
15158 1302801 : return true;
15159 : }
15160 1225303 : return false;
15161 2528104 : }
15162 :
15163 : /* Make pseudo-to-pseudo copies after every hard-reg-to-pseudo-copy, because
15164 : the reg-to-reg copy can usefully combine with later instructions, but we
15165 : do not want to combine the hard reg into later instructions, for that
15166 : restricts register allocation. */
15167 : static void
15168 1041492 : make_more_copies (void)
15169 : {
15170 1041492 : basic_block bb;
15171 :
15172 11342990 : FOR_EACH_BB_FN (bb, cfun)
15173 : {
15174 10301498 : rtx_insn *insn;
15175 :
15176 133219185 : FOR_BB_INSNS (bb, insn)
15177 : {
15178 122917687 : if (!NONDEBUG_INSN_P (insn))
15179 64484030 : continue;
15180 :
15181 58433657 : rtx set = single_set (insn);
15182 58433657 : if (!set)
15183 3969439 : continue;
15184 :
15185 54464218 : rtx dest = SET_DEST (set);
15186 54464218 : if (!(REG_P (dest) && !HARD_REGISTER_P (dest)))
15187 31219598 : continue;
15188 :
15189 23244620 : rtx src = SET_SRC (set);
15190 23244620 : if (!(REG_P (src) && HARD_REGISTER_P (src)))
15191 20282715 : continue;
15192 2961905 : if (TEST_HARD_REG_BIT (fixed_reg_set, REGNO (src)))
15193 9058 : continue;
15194 :
15195 2952847 : rtx new_reg = gen_reg_rtx (GET_MODE (dest));
15196 :
15197 : /* The "original" pseudo copies have important attributes
15198 : attached, like pointerness. We want that for these copies
15199 : too, for use by insn recognition and later passes. */
15200 2952847 : set_reg_attrs_from_value (new_reg, dest);
15201 :
15202 2952847 : rtx_insn *new_insn = gen_move_insn (new_reg, src);
15203 2952847 : SET_SRC (set) = new_reg;
15204 2952847 : emit_insn_before (new_insn, insn);
15205 2952847 : df_insn_rescan (insn);
15206 : }
15207 : }
15208 1041492 : }
15209 :
15210 : /* Try combining insns through substitution. */
15211 : static void
15212 1041492 : rest_of_handle_combine (void)
15213 : {
15214 1041492 : make_more_copies ();
15215 :
15216 1041492 : df_set_flags (DF_LR_RUN_DCE + DF_DEFER_INSN_RESCAN);
15217 1041492 : df_note_add_problem ();
15218 1041492 : df_analyze ();
15219 :
15220 1041492 : regstat_init_n_sets_and_refs ();
15221 1041492 : reg_n_sets_max = max_reg_num ();
15222 :
15223 1041492 : bool rebuild_jump_labels_after_combine
15224 1041492 : = combine_instructions (get_insns (), max_reg_num ());
15225 :
15226 : /* Combining insns may have turned an indirect jump into a
15227 : direct jump. Rebuild the JUMP_LABEL fields of jumping
15228 : instructions. */
15229 1041492 : if (rebuild_jump_labels_after_combine)
15230 : {
15231 2427 : if (dom_info_available_p (CDI_DOMINATORS))
15232 0 : free_dominance_info (CDI_DOMINATORS);
15233 2427 : timevar_push (TV_JUMP);
15234 2427 : rebuild_jump_labels (get_insns ());
15235 2427 : cleanup_cfg (0);
15236 2427 : timevar_pop (TV_JUMP);
15237 : }
15238 :
15239 1041492 : regstat_free_n_sets_and_refs ();
15240 1041492 : }
15241 :
15242 : namespace {
15243 :
15244 : const pass_data pass_data_combine =
15245 : {
15246 : RTL_PASS, /* type */
15247 : "combine", /* name */
15248 : OPTGROUP_NONE, /* optinfo_flags */
15249 : TV_COMBINE, /* tv_id */
15250 : PROP_cfglayout, /* properties_required */
15251 : 0, /* properties_provided */
15252 : 0, /* properties_destroyed */
15253 : 0, /* todo_flags_start */
15254 : TODO_df_finish, /* todo_flags_finish */
15255 : };
15256 :
15257 : class pass_combine : public rtl_opt_pass
15258 : {
15259 : public:
15260 288047 : pass_combine (gcc::context *ctxt)
15261 576094 : : rtl_opt_pass (pass_data_combine, ctxt)
15262 : {}
15263 :
15264 : /* opt_pass methods: */
15265 1474422 : bool gate (function *) final override { return (optimize > 0); }
15266 1041492 : unsigned int execute (function *) final override
15267 : {
15268 1041492 : rest_of_handle_combine ();
15269 1041492 : return 0;
15270 : }
15271 :
15272 : }; // class pass_combine
15273 :
15274 : } // anon namespace
15275 :
15276 : rtl_opt_pass *
15277 288047 : make_pass_combine (gcc::context *ctxt)
15278 : {
15279 288047 : return new pass_combine (ctxt);
15280 : }
|