Branch data Line data Source code
1 : : /* Optimize by combining instructions for GNU compiler.
2 : : Copyright (C) 1987-2026 Free Software Foundation, Inc.
3 : :
4 : : This file is part of GCC.
5 : :
6 : : GCC is free software; you can redistribute it and/or modify it under
7 : : the terms of the GNU General Public License as published by the Free
8 : : Software Foundation; either version 3, or (at your option) any later
9 : : version.
10 : :
11 : : GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 : : WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 : : FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 : : for more details.
15 : :
16 : : You should have received a copy of the GNU General Public License
17 : : along with GCC; see the file COPYING3. If not see
18 : : <http://www.gnu.org/licenses/>. */
19 : :
20 : : /* This module is essentially the "combiner" phase of the U. of Arizona
21 : : Portable Optimizer, but redone to work on our list-structured
22 : : representation for RTL instead of their string representation.
23 : :
24 : : The LOG_LINKS of each insn identify the most recent assignment
25 : : to each REG used in the insn. It is a list of previous insns,
26 : : each of which contains a SET for a REG that is used in this insn
27 : : and not used or set in between. LOG_LINKs never cross basic blocks.
28 : : They were set up by the preceding pass (lifetime analysis).
29 : :
30 : : We try to combine each pair of insns joined by a logical link.
31 : : We also try to combine triplets of insns A, B and C when C has
32 : : a link back to B and B has a link back to A. Likewise for a
33 : : small number of quadruplets of insns A, B, C and D for which
34 : : there's high likelihood of success.
35 : :
36 : : We check (with modified_between_p) to avoid combining in such a way
37 : : as to move a computation to a place where its value would be different.
38 : :
39 : : Combination is done by mathematically substituting the previous
40 : : insn(s) values for the regs they set into the expressions in
41 : : the later insns that refer to these regs. If the result is a valid insn
42 : : for our target machine, according to the machine description,
43 : : we install it, delete the earlier insns, and update the data flow
44 : : information (LOG_LINKS and REG_NOTES) for what we did.
45 : :
46 : : There are a few exceptions where the dataflow information isn't
47 : : completely updated (however this is only a local issue since it is
48 : : regenerated before the next pass that uses it):
49 : :
50 : : - reg_live_length is not updated
51 : : - reg_n_refs is not adjusted in the rare case when a register is
52 : : no longer required in a computation
53 : : - there are extremely rare cases (see distribute_notes) when a
54 : : REG_DEAD note is lost
55 : : - a LOG_LINKS entry that refers to an insn with multiple SETs may be
56 : : removed because there is no way to know which register it was
57 : : linking
58 : :
59 : : To simplify substitution, we combine only when the earlier insn(s)
60 : : consist of only a single assignment. To simplify updating afterward,
61 : : we never combine when a subroutine call appears in the middle. */
62 : :
63 : : #include "config.h"
64 : : #include "system.h"
65 : : #include "coretypes.h"
66 : : #include "backend.h"
67 : : #include "target.h"
68 : : #include "rtl.h"
69 : : #include "tree.h"
70 : : #include "cfghooks.h"
71 : : #include "predict.h"
72 : : #include "df.h"
73 : : #include "memmodel.h"
74 : : #include "tm_p.h"
75 : : #include "optabs.h"
76 : : #include "regs.h"
77 : : #include "emit-rtl.h"
78 : : #include "recog.h"
79 : : #include "cgraph.h"
80 : : #include "stor-layout.h"
81 : : #include "cfgrtl.h"
82 : : #include "cfgcleanup.h"
83 : : /* Include expr.h after insn-config.h so we get HAVE_conditional_move. */
84 : : #include "explow.h"
85 : : #include "insn-attr.h"
86 : : #include "rtlhooks-def.h"
87 : : #include "expr.h"
88 : : #include "tree-pass.h"
89 : : #include "valtrack.h"
90 : : #include "rtl-iter.h"
91 : : #include "print-rtl.h"
92 : : #include "function-abi.h"
93 : : #include "rtlanal.h"
94 : :
/* Statistics gathered while combining; each counter is per function.  */

/* Number of attempts to combine instructions in this function.  */

static int combine_attempts;

/* Number of attempts that got as far as substitution in this function.  */

static int combine_merges;

/* Number of instructions combined with added SETs in this function.  */

static int combine_extras;

/* Number of instructions combined in this function.  */

static int combine_successes;

/* combine_instructions may try to replace the right hand side of the
   second instruction with the value of an associated REG_EQUAL note
   before throwing it at try_combine.  That is problematic when there
   is a REG_DEAD note for a register used in the old right hand side
   and can cause distribute_notes to do wrong things.  This is the
   second instruction if it has been so modified, null otherwise.  */

static rtx_insn *i2mod;

/* When I2MOD is nonnull, this is a copy of the old right hand side.  */

static rtx i2mod_old_rhs;

/* When I2MOD is nonnull, this is a copy of the new right hand side.  */

static rtx i2mod_new_rhs;
127 : :
/* Per-register state tracked by the combiner (see the reg_stat vector
   below, indexed by register number).  */

struct reg_stat_type {
  /* Record last point of death of (hard or pseudo) register n.  */
  rtx_insn *last_death;

  /* Record last point of modification of (hard or pseudo) register n.  */
  rtx_insn *last_set;

  /* The next group of fields allows the recording of the last value assigned
     to (hard or pseudo) register n.  We use this information to see if an
     operation being processed is redundant given a prior operation performed
     on the register.  For example, an `and' with a constant is redundant if
     all the zero bits are already known to be turned off.

     We use an approach similar to that used by cse, but change it in the
     following ways:

     (1) We do not want to reinitialize at each label.
     (2) It is useful, but not critical, to know the actual value assigned
	 to a register.  Often just its form is helpful.

     Therefore, we maintain the following fields:

     last_set_value		the last value assigned
     last_set_label		records the value of label_tick when the
				register was assigned
     last_set_table_tick	records the value of label_tick when a
				value using the register is assigned
     last_set_invalid		set to true when it is not valid
				to use the value of this register in some
				register's value

     To understand the usage of these tables, it is important to understand
     the distinction between the value in last_set_value being valid and
     the register being validly contained in some other expression in the
     table.

     (The next two parameters are out of date).

     reg_stat[i].last_set_value is valid if it is nonzero, and either
     reg_n_sets[i] is 1 or reg_stat[i].last_set_label == label_tick.

     Register I may validly appear in any expression returned for the value
     of another register if reg_n_sets[i] is 1.  It may also appear in the
     value for register J if reg_stat[j].last_set_invalid is zero, or
     reg_stat[i].last_set_label < reg_stat[j].last_set_label.

     If an expression is found in the table containing a register which may
     not validly appear in an expression, the register is replaced by
     something that won't match, (clobber (const_int 0)).  */

  /* Record last value assigned to (hard or pseudo) register n.  */

  rtx last_set_value;

  /* Record the value of label_tick when an expression involving register n
     is placed in last_set_value.  */

  int last_set_table_tick;

  /* Record the value of label_tick when the value for register n is placed in
     last_set_value.  */

  int last_set_label;

  /* These fields are maintained in parallel with last_set_value and are
     used to store the mode in which the register was last set, the bits
     that were known to be zero when it was last set, and the number of
     sign bits copies it was known to have when it was last set.  */

  unsigned HOST_WIDE_INT last_set_nonzero_bits;
  unsigned short last_set_sign_bit_copies;
  ENUM_BITFIELD(machine_mode) last_set_mode : MACHINE_MODE_BITSIZE;

  /* Set to true if references to register n in expressions should not be
     used.  last_set_invalid is set nonzero when this register is being
     assigned to and last_set_table_tick == label_tick.  */

  bool last_set_invalid;

  /* Some registers that are set more than once and used in more than one
     basic block are nevertheless always set in similar ways.  For example,
     a QImode register may be loaded from memory in two places on a machine
     where byte loads zero extend.

     We record in the following fields if a register has some leading bits
     that are always equal to the sign bit, and what we know about the
     nonzero bits of a register, specifically which bits are known to be
     zero.

     If an entry is zero, it means that we don't know anything special.  */

  unsigned short sign_bit_copies;

  unsigned HOST_WIDE_INT nonzero_bits;

  /* Record the value of the label_tick when the last truncation
     happened.  The field truncated_to_mode is only valid if
     truncation_label == label_tick.  */

  int truncation_label;

  /* Record the last truncation seen for this register.  If truncation
     is not a nop to this mode we might be able to save an explicit
     truncation if we know that value already contains a truncated
     value.  */

  ENUM_BITFIELD(machine_mode) truncated_to_mode : MACHINE_MODE_BITSIZE;
};
236 : :
237 : :
/* Per-register combiner state, indexed by register number; grown by
   combine_split_insns when splitting creates new pseudos.  */

static vec<reg_stat_type> reg_stat;

/* One plus the highest pseudo for which we track REG_N_SETS.
   regstat_init_n_sets_and_refs allocates the array for REG_N_SETS just once,
   but during combine_split_insns new pseudos can be created.  As we don't have
   updated DF information in that case, it is hard to initialize the array
   after growing.  The combiner only cares about REG_N_SETS (regno) == 1,
   so instead of growing the arrays, just assume all newly created pseudos
   during combine might be set multiple times.  */

static unsigned int reg_n_sets_max;

/* Record the luid of the last insn that invalidated memory
   (anything that writes memory, and subroutine calls, but not pushes).  */

static int mem_last_set;

/* Record the luid of the last CALL_INSN
   so we can tell whether a potential combination crosses any calls.  */

static int last_call_luid;

/* When `subst' is called, this is the insn that is being modified
   (by combining in a previous insn).  The PATTERN of this insn
   is still the old pattern partially modified and it should not be
   looked at, but this may be used to examine the successors of the insn
   to judge whether a simplification is valid.  */

static rtx_insn *subst_insn;

/* This is the lowest LUID that `subst' is currently dealing with.
   get_last_value will not return a value if the register was set at or
   after this LUID.  If not for this mechanism, we could get confused if
   I2 or I1 in try_combine were an insn that used the old value of a register
   to obtain a new value.  In that case, we might erroneously get the
   new value of the register when we wanted the old one.  */

static int subst_low_luid;

/* This contains any hard registers that are used in newpat; reg_dead_at_p
   must consider all these registers to be always live.  */

static HARD_REG_SET newpat_used_regs;

/* This is an insn to which a LOG_LINKS entry has been added.  If this
   insn is the earlier than I2 or I3, combine should rescan starting at
   that location.  */

static rtx_insn *added_links_insn;

/* And similarly, for notes.  */

static rtx_insn *added_notes_insn;

/* Basic block in which we are performing combines.  */
static basic_block this_basic_block;
static bool optimize_this_for_speed_p;


/* Length of the currently allocated uid_insn_cost array.  */

static int max_uid_known;

/* The following array records the insn_cost for every insn
   in the instruction stream.  */

static int *uid_insn_cost;
305 : :
/* The following array records the LOG_LINKS for every insn in the
   instruction stream as struct insn_link pointers.  */

struct insn_link {
  /* The insn this link points back to.  */
  rtx_insn *insn;
  /* The register number this link is about (compared against REGNO of
     the destination when looking links up).  */
  unsigned int regno;
  /* Initialized to zero by alloc_insn_link; presumably a counter used
     when distributing links — see distribute_links' LIMIT parameter.
     TODO(review): confirm against distribute_links.  */
  int insn_count;
  /* Next link in the chain.  */
  struct insn_link *next;
};

static struct insn_link **uid_log_links;
317 : :
318 : : static inline int
319 : 755749600 : insn_uid_check (const_rtx insn)
320 : : {
321 : 755749600 : int uid = INSN_UID (insn);
322 : 755749600 : gcc_checking_assert (uid <= max_uid_known);
323 : 755749600 : return uid;
324 : : }
325 : :
/* Accessors for the per-insn cost and LOG_LINKS arrays, with UID bounds
   checking via insn_uid_check.  */
#define INSN_COST(INSN)		(uid_insn_cost[insn_uid_check (INSN)])
#define LOG_LINKS(INSN)		(uid_log_links[insn_uid_check (INSN)])

/* Iterate L over the LOG_LINKS chain of INSN.  */
#define FOR_EACH_LOG_LINK(L, INSN)				\
  for ((L) = LOG_LINKS (INSN); (L); (L) = (L)->next)

/* Links for LOG_LINKS are allocated from this obstack.  */

static struct obstack insn_link_obstack;
335 : :
336 : : /* Allocate a link. */
337 : :
338 : : static inline struct insn_link *
339 : 37981545 : alloc_insn_link (rtx_insn *insn, unsigned int regno, struct insn_link *next)
340 : : {
341 : 37981545 : struct insn_link *l
342 : 37981545 : = (struct insn_link *) obstack_alloc (&insn_link_obstack,
343 : : sizeof (struct insn_link));
344 : 37981545 : l->insn = insn;
345 : 37981545 : l->regno = regno;
346 : 37981545 : l->insn_count = 0;
347 : 37981545 : l->next = next;
348 : 37981545 : return l;
349 : : }
350 : :
351 : : /* Incremented for each basic block. */
352 : :
353 : : static int label_tick;
354 : :
355 : : /* Reset to label_tick for each extended basic block in scanning order. */
356 : :
357 : : static int label_tick_ebb_start;
358 : :
359 : : /* Mode used to compute significance in reg_stat[].nonzero_bits. It is the
360 : : largest integer mode that can fit in HOST_BITS_PER_WIDE_INT. */
361 : :
362 : : static scalar_int_mode nonzero_bits_mode;
363 : :
364 : : /* Nonzero when reg_stat[].nonzero_bits and reg_stat[].sign_bit_copies can
365 : : be safely used. It is zero while computing them and after combine has
366 : : completed. This former test prevents propagating values based on
367 : : previously set values, which can be incorrect if a variable is modified
368 : : in a loop. */
369 : :
370 : : static int nonzero_sign_valid;
371 : :
372 : :
/* Record one modification to rtl structure
   to be undone by storing old_contents into *where.  */

enum undo_kind { UNDO_RTX, UNDO_INT, UNDO_MODE, UNDO_LINKS };

struct undo
{
  /* Next record in the undos or frees chain.  */
  struct undo *next;
  /* Which member of the unions below is active.  */
  enum undo_kind kind;
  /* The value to restore on undo.  */
  union { rtx r; int i; machine_mode m; struct insn_link *l; } old_contents;
  /* Where to restore it (a register number for UNDO_MODE).  */
  union { rtx *r; int *i; int regno; struct insn_link **l; } where;
};

/* Record a bunch of changes to be undone, up to MAX_UNDO of them.
   num_undo says how many are currently recorded.

   other_insn is nonzero if we have modified some other insn in the process
   of working on subst_insn.  It must be verified too.  */

struct undobuf
{
  /* Chain of undo records for substitutions made so far.  */
  struct undo *undos;
  /* Chain of retired undo records available for reuse.  */
  struct undo *frees;
  rtx_insn *other_insn;
};

static struct undobuf undobuf;

/* Number of times the pseudo being substituted for
   was found and replaced.  */

static int n_occurrences;
405 : :
/* Forward declarations of the combiner's local functions.  */

static rtx reg_nonzero_bits_for_combine (const_rtx, scalar_int_mode,
					 scalar_int_mode,
					 unsigned HOST_WIDE_INT *);
static rtx reg_num_sign_bit_copies_for_combine (const_rtx, scalar_int_mode,
						scalar_int_mode,
						unsigned int *);
static void do_SUBST (rtx *, rtx);
static void do_SUBST_INT (int *, int);
static void init_reg_last (void);
static void setup_incoming_promotions (rtx_insn *);
static void set_nonzero_bits_and_sign_copies (rtx, const_rtx, void *);
static bool cant_combine_insn_p (rtx_insn *);
static bool can_combine_p (rtx_insn *, rtx_insn *, rtx_insn *, rtx_insn *,
			   rtx_insn *, rtx_insn *, rtx *, rtx *);
static bool combinable_i3pat (rtx_insn *, rtx *, rtx, rtx, rtx,
			      bool, bool, rtx *);
static bool contains_muldiv (rtx);
static rtx_insn *try_combine (rtx_insn *, rtx_insn *, rtx_insn *, rtx_insn *,
			      bool *, rtx_insn *);
static void undo_all (void);
static void undo_commit (void);
static rtx *find_split_point (rtx *, rtx_insn *, bool);
static rtx subst (rtx, rtx, rtx, bool, bool, bool);
static rtx combine_simplify_rtx (rtx, machine_mode, bool, bool);
static rtx simplify_if_then_else (rtx);
static rtx simplify_set (rtx);
static rtx simplify_logical (rtx);
static rtx expand_compound_operation (rtx);
static const_rtx expand_field_assignment (const_rtx);
static rtx make_extraction (machine_mode, rtx, HOST_WIDE_INT, rtx,
			    unsigned HOST_WIDE_INT, bool, bool, bool);
static int get_pos_from_mask (unsigned HOST_WIDE_INT,
			      unsigned HOST_WIDE_INT *);
static rtx canon_reg_for_combine (rtx, rtx);
static rtx force_int_to_mode (rtx, scalar_int_mode, scalar_int_mode,
			      scalar_int_mode, unsigned HOST_WIDE_INT, bool);
static rtx force_to_mode (rtx, machine_mode,
			  unsigned HOST_WIDE_INT, bool);
static rtx if_then_else_cond (rtx, rtx *, rtx *);
static rtx known_cond (rtx, enum rtx_code, rtx, rtx);
static bool rtx_equal_for_field_assignment_p (rtx, rtx, bool = false);
static rtx make_field_assignment (rtx);
static rtx apply_distributive_law (rtx);
static rtx distribute_and_simplify_rtx (rtx, int);
static rtx simplify_and_const_int_1 (scalar_int_mode, rtx,
				     unsigned HOST_WIDE_INT);
static rtx simplify_and_const_int (rtx, scalar_int_mode, rtx,
				   unsigned HOST_WIDE_INT);
static bool merge_outer_ops (enum rtx_code *, HOST_WIDE_INT *, enum rtx_code,
			     HOST_WIDE_INT, machine_mode, bool *);
static rtx simplify_shift_const_1 (enum rtx_code, machine_mode, rtx, int);
static rtx simplify_shift_const (rtx, enum rtx_code, machine_mode, rtx,
				 int);
static int recog_for_combine (rtx *, rtx_insn *, rtx *, unsigned = 0, unsigned = 0);
static rtx gen_lowpart_for_combine (machine_mode, rtx);
static rtx gen_lowpart_for_combine_no_emit (machine_mode, rtx);
static enum rtx_code simplify_compare_const (enum rtx_code, machine_mode,
					     rtx *, rtx *);
static enum rtx_code simplify_comparison (enum rtx_code, rtx *, rtx *);
static void update_table_tick (rtx);
static void record_value_for_reg (rtx, rtx_insn *, rtx);
static void check_promoted_subreg (rtx_insn *, rtx);
static void record_dead_and_set_regs_1 (rtx, const_rtx, void *);
static void record_dead_and_set_regs (rtx_insn *);
static bool get_last_value_validate (rtx *, rtx_insn *, int, bool);
static rtx get_last_value (const_rtx);
static void reg_dead_at_p_1 (rtx, const_rtx, void *);
static bool reg_dead_at_p (rtx, rtx_insn *);
static void move_deaths (rtx, rtx, int, rtx_insn *, rtx *);
static bool reg_bitfield_target_p (rtx, rtx);
static void distribute_notes (rtx, rtx_insn *, rtx_insn *, rtx_insn *,
			      rtx, rtx, rtx);
static void distribute_links (struct insn_link *, rtx_insn * = nullptr,
			      int limit = INT_MAX);
static void mark_used_regs_combine (rtx);
static void record_promoted_value (rtx_insn *, rtx);
static bool unmentioned_reg_p (rtx, rtx);
static void record_truncated_values (rtx *, void *);
static bool reg_truncated_to_mode (machine_mode, const_rtx);
static rtx gen_lowpart_or_truncate (machine_mode, rtx);
486 : :
487 : :
/* It is not safe to use ordinary gen_lowpart in combine.
   See comments in gen_lowpart_for_combine.  */
#undef RTL_HOOKS_GEN_LOWPART
#define RTL_HOOKS_GEN_LOWPART              gen_lowpart_for_combine

/* Our implementation of gen_lowpart never emits a new pseudo.  */
#undef RTL_HOOKS_GEN_LOWPART_NO_EMIT
#define RTL_HOOKS_GEN_LOWPART_NO_EMIT      gen_lowpart_for_combine_no_emit

#undef RTL_HOOKS_REG_NONZERO_REG_BITS
#define RTL_HOOKS_REG_NONZERO_REG_BITS     reg_nonzero_bits_for_combine

#undef RTL_HOOKS_REG_NUM_SIGN_BIT_COPIES
#define RTL_HOOKS_REG_NUM_SIGN_BIT_COPIES  reg_num_sign_bit_copies_for_combine

#undef RTL_HOOKS_REG_TRUNCATED_TO_MODE
#define RTL_HOOKS_REG_TRUNCATED_TO_MODE    reg_truncated_to_mode

/* Combine's set of rtl hooks, assembled from the overrides above.  */
static const struct rtl_hooks combine_rtl_hooks = RTL_HOOKS_INITIALIZER;
507 : :
508 : :
509 : : /* Convenience wrapper for the canonicalize_comparison target hook.
510 : : Target hooks cannot use enum rtx_code. */
511 : : static inline void
512 : 24496444 : target_canonicalize_comparison (enum rtx_code *code, rtx *op0, rtx *op1,
513 : : bool op0_preserve_value)
514 : : {
515 : 24496444 : int code_int = (int)*code;
516 : 24496444 : targetm.canonicalize_comparison (&code_int, op0, op1, op0_preserve_value);
517 : 24496444 : *code = (enum rtx_code)code_int;
518 : 820217 : }
519 : :
520 : : /* Try to split PATTERN found in INSN. This returns NULL_RTX if
521 : : PATTERN cannot be split. Otherwise, it returns an insn sequence.
522 : : Updates OLD_NREGS with the max number of regs before the split
523 : : and NEW_NREGS after the split.
524 : : This is a wrapper around split_insns which ensures that the
525 : : reg_stat vector is made larger if the splitter creates a new
526 : : register. */
527 : :
528 : : static rtx_insn *
529 : 11709748 : combine_split_insns (rtx pattern, rtx_insn *insn,
530 : : unsigned int *old_nregs,
531 : : unsigned int *new_regs)
532 : : {
533 : 11709748 : rtx_insn *ret;
534 : 11709748 : unsigned int nregs;
535 : 11709748 : *old_nregs = max_reg_num ();
536 : 11709748 : ret = split_insns (pattern, insn);
537 : 11709748 : *new_regs = nregs = max_reg_num ();
538 : 23419496 : if (nregs > reg_stat.length ())
539 : 3200 : reg_stat.safe_grow_cleared (nregs, true);
540 : 11709748 : return ret;
541 : : }
542 : :
/* This is used by find_single_use to locate an rtx in LOC that
   contains exactly one use of DEST, which is typically a REG.
   It returns a pointer to the innermost rtx expression
   containing DEST.  Appearances of DEST that are being used to
   totally replace it are not counted.

   Returns NULL either when DEST is not used at all in *LOC, or when
   it is used more than once.  */

static rtx *
find_single_use_1 (rtx dest, rtx *loc)
{
  rtx x = *loc;
  enum rtx_code code = GET_CODE (x);
  rtx *result = NULL;
  rtx *this_result;
  int i;
  const char *fmt;

  switch (code)
    {
    /* Leaf expressions and CLOBBERs can never contain a use of DEST.  */
    case CONST:
    case LABEL_REF:
    case SYMBOL_REF:
    CASE_CONST_ANY:
    case CLOBBER:
      return 0;

    case SET:
      /* If the destination is anything other than PC, a REG or a SUBREG
	 of a REG that occupies all of the REG, the insn uses DEST if
	 it is mentioned in the destination or the source.  Otherwise, we
	 need just check the source.  */
      if (GET_CODE (SET_DEST (x)) != PC
	  && !REG_P (SET_DEST (x))
	  && ! (GET_CODE (SET_DEST (x)) == SUBREG
		&& REG_P (SUBREG_REG (SET_DEST (x)))
		&& !read_modify_subreg_p (SET_DEST (x))))
	break;

      return find_single_use_1 (dest, &SET_SRC (x));

    case MEM:
    case SUBREG:
      /* Only the address / inner expression can contain a use.  */
      return find_single_use_1 (dest, &XEXP (x, 0));

    default:
      break;
    }

  /* If it wasn't one of the common cases above, check each expression and
     vector of this code.  Look for a unique usage of DEST.  */

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
	{
	  /* A direct appearance of DEST (by pointer identity, or as a REG
	     with the same REGNO) counts as the innermost use being this
	     operand's slot.  */
	  if (dest == XEXP (x, i)
	      || (REG_P (dest) && REG_P (XEXP (x, i))
		  && REGNO (dest) == REGNO (XEXP (x, i))))
	    this_result = loc;
	  else
	    this_result = find_single_use_1 (dest, &XEXP (x, i));

	  if (result == NULL)
	    result = this_result;
	  else if (this_result)
	    /* Duplicate usage.  */
	    return NULL;
	}
      else if (fmt[i] == 'E')
	{
	  int j;

	  for (j = XVECLEN (x, i) - 1; j >= 0; j--)
	    {
	      if (XVECEXP (x, i, j) == dest
		  || (REG_P (dest)
		      && REG_P (XVECEXP (x, i, j))
		      && REGNO (XVECEXP (x, i, j)) == REGNO (dest)))
		this_result = loc;
	      else
		this_result = find_single_use_1 (dest, &XVECEXP (x, i, j));

	      if (result == NULL)
		result = this_result;
	      else if (this_result)
		/* Duplicate usage.  */
		return NULL;
	    }
	}
    }

  return result;
}
635 : :
636 : :
/* See if DEST, produced in INSN, is used only a single time in the
   sequel.  If so, return a pointer to the innermost rtx expression in which
   it is used.

   If PLOC is nonzero, *PLOC is set to the insn containing the single use.

   Otherwise, we find the single use by finding an insn that has a
   LOG_LINKS pointing at INSN and has a REG_DEAD note for DEST.  If DEST is
   only referenced once in that insn, we know that it must be the first
   and last insn referencing DEST.  */

static rtx *
find_single_use (rtx dest, rtx_insn *insn, rtx_insn **ploc)
{
  basic_block bb;
  rtx_insn *next;
  rtx *result;
  struct insn_link *link;

  /* Only registers are tracked through LOG_LINKS.  */
  if (!REG_P (dest))
    return 0;

  /* Scan forward within INSN's basic block for the insn where DEST dies
     or is set again.  */
  bb = BLOCK_FOR_INSN (insn);
  for (next = NEXT_INSN (insn);
       next && BLOCK_FOR_INSN (next) == bb;
       next = NEXT_INSN (next))
    if (NONDEBUG_INSN_P (next) && dead_or_set_p (next, dest))
      {
	/* Require a LOG_LINKS entry from NEXT back to INSN for DEST,
	   i.e. NEXT really consumes the value INSN produced.  */
	FOR_EACH_LOG_LINK (link, next)
	  if (link->insn == insn && link->regno == REGNO (dest))
	    break;

	if (link)
	  {
	    result = find_single_use_1 (dest, &PATTERN (next));
	    if (ploc)
	      *ploc = next;
	    return result;
	  }
      }

  return 0;
}
680 : :
681 : : /* Substitute NEWVAL, an rtx expression, into INTO, a place in some
682 : : insn. The substitution can be undone by undo_all. If INTO is already
683 : : set to NEWVAL, do not record this change. Because computing NEWVAL might
684 : : also call SUBST, we have to compute it before we put anything into
685 : : the undo table. */
686 : :
687 : : static void
688 : 847402704 : do_SUBST (rtx *into, rtx newval)
689 : : {
690 : 847402704 : struct undo *buf;
691 : 847402704 : rtx oldval = *into;
692 : :
693 : 847402704 : if (oldval == newval)
694 : : return;
695 : :
696 : : /* We'd like to catch as many invalid transformations here as
697 : : possible. Unfortunately, there are way too many mode changes
698 : : that are perfectly valid, so we'd waste too much effort for
699 : : little gain doing the checks here. Focus on catching invalid
700 : : transformations involving integer constants. */
701 : 95118556 : if (GET_MODE_CLASS (GET_MODE (oldval)) == MODE_INT
702 : 58017498 : && CONST_INT_P (newval))
703 : : {
704 : : /* Sanity check that we're replacing oldval with a CONST_INT
705 : : that is a valid sign-extension for the original mode. */
706 : 1790743 : gcc_assert (INTVAL (newval)
707 : : == trunc_int_for_mode (INTVAL (newval), GET_MODE (oldval)));
708 : :
709 : : /* Replacing the operand of a SUBREG or a ZERO_EXTEND with a
710 : : CONST_INT is not valid, because after the replacement, the
711 : : original mode would be gone. Unfortunately, we can't tell
712 : : when do_SUBST is called to replace the operand thereof, so we
713 : : perform this test on oldval instead, checking whether an
714 : : invalid replacement took place before we got here. */
715 : 1790743 : gcc_assert (!(GET_CODE (oldval) == SUBREG
716 : : && CONST_INT_P (SUBREG_REG (oldval))));
717 : 1790743 : gcc_assert (!(GET_CODE (oldval) == ZERO_EXTEND
718 : : && CONST_INT_P (XEXP (oldval, 0))));
719 : : }
720 : :
721 : 95118556 : if (undobuf.frees)
722 : 91039395 : buf = undobuf.frees, undobuf.frees = buf->next;
723 : : else
724 : 4079161 : buf = XNEW (struct undo);
725 : :
726 : 95118556 : buf->kind = UNDO_RTX;
727 : 95118556 : buf->where.r = into;
728 : 95118556 : buf->old_contents.r = oldval;
729 : 95118556 : *into = newval;
730 : :
731 : 95118556 : buf->next = undobuf.undos, undobuf.undos = buf;
732 : : }
733 : :
734 : : #define SUBST(INTO, NEWVAL) do_SUBST (&(INTO), (NEWVAL))
735 : :
736 : : /* Similar to SUBST, but NEWVAL is an int expression. Note that substitution
737 : : for the value of a HOST_WIDE_INT value (including CONST_INT) is
738 : : not safe. */
739 : :
740 : : static void
741 : 15869469 : do_SUBST_INT (int *into, int newval)
742 : : {
743 : 15869469 : struct undo *buf;
744 : 15869469 : int oldval = *into;
745 : :
746 : 15869469 : if (oldval == newval)
747 : : return;
748 : :
749 : 6722889 : if (undobuf.frees)
750 : 6212830 : buf = undobuf.frees, undobuf.frees = buf->next;
751 : : else
752 : 510059 : buf = XNEW (struct undo);
753 : :
754 : 6722889 : buf->kind = UNDO_INT;
755 : 6722889 : buf->where.i = into;
756 : 6722889 : buf->old_contents.i = oldval;
757 : 6722889 : *into = newval;
758 : :
759 : 6722889 : buf->next = undobuf.undos, undobuf.undos = buf;
760 : : }
761 : :
762 : : #define SUBST_INT(INTO, NEWVAL) do_SUBST_INT (&(INTO), (NEWVAL))
763 : :
764 : : /* Similar to SUBST, but just substitute the mode. This is used when
765 : : changing the mode of a pseudo-register, so that any other
766 : : references to the entry in the regno_reg_rtx array will change as
767 : : well. */
768 : :
769 : : static void
770 : 1423247 : subst_mode (int regno, machine_mode newval)
771 : : {
772 : 1423247 : struct undo *buf;
773 : 1423247 : rtx reg = regno_reg_rtx[regno];
774 : 1423247 : machine_mode oldval = GET_MODE (reg);
775 : :
776 : 1423247 : if (oldval == newval)
777 : : return;
778 : :
779 : 1423247 : if (undobuf.frees)
780 : 1348114 : buf = undobuf.frees, undobuf.frees = buf->next;
781 : : else
782 : 75133 : buf = XNEW (struct undo);
783 : :
784 : 1423247 : buf->kind = UNDO_MODE;
785 : 1423247 : buf->where.regno = regno;
786 : 1423247 : buf->old_contents.m = oldval;
787 : 1423247 : adjust_reg_mode (reg, newval);
788 : :
789 : 1423247 : buf->next = undobuf.undos, undobuf.undos = buf;
790 : : }
791 : :
792 : : /* Similar to SUBST, but NEWVAL is a LOG_LINKS expression. */
793 : :
794 : : static void
795 : 73292 : do_SUBST_LINK (struct insn_link **into, struct insn_link *newval)
796 : : {
797 : 73292 : struct undo *buf;
798 : 73292 : struct insn_link * oldval = *into;
799 : :
800 : 73292 : if (oldval == newval)
801 : : return;
802 : :
803 : 73292 : if (undobuf.frees)
804 : 70265 : buf = undobuf.frees, undobuf.frees = buf->next;
805 : : else
806 : 3027 : buf = XNEW (struct undo);
807 : :
808 : 73292 : buf->kind = UNDO_LINKS;
809 : 73292 : buf->where.l = into;
810 : 73292 : buf->old_contents.l = oldval;
811 : 73292 : *into = newval;
812 : :
813 : 73292 : buf->next = undobuf.undos, undobuf.undos = buf;
814 : : }
815 : :
816 : : #define SUBST_LINK(oldval, newval) do_SUBST_LINK (&oldval, newval)
817 : :
818 : : /* Subroutine of try_combine. Determine whether the replacement patterns
819 : : NEWPAT, NEWI2PAT and NEWOTHERPAT are more expensive according to insn_cost
820 : : than the original sequence I0, I1, I2, I3 and undobuf.other_insn. Note
821 : : that I0, I1 and/or NEWI2PAT may be NULL_RTX. Similarly, NEWOTHERPAT and
822 : : undobuf.other_insn may also both be NULL_RTX. Return false if the cost
823 : : of all the instructions can be estimated and the replacements are more
824 : : expensive than the original sequence. */
825 : :
static bool
combine_validate_cost (rtx_insn *i0, rtx_insn *i1, rtx_insn *i2, rtx_insn *i3,
		       rtx newpat, rtx newi2pat, rtx newotherpat)
{
  int i0_cost, i1_cost, i2_cost, i3_cost;
  int new_i2_cost, new_i3_cost;
  int old_cost, new_cost;

  /* Lookup the original insn_costs.  */
  i2_cost = INSN_COST (i2);
  i3_cost = INSN_COST (i3);

  /* Sum the costs of the original insns that take part in the
     combination.  If any participating insn has a non-positive
     (i.e. unknown) cost, old_cost is left at 0, which later disables
     the cost-based rejection entirely.  */
  if (i1)
    {
      i1_cost = INSN_COST (i1);
      if (i0)
	{
	  i0_cost = INSN_COST (i0);
	  old_cost = (i0_cost > 0 && i1_cost > 0 && i2_cost > 0 && i3_cost > 0
		      ? i0_cost + i1_cost + i2_cost + i3_cost : 0);
	}
      else
	{
	  old_cost = (i1_cost > 0 && i2_cost > 0 && i3_cost > 0
		      ? i1_cost + i2_cost + i3_cost : 0);
	  i0_cost = 0;
	}
    }
  else
    {
      old_cost = (i2_cost > 0 && i3_cost > 0) ? i2_cost + i3_cost : 0;
      i1_cost = i0_cost = 0;
    }

  /* If we have split a PARALLEL I2 to I1,I2, we have counted its cost twice;
     correct that.  */
  if (old_cost && i1 && INSN_UID (i1) == INSN_UID (i2))
    old_cost -= i1_cost;


  /* Calculate the replacement insn_costs.  The replacement pattern is
     temporarily installed in the insn and INSN_CODE is reset to -1 so
     insn_cost sees the new pattern; both are restored immediately
     afterwards.  */
  rtx tmp = PATTERN (i3);
  PATTERN (i3) = newpat;
  int tmpi = INSN_CODE (i3);
  INSN_CODE (i3) = -1;
  new_i3_cost = insn_cost (i3, optimize_this_for_speed_p);
  PATTERN (i3) = tmp;
  INSN_CODE (i3) = tmpi;
  if (newi2pat)
    {
      tmp = PATTERN (i2);
      PATTERN (i2) = newi2pat;
      tmpi = INSN_CODE (i2);
      INSN_CODE (i2) = -1;
      new_i2_cost = insn_cost (i2, optimize_this_for_speed_p);
      PATTERN (i2) = tmp;
      INSN_CODE (i2) = tmpi;
      new_cost = (new_i2_cost > 0 && new_i3_cost > 0)
		 ? new_i2_cost + new_i3_cost : 0;
    }
  else
    {
      new_cost = new_i3_cost;
      new_i2_cost = 0;
    }

  /* If the combination also rewrites undobuf.other_insn, fold its old
     and new costs into the totals using the same temporary-install
     trick; if either side is unknown, disable the comparison.  */
  if (undobuf.other_insn)
    {
      int old_other_cost, new_other_cost;

      old_other_cost = INSN_COST (undobuf.other_insn);
      tmp = PATTERN (undobuf.other_insn);
      PATTERN (undobuf.other_insn) = newotherpat;
      tmpi = INSN_CODE (undobuf.other_insn);
      INSN_CODE (undobuf.other_insn) = -1;
      new_other_cost = insn_cost (undobuf.other_insn,
				  optimize_this_for_speed_p);
      PATTERN (undobuf.other_insn) = tmp;
      INSN_CODE (undobuf.other_insn) = tmpi;
      if (old_other_cost > 0 && new_other_cost > 0)
	{
	  old_cost += old_other_cost;
	  new_cost += new_other_cost;
	}
      else
	old_cost = 0;
    }

  /* Disallow this combination if both new_cost and old_cost are greater than
     zero, and new_cost is greater than old cost.  */
  bool reject = old_cost > 0 && new_cost > old_cost;

  if (dump_file)
    {
      fprintf (dump_file, "%s combination of insns ",
	       reject ? "rejecting" : "allowing");
      if (i0)
	fprintf (dump_file, "%d, ", INSN_UID (i0));
      if (i1 && INSN_UID (i1) != INSN_UID (i2))
	fprintf (dump_file, "%d, ", INSN_UID (i1));
      fprintf (dump_file, "%d and %d\n", INSN_UID (i2), INSN_UID (i3));

      fprintf (dump_file, "original costs ");
      if (i0)
	fprintf (dump_file, "%d + ", i0_cost);
      if (i1 && INSN_UID (i1) != INSN_UID (i2))
	fprintf (dump_file, "%d + ", i1_cost);
      fprintf (dump_file, "%d + %d = %d\n", i2_cost, i3_cost, old_cost);

      if (newi2pat)
	fprintf (dump_file, "replacement costs %d + %d = %d\n",
		 new_i2_cost, new_i3_cost, new_cost);
      else
	fprintf (dump_file, "replacement cost %d\n", new_cost);
    }

  if (reject)
    return false;

  /* Update the uid_insn_cost array with the replacement costs.  I1 (and
     I0) are consumed by the combination, so their costs drop to 0.  */
  INSN_COST (i2) = new_i2_cost;
  INSN_COST (i3) = new_i3_cost;
  if (i1)
    {
      INSN_COST (i1) = 0;
      if (i0)
	INSN_COST (i0) = 0;
    }

  return true;
}
957 : :
958 : :
959 : : /* Delete any insns that copy a register to itself.
960 : : Return true if the CFG was changed. */
961 : :
962 : : static bool
963 : 1000744 : delete_noop_moves (void)
964 : : {
965 : 1000744 : rtx_insn *insn, *next;
966 : 1000744 : basic_block bb;
967 : :
968 : 1000744 : bool edges_deleted = false;
969 : :
970 : 11392642 : FOR_EACH_BB_FN (bb, cfun)
971 : : {
972 : 137314551 : for (insn = BB_HEAD (bb); insn != NEXT_INSN (BB_END (bb)); insn = next)
973 : : {
974 : 126922653 : next = NEXT_INSN (insn);
975 : 126922653 : if (INSN_P (insn) && noop_move_p (insn))
976 : : {
977 : 6852 : if (dump_file)
978 : 0 : fprintf (dump_file, "deleting noop move %d\n", INSN_UID (insn));
979 : :
980 : 6852 : edges_deleted |= delete_insn_and_edges (insn);
981 : : }
982 : : }
983 : : }
984 : :
985 : 1000744 : return edges_deleted;
986 : : }
987 : :
988 : :
989 : : /* Return false if we do not want to (or cannot) combine DEF. */
990 : : static bool
991 : 41807129 : can_combine_def_p (df_ref def)
992 : : {
993 : : /* Do not consider if it is pre/post modification in MEM. */
994 : 41807129 : if (DF_REF_FLAGS (def) & DF_REF_PRE_POST_MODIFY)
995 : : return false;
996 : :
997 : 40144350 : unsigned int regno = DF_REF_REGNO (def);
998 : :
999 : : /* Do not combine frame pointer adjustments. */
1000 : 40144350 : if ((regno == FRAME_POINTER_REGNUM
1001 : 0 : && (!reload_completed || frame_pointer_needed))
1002 : 2062 : || (!HARD_FRAME_POINTER_IS_FRAME_POINTER
1003 : 40144350 : && regno == HARD_FRAME_POINTER_REGNUM
1004 : : && (!reload_completed || frame_pointer_needed))
1005 : 40142288 : || (FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
1006 : 0 : && regno == ARG_POINTER_REGNUM && fixed_regs[regno]))
1007 : 2062 : return false;
1008 : :
1009 : : return true;
1010 : : }
1011 : :
1012 : : /* Return false if we do not want to (or cannot) combine USE. */
1013 : : static bool
1014 : 77602564 : can_combine_use_p (df_ref use)
1015 : : {
1016 : : /* Do not consider the usage of the stack pointer by function call. */
1017 : 0 : if (DF_REF_FLAGS (use) & DF_REF_CALL_STACK_USAGE)
1018 : 0 : return false;
1019 : :
1020 : : return true;
1021 : : }
1022 : :
1023 : : /* Fill in log links field for all insns. */
1024 : :
static void
create_log_links (void)
{
  basic_block bb;
  rtx_insn **next_use;
  rtx_insn *insn;
  df_ref def, use;

  /* next_use[R] is the closest insn after the current scan point (we
     scan each block backwards) that uses register R.  */
  next_use = XCNEWVEC (rtx_insn *, max_reg_num ());

  /* Pass through each block from the end, recording the uses of each
     register and establishing log links when def is encountered.
     Note that we do not clear next_use array in order to save time,
     so we have to test whether the use is in the same basic block as def.

     There are a few cases below when we do not consider the definition or
     usage -- these are taken from original flow.c did.  Don't ask me why it is
     done this way; I don't know and if it works, I don't want to know.  */

  FOR_EACH_BB_FN (bb, cfun)
    {
      FOR_BB_INSNS_REVERSE (bb, insn)
        {
          if (!NONDEBUG_INSN_P (insn))
            continue;

	  /* Log links are created only once.  */
	  gcc_assert (!LOG_LINKS (insn));

          FOR_EACH_INSN_DEF (def, insn)
            {
              unsigned int regno = DF_REF_REGNO (def);
              rtx_insn *use_insn;

	      /* No later use of this register recorded yet.  */
              if (!next_use[regno])
                continue;

              if (!can_combine_def_p (def))
                continue;

	      /* Consume the recorded use: a def ends the chain, so any
		 earlier def must not link to the same use.  */
              use_insn = next_use[regno];
              next_use[regno] = NULL;

	      /* Stale entry from a previously scanned block (next_use is
		 never cleared between blocks).  */
              if (BLOCK_FOR_INSN (use_insn) != bb)
                continue;

              /* flow.c claimed:

                 We don't build a LOG_LINK for hard registers contained
                 in ASM_OPERANDs.  If these registers get replaced,
                 we might wind up changing the semantics of the insn,
                 even if reload can make what appear to be valid
                 assignments later.  */
              if (regno < FIRST_PSEUDO_REGISTER
                  && asm_noperands (PATTERN (use_insn)) >= 0)
                continue;

	      /* Don't add duplicate links between instructions.  */
	      struct insn_link *links;
	      FOR_EACH_LOG_LINK (links, use_insn)
	        if (insn == links->insn && regno == links->regno)
		  break;

	      if (!links)
		LOG_LINKS (use_insn)
		  = alloc_insn_link (insn, regno, LOG_LINKS (use_insn));
            }

	  /* Record this insn as the nearest use of every register it
	     reads, for defs seen earlier in the (backwards) scan.  */
          FOR_EACH_INSN_USE (use, insn)
            if (can_combine_use_p (use))
              next_use[DF_REF_REGNO (use)] = insn;
        }
    }

  free (next_use);
}
1101 : :
1102 : : /* Walk the LOG_LINKS of insn B to see if we find a reference to A. Return
1103 : : true if we found a LOG_LINK that proves that A feeds B. This only works
1104 : : if there are no instructions between A and B which could have a link
1105 : : depending on A, since in that case we would not record a link for B. */
1106 : :
1107 : : static bool
1108 : 12853992 : insn_a_feeds_b (rtx_insn *a, rtx_insn *b)
1109 : : {
1110 : 12853992 : struct insn_link *links;
1111 : 16242039 : FOR_EACH_LOG_LINK (links, b)
1112 : 13694353 : if (links->insn == a)
1113 : : return true;
1114 : : return false;
1115 : : }
1116 : :
1117 : : /* Main entry point for combiner. F is the first insn of the function.
1118 : : NREGS is the first unused pseudo-reg number.
1119 : :
1120 : : Return nonzero if the CFG was changed (e.g. if the combiner has
1121 : : turned an indirect jump instruction into a direct jump). */
static bool
combine_instructions (rtx_insn *f, unsigned int nregs)
{
  rtx_insn *insn, *next;
  struct insn_link *links, *nextlinks;
  rtx_insn *first;
  basic_block last_bb;

  bool new_direct_jump_p = false;

  /* Skip leading notes/debug insns; nothing to do in a function with
     no real insns.  */
  for (first = f; first && !NONDEBUG_INSN_P (first); )
    first = NEXT_INSN (first);
  if (!first)
    return false;

  combine_attempts = 0;
  combine_merges = 0;
  combine_extras = 0;
  combine_successes = 0;

  rtl_hooks = combine_rtl_hooks;

  reg_stat.safe_grow_cleared (nregs, true);

  init_recog_no_volatile ();

  /* Allocate array for insn info.  */
  max_uid_known = get_max_uid ();
  uid_log_links = XCNEWVEC (struct insn_link *, max_uid_known + 1);
  uid_insn_cost = XCNEWVEC (int, max_uid_known + 1);
  gcc_obstack_init (&insn_link_obstack);

  nonzero_bits_mode = int_mode_for_size (HOST_BITS_PER_WIDE_INT, 0).require ();

  /* Don't use reg_stat[].nonzero_bits when computing it.  This can cause
     problems when, for example, we have j <<= 1 in a loop.  */

  nonzero_sign_valid = 0;
  label_tick = label_tick_ebb_start = 1;

  /* First pass: scan all SETs and see if we can deduce anything about what
     bits are known to be zero for some registers and how many copies
     of the sign bit are known to exist for those registers.

     Also set any known values so that we can use it while searching
     for what bits are known to be set.  */

  setup_incoming_promotions (first);
  /* Allow the entry block and the first block to fall into the same EBB.
     Conceptually the incoming promotions are assigned to the entry block.  */
  last_bb = ENTRY_BLOCK_PTR_FOR_FN (cfun);

  create_log_links ();
  FOR_EACH_BB_FN (this_basic_block, cfun)
    {
      optimize_this_for_speed_p = optimize_bb_for_speed_p (this_basic_block);
      last_call_luid = 0;
      mem_last_set = -1;

      label_tick++;
      if (!single_pred_p (this_basic_block)
	  || single_pred (this_basic_block) != last_bb)
	label_tick_ebb_start = label_tick;
      last_bb = this_basic_block;

      FOR_BB_INSNS (this_basic_block, insn)
        if (INSN_P (insn) && BLOCK_FOR_INSN (insn))
	  {
            rtx links;

            subst_low_luid = DF_INSN_LUID (insn);
            subst_insn = insn;

	    note_stores (insn, set_nonzero_bits_and_sign_copies, insn);
	    record_dead_and_set_regs (insn);

	    if (AUTO_INC_DEC)
	      for (links = REG_NOTES (insn); links; links = XEXP (links, 1))
		if (REG_NOTE_KIND (links) == REG_INC)
		  set_nonzero_bits_and_sign_copies (XEXP (links, 0), NULL_RTX,
						    insn);

	    /* Record the current insn_cost of this instruction.  */
	    INSN_COST (insn) = insn_cost (insn, optimize_this_for_speed_p);
	    if (dump_file)
	      {
		fprintf (dump_file, "insn_cost %d for ", INSN_COST (insn));
		dump_insn_slim (dump_file, insn);
	      }
	  }
    }

  nonzero_sign_valid = 1;

  /* Second pass: now scan all the insns in forward order, attempting the
     actual combinations.  */
  label_tick = label_tick_ebb_start = 1;
  init_reg_last ();
  setup_incoming_promotions (first);
  last_bb = ENTRY_BLOCK_PTR_FOR_FN (cfun);
  int max_combine = param_max_combine_insns;

  FOR_EACH_BB_FN (this_basic_block, cfun)
    {
      rtx_insn *last_combined_insn = NULL;

      /* Ignore instruction combination in basic blocks that are going to
	 be removed as unreachable anyway.  See PR82386.  */
      if (EDGE_COUNT (this_basic_block->preds) == 0)
	continue;

      optimize_this_for_speed_p = optimize_bb_for_speed_p (this_basic_block);
      last_call_luid = 0;
      mem_last_set = -1;

      label_tick++;
      if (!single_pred_p (this_basic_block)
	  || single_pred (this_basic_block) != last_bb)
	label_tick_ebb_start = label_tick;
      last_bb = this_basic_block;

      rtl_profile_for_bb (this_basic_block);
      /* After a successful combination, NEXT is the insn try_combine
	 returned and the scan resumes there; otherwise we simply step
	 to the following insn.  */
      for (insn = BB_HEAD (this_basic_block);
	   insn != NEXT_INSN (BB_END (this_basic_block));
	   insn = next ? next : NEXT_INSN (insn))
	{
	  next = 0;
	  if (!NONDEBUG_INSN_P (insn))
	    continue;

	  /* Keep last_combined_insn pointing at a live, real insn no
	     earlier than INSN in this block.  */
	  while (last_combined_insn
		 && (!NONDEBUG_INSN_P (last_combined_insn)
		     || last_combined_insn->deleted ()))
	    last_combined_insn = PREV_INSN (last_combined_insn);
	  if (last_combined_insn == NULL_RTX
	      || BLOCK_FOR_INSN (last_combined_insn) != this_basic_block
	      || DF_INSN_LUID (last_combined_insn) <= DF_INSN_LUID (insn))
	    last_combined_insn = insn;

	  /* See if we know about function return values before this
	     insn based upon SUBREG flags.  */
	  check_promoted_subreg (insn, PATTERN (insn));

	  /* See if we can find hardregs and subreg of pseudos in
	     narrower modes.  This could help turning TRUNCATEs
	     into SUBREGs.  */
	  note_uses (&PATTERN (insn), record_truncated_values, NULL);

	  /* Try this insn with each insn it links back to.  */

	  FOR_EACH_LOG_LINK (links, insn)
	    if ((next = try_combine (insn, links->insn, NULL,
				     NULL, &new_direct_jump_p,
				     last_combined_insn)) != 0)
	      {
		statistics_counter_event (cfun, "two-insn combine", 1);
		goto retry;
	      }

	  /* Try each sequence of three linked insns ending with this one.  */

	  if (max_combine >= 3)
	    FOR_EACH_LOG_LINK (links, insn)
	      {
		rtx_insn *link = links->insn;

		/* If the linked insn has been replaced by a note, then there
		   is no point in pursuing this chain any further.  */
		if (NOTE_P (link))
		  continue;

		FOR_EACH_LOG_LINK (nextlinks, link)
		  if ((next = try_combine (insn, link, nextlinks->insn,
					   NULL, &new_direct_jump_p,
					   last_combined_insn)) != 0)
		    {
		      statistics_counter_event (cfun, "three-insn combine", 1);
		      goto retry;
		    }
	      }

	  /* Try combining an insn with two different insns whose results it
	     uses.  */
	  if (max_combine >= 3)
	    FOR_EACH_LOG_LINK (links, insn)
	      for (nextlinks = links->next; nextlinks;
		   nextlinks = nextlinks->next)
		if ((next = try_combine (insn, links->insn,
					 nextlinks->insn, NULL,
					 &new_direct_jump_p,
					 last_combined_insn)) != 0)

		  {
		    statistics_counter_event (cfun, "three-insn combine", 1);
		    goto retry;
		  }

	  /* Try four-instruction combinations.  */
	  if (max_combine >= 4)
	    FOR_EACH_LOG_LINK (links, insn)
	      {
		struct insn_link *next1;
		rtx_insn *link = links->insn;

		/* If the linked insn has been replaced by a note, then there
		   is no point in pursuing this chain any further.  */
		if (NOTE_P (link))
		  continue;

		FOR_EACH_LOG_LINK (next1, link)
		  {
		    rtx_insn *link1 = next1->insn;
		    if (NOTE_P (link1))
		      continue;
		    /* I0 -> I1 -> I2 -> I3.  */
		    FOR_EACH_LOG_LINK (nextlinks, link1)
		      if ((next = try_combine (insn, link, link1,
					       nextlinks->insn,
					       &new_direct_jump_p,
					       last_combined_insn)) != 0)
			{
			  statistics_counter_event (cfun, "four-insn combine", 1);
			  goto retry;
			}
		    /* I0, I1 -> I2, I2 -> I3.  */
		    for (nextlinks = next1->next; nextlinks;
			 nextlinks = nextlinks->next)
		      if ((next = try_combine (insn, link, link1,
					       nextlinks->insn,
					       &new_direct_jump_p,
					       last_combined_insn)) != 0)
			{
			  statistics_counter_event (cfun, "four-insn combine", 1);
			  goto retry;
			}
		  }

		for (next1 = links->next; next1; next1 = next1->next)
		  {
		    rtx_insn *link1 = next1->insn;
		    if (NOTE_P (link1))
		      continue;
		    /* I0 -> I2; I1, I2 -> I3.  */
		    FOR_EACH_LOG_LINK (nextlinks, link)
		      if ((next = try_combine (insn, link, link1,
					       nextlinks->insn,
					       &new_direct_jump_p,
					       last_combined_insn)) != 0)
			{
			  statistics_counter_event (cfun, "four-insn combine", 1);
			  goto retry;
			}
		    /* I0 -> I1; I1, I2 -> I3.  */
		    FOR_EACH_LOG_LINK (nextlinks, link1)
		      if ((next = try_combine (insn, link, link1,
					       nextlinks->insn,
					       &new_direct_jump_p,
					       last_combined_insn)) != 0)
			{
			  statistics_counter_event (cfun, "four-insn combine", 1);
			  goto retry;
			}
		  }
	      }

	  /* Try this insn with each REG_EQUAL note it links back to.  */
	  FOR_EACH_LOG_LINK (links, insn)
	    {
	      rtx set, note;
	      rtx_insn *temp = links->insn;
	      if ((set = single_set (temp)) != 0
		  && (note = find_reg_equal_equiv_note (temp)) != 0
		  && (note = XEXP (note, 0), GET_CODE (note)) != EXPR_LIST
		  && ! side_effects_p (SET_SRC (set))
		  /* Avoid using a register that may already been marked
		     dead by an earlier instruction.  */
		  && ! unmentioned_reg_p (note, SET_SRC (set))
		  && (GET_MODE (note) == VOIDmode
		      ? SCALAR_INT_MODE_P (GET_MODE (SET_DEST (set)))
		      : (GET_MODE (SET_DEST (set)) == GET_MODE (note)
			 && (GET_CODE (SET_DEST (set)) != ZERO_EXTRACT
			     || (GET_MODE (XEXP (SET_DEST (set), 0))
				 == GET_MODE (note))))))
		{
		  /* Temporarily replace the set's source with the
		     contents of the REG_EQUAL note.  The insn will
		     be deleted or recognized by try_combine.  */
		  rtx orig_src = SET_SRC (set);
		  rtx orig_dest = SET_DEST (set);
		  if (GET_CODE (SET_DEST (set)) == ZERO_EXTRACT)
		    SET_DEST (set) = XEXP (SET_DEST (set), 0);
		  SET_SRC (set) = note;
		  i2mod = temp;
		  i2mod_old_rhs = copy_rtx (orig_src);
		  i2mod_new_rhs = copy_rtx (note);
		  next = try_combine (insn, i2mod, NULL, NULL,
				      &new_direct_jump_p,
				      last_combined_insn);
		  i2mod = NULL;
		  if (next)
		    {
		      statistics_counter_event (cfun, "insn-with-note combine", 1);
		      goto retry;
		    }
		  /* The attempt failed: restore the original SET and
		     force re-recognition of the modified insn.  */
		  INSN_CODE (temp) = -1;
		  SET_SRC (set) = orig_src;
		  SET_DEST (set) = orig_dest;
		}
	    }

	  if (!NOTE_P (insn))
	    record_dead_and_set_regs (insn);

retry:
	  ;
	}
    }

  default_rtl_profile ();
  clear_bb_flags ();

  if (purge_all_dead_edges ())
    new_direct_jump_p = true;
  if (delete_noop_moves ())
    new_direct_jump_p = true;

  /* Clean up.  */
  obstack_free (&insn_link_obstack, NULL);
  free (uid_log_links);
  free (uid_insn_cost);
  reg_stat.release ();

  /* Release the undo free list accumulated across all try_combine
     calls.  */
  {
    struct undo *undo, *next;
    for (undo = undobuf.frees; undo; undo = next)
      {
	next = undo->next;
	free (undo);
      }
    undobuf.frees = 0;
  }

  statistics_counter_event (cfun, "attempts", combine_attempts);
  statistics_counter_event (cfun, "merges", combine_merges);
  statistics_counter_event (cfun, "extras", combine_extras);
  statistics_counter_event (cfun, "successes", combine_successes);

  nonzero_sign_valid = 0;
  rtl_hooks = general_rtl_hooks;

  /* Make recognizer allow volatile MEMs again.  */
  init_recog ();

  return new_direct_jump_p;
}
1476 : :
1477 : : /* Wipe the last_xxx fields of reg_stat in preparation for another pass. */
1478 : :
1479 : : static void
1480 : 1000744 : init_reg_last (void)
1481 : : {
1482 : 1000744 : unsigned int i;
1483 : 1000744 : reg_stat_type *p;
1484 : :
1485 : 142160555 : FOR_EACH_VEC_ELT (reg_stat, i, p)
1486 : 141159811 : memset (p, 0, offsetof (reg_stat_type, sign_bit_copies));
1487 : 1000744 : }
1488 : :
1489 : : /* Set up any promoted values for incoming argument registers. */
1490 : :
static void
setup_incoming_promotions (rtx_insn *first)
{
  tree arg;
  bool strictly_local = false;

  /* Walk every formal parameter of the current function.  */
  for (arg = DECL_ARGUMENTS (current_function_decl); arg;
       arg = DECL_CHAIN (arg))
    {
      rtx x, reg = DECL_INCOMING_RTL (arg);
      int uns1, uns3;
      machine_mode mode1, mode2, mode3, mode4;

      /* Only continue if the incoming argument is in a register.  */
      if (!REG_P (reg))
	continue;

      /* Determine, if possible, whether all call sites of the current
         function lie within the current compilation unit.  (This does
	 take into account the exporting of a function via taking its
	 address, and so forth.)  */
      strictly_local
	= cgraph_node::local_info_node (current_function_decl)->local;

      /* The mode and signedness of the argument before any promotions happen
         (equal to the mode of the pseudo holding it at that stage).  */
      mode1 = TYPE_MODE (TREE_TYPE (arg));
      uns1 = TYPE_UNSIGNED (TREE_TYPE (arg));

      /* The mode and signedness of the argument after any source language and
	 TARGET_PROMOTE_PROTOTYPES-driven promotions.  */
      mode2 = TYPE_MODE (DECL_ARG_TYPE (arg));
      uns3 = TYPE_UNSIGNED (DECL_ARG_TYPE (arg));

      /* The mode and signedness of the argument as it is actually passed,
         see assign_parm_setup_reg in function.cc.  */
      mode3 = promote_function_mode (TREE_TYPE (arg), mode1, &uns3,
				     TREE_TYPE (cfun->decl), 0);

      /* The mode of the register in which the argument is being passed.  */
      mode4 = GET_MODE (reg);

      /* Eliminate sign extensions in the callee when:
	 (a) A mode promotion has occurred;  */
      if (mode1 == mode3)
	continue;
      /* (b) The mode of the register is the same as the mode of
	     the argument as it is passed; */
      if (mode3 != mode4)
	continue;
      /* (c) There's no language level extension;  */
      if (mode1 == mode2)
	;
      /* (c.1) All callers are from the current compilation unit.  If that's
	 the case we don't have to rely on an ABI, we only have to know
	 what we're generating right now, and we know that we will do the
	 mode1 to mode2 promotion with the given sign.  */
      else if (!strictly_local)
	continue;
      /* (c.2) The combination of the two promotions is useful.  This is
	 true when the signs match, or if the first promotion is unsigned.
	 In the later case, (sign_extend (zero_extend x)) is the same as
	 (zero_extend (zero_extend x)), so make sure to force UNS3 true.  */
      else if (uns1)
	uns3 = true;
      else if (uns3)
	continue;

      /* Record that the value was promoted from mode1 to mode3,
	 so that any sign extension at the head of the current
	 function may be eliminated.  The (clobber (const_int 0)) stands
	 for the unknown narrow value being extended.  */
      x = gen_rtx_CLOBBER (mode1, const0_rtx);
      x = gen_rtx_fmt_e ((uns3 ? ZERO_EXTEND : SIGN_EXTEND), mode3, x);
      record_value_for_reg (reg, first, x);
    }
}
1567 : :
1568 : : /* If MODE has a precision lower than PREC and SRC is a non-negative constant
1569 : : that would appear negative in MODE, sign-extend SRC for use in nonzero_bits
1570 : : because some machines (maybe most) will actually do the sign-extension and
1571 : : this is the conservative approach.
1572 : :
1573 : : ??? For 2.5, try to tighten up the MD files in this regard instead of this
1574 : : kludge. */
1575 : :
1576 : : static rtx
1577 : 0 : sign_extend_short_imm (rtx src, machine_mode mode, unsigned int prec)
1578 : : {
1579 : 0 : scalar_int_mode int_mode;
1580 : 0 : if (CONST_INT_P (src)
1581 : 0 : && is_a <scalar_int_mode> (mode, &int_mode)
1582 : 0 : && GET_MODE_PRECISION (int_mode) < prec
1583 : 0 : && INTVAL (src) > 0
1584 : 0 : && val_signbit_known_set_p (int_mode, INTVAL (src)))
1585 : 0 : src = GEN_INT (INTVAL (src) | ~GET_MODE_MASK (int_mode));
1586 : :
1587 : 0 : return src;
1588 : : }
1589 : :
1590 : : /* Update RSP for pseudo-register X from INSN's REG_EQUAL note (if one exists)
1591 : : and SET. */
1592 : :
static void
update_rsp_from_reg_equal (reg_stat_type *rsp, rtx_insn *insn, const_rtx set,
			   rtx x)
{
  rtx reg_equal_note = insn ? find_reg_equal_equiv_note (insn) : NULL_RTX;
  unsigned HOST_WIDE_INT bits = 0;
  rtx reg_equal = NULL, src = SET_SRC (set);
  unsigned int num = 0;

  if (reg_equal_note)
    reg_equal = XEXP (reg_equal_note, 0);

  if (SHORT_IMMEDIATES_SIGN_EXTEND)
    {
      src = sign_extend_short_imm (src, GET_MODE (x), BITS_PER_WORD);
      if (reg_equal)
	reg_equal = sign_extend_short_imm (reg_equal, GET_MODE (x), BITS_PER_WORD);
    }

  /* Don't call nonzero_bits if it cannot change anything.  */
  if (rsp->nonzero_bits != HOST_WIDE_INT_M1U)
    {
      machine_mode mode = GET_MODE (x);
      if (GET_MODE_CLASS (mode) == MODE_INT
	  && HWI_COMPUTABLE_MODE_P (mode))
	mode = nonzero_bits_mode;
      bits = nonzero_bits (src, mode);
      /* Both SET_SRC and the REG_EQUAL note describe the same stored
	 value, so the possibly-nonzero bits can be intersected.  */
      if (reg_equal && bits)
	bits &= nonzero_bits (reg_equal, mode);
      /* Accumulate (OR) over every set of X seen in the function.  */
      rsp->nonzero_bits |= bits;
    }

  /* Don't call num_sign_bit_copies if it cannot change anything.  */
  if (rsp->sign_bit_copies != 1)
    {
      num = num_sign_bit_copies (SET_SRC (set), GET_MODE (x));
      /* The REG_EQUAL note may prove more sign-bit copies than the
	 source itself; take the larger of the two counts.  */
      if (reg_equal && maybe_ne (num, GET_MODE_PRECISION (GET_MODE (x))))
	{
	  unsigned int numeq = num_sign_bit_copies (reg_equal, GET_MODE (x));
	  if (num == 0 || numeq > num)
	    num = numeq;
	}
      /* Across all sets of X, only the minimum copy count is safe.  */
      if (rsp->sign_bit_copies == 0 || num < rsp->sign_bit_copies)
	rsp->sign_bit_copies = num;
    }
}
1639 : :
/* Called via note_stores.  If X is a pseudo that is narrower than
   HOST_BITS_PER_WIDE_INT and is being set, record what bits are known zero.

   If we are setting only a portion of X and we can't figure out what
   portion, assume all bits will be used since we don't know what will
   be happening.

   Similarly, set how many bits of X are known to be copies of the sign bit
   at all locations in the function.  This is the smallest number implied
   by any set of X.

   DATA is the insn performing the store (or null).  */

static void
set_nonzero_bits_and_sign_copies (rtx x, const_rtx set, void *data)
{
  rtx_insn *insn = (rtx_insn *) data;
  scalar_int_mode mode;

  if (REG_P (x)
      && REGNO (x) >= FIRST_PSEUDO_REGISTER
      /* If this register is undefined at the start of the file, we can't
	 say what its contents were.  */
      && ! REGNO_REG_SET_P
	   (DF_LR_IN (ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb), REGNO (x))
      && is_a <scalar_int_mode> (GET_MODE (x), &mode)
      && HWI_COMPUTABLE_MODE_P (mode))
    {
      reg_stat_type *rsp = &reg_stat[REGNO (x)];

      /* A clobber (or an unknown kind of store) tells us nothing:
	 assume every bit may be set and only one sign-bit copy.  */
      if (set == 0 || GET_CODE (set) == CLOBBER)
	{
	  rsp->nonzero_bits = GET_MODE_MASK (mode);
	  rsp->sign_bit_copies = 1;
	  return;
	}

      /* If this register is being initialized using itself, and the
	 register is uninitialized in this basic block, and there are
	 no LOG_LINKS which set the register, then part of the
	 register is uninitialized.  In that case we can't assume
	 anything about the number of nonzero bits.

	 ??? We could do better if we checked this in
	 reg_{nonzero_bits,num_sign_bit_copies}_for_combine.  Then we
	 could avoid making assumptions about the insn which initially
	 sets the register, while still using the information in other
	 insns.  We would have to be careful to check every insn
	 involved in the combination.  */

      if (insn
	  && reg_referenced_p (x, PATTERN (insn))
	  && !REGNO_REG_SET_P (DF_LR_IN (BLOCK_FOR_INSN (insn)),
			       REGNO (x)))
	{
	  struct insn_link *link;

	  FOR_EACH_LOG_LINK (link, insn)
	    if (dead_or_set_p (link->insn, x))
	      break;
	  /* No defining link found: treat the value as unknown.  */
	  if (!link)
	    {
	      rsp->nonzero_bits = GET_MODE_MASK (mode);
	      rsp->sign_bit_copies = 1;
	      return;
	    }
	}

      /* If this is a complex assignment, see if we can convert it into a
	 simple assignment.  */
      set = expand_field_assignment (set);

      /* If this is a simple assignment, or we have a paradoxical SUBREG,
	 set what we know about X.  */

      if (SET_DEST (set) == x
	  || (paradoxical_subreg_p (SET_DEST (set))
	      && SUBREG_REG (SET_DEST (set)) == x))
	update_rsp_from_reg_equal (rsp, insn, set, x);
      else
	{
	  /* Partial store of unknown shape: give up on this register.  */
	  rsp->nonzero_bits = GET_MODE_MASK (mode);
	  rsp->sign_bit_copies = 1;
	}
    }
}
1724 : :
/* See if INSN can be combined into I3.  PRED, PRED2, SUCC and SUCC2 are
   optionally insns that were previously combined into I3 or that will be
   combined into the merger of INSN and I3.  The order is PRED, PRED2,
   INSN, SUCC, SUCC2, I3.

   Return false if the combination is not allowed for any reason.

   If the combination is allowed, *PDEST will be set to the single
   destination of INSN and *PSRC to the single source, and this function
   will return true.  */

static bool
can_combine_p (rtx_insn *insn, rtx_insn *i3, rtx_insn *pred ATTRIBUTE_UNUSED,
	       rtx_insn *pred2 ATTRIBUTE_UNUSED, rtx_insn *succ, rtx_insn *succ2,
	       rtx *pdest, rtx *psrc)
{
  int i;
  const_rtx set = 0;
  rtx src, dest;
  rtx_insn *p;
  rtx link;
  bool all_adjacent = true;
  bool (*is_volatile_p) (const_rtx);

  /* Determine whether INSN, SUCC, SUCC2 and I3 are consecutive active
     insns; several later checks are only needed when they are not.  */
  if (succ)
    {
      if (succ2)
	{
	  if (next_active_insn (succ2) != i3)
	    all_adjacent = false;
	  if (next_active_insn (succ) != succ2)
	    all_adjacent = false;
	}
      else if (next_active_insn (succ) != i3)
	all_adjacent = false;
      if (next_active_insn (insn) != succ)
	all_adjacent = false;
    }
  else if (next_active_insn (insn) != i3)
    all_adjacent = false;

  /* Can combine only if previous insn is a SET of a REG or a SUBREG,
     or a PARALLEL consisting of such a SET and CLOBBERs.

     If INSN has CLOBBER parallel parts, ignore them for our processing.
     By definition, these happen during the execution of the insn.  When it
     is merged with another insn, all bets are off.  If they are, in fact,
     needed and aren't also supplied in I3, they may be added by
     recog_for_combine.  Otherwise, it won't match.

     We can also ignore a SET whose SET_DEST is mentioned in a REG_UNUSED
     note.

     Get the source and destination of INSN.  If more than one, can't
     combine.  */

  if (GET_CODE (PATTERN (insn)) == SET)
    set = PATTERN (insn);
  else if (GET_CODE (PATTERN (insn)) == PARALLEL
	   && GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == SET)
    {
      for (i = 0; i < XVECLEN (PATTERN (insn), 0); i++)
	{
	  rtx elt = XVECEXP (PATTERN (insn), 0, i);

	  switch (GET_CODE (elt))
	    {
	    /* This is important to combine floating point insns
	       for the SH4 port.  */
	    case USE:
	      /* Combining an isolated USE doesn't make sense.
		 We depend here on combinable_i3pat to reject them.  */
	      /* The code below this loop only verifies that the inputs of
		 the SET in INSN do not change.  We call reg_set_between_p
		 to verify that the REG in the USE does not change between
		 I3 and INSN.
		 If the USE in INSN was for a pseudo register, the matching
		 insn pattern will likely match any register; combining this
		 with any other USE would only be safe if we knew that the
		 used registers have identical values, or if there was
		 something to tell them apart, e.g. different modes.  For
		 now, we forgo such complicated tests and simply disallow
		 combining of USES of pseudo registers with any other USE.  */
	      if (REG_P (XEXP (elt, 0))
		  && GET_CODE (PATTERN (i3)) == PARALLEL)
		{
		  rtx i3pat = PATTERN (i3);
		  int i = XVECLEN (i3pat, 0) - 1;
		  unsigned int regno = REGNO (XEXP (elt, 0));

		  do
		    {
		      rtx i3elt = XVECEXP (i3pat, 0, i);

		      if (GET_CODE (i3elt) == USE
			  && REG_P (XEXP (i3elt, 0))
			  && (REGNO (XEXP (i3elt, 0)) == regno
			      ? reg_set_between_p (XEXP (elt, 0),
						   PREV_INSN (insn), i3)
			      : regno >= FIRST_PSEUDO_REGISTER))
			return false;
		    }
		  while (--i >= 0);
		}
	      break;

	      /* We can ignore CLOBBERs.  */
	    case CLOBBER:
	      break;

	    case SET:
	      /* Ignore SETs whose result isn't used but not those that
		 have side-effects.  */
	      if (find_reg_note (insn, REG_UNUSED, SET_DEST (elt))
		  && insn_nothrow_p (insn)
		  && !side_effects_p (elt))
		break;

	      /* If we have already found a SET, this is a second one and
		 so we cannot combine with this insn.  */
	      if (set)
		return false;

	      set = elt;
	      break;

	    default:
	      /* Anything else means we can't combine.  */
	      return false;
	    }
	}

      if (set == 0
	  /* If SET_SRC is an ASM_OPERANDS we can't throw away these CLOBBERs,
	     so don't do anything with it.  */
	  || GET_CODE (SET_SRC (set)) == ASM_OPERANDS)
	return false;
    }
  else
    return false;

  if (set == 0)
    return false;

  /* The simplification in expand_field_assignment may call back to
     get_last_value, so set safe guard here.  */
  subst_low_luid = DF_INSN_LUID (insn);

  set = expand_field_assignment (set);
  src = SET_SRC (set), dest = SET_DEST (set);

  /* Do not eliminate user-specified register if it is in an
     asm input because we may break the register asm usage defined
     in GCC manual if allow to do so.
     Be aware that this may cover more cases than we expect but this
     should be harmless.  */
  if (REG_P (dest) && REG_USERVAR_P (dest) && HARD_REGISTER_P (dest)
      && extract_asm_operands (PATTERN (i3)))
    return false;

  /* Don't eliminate a store in the stack pointer.  */
  if (dest == stack_pointer_rtx
      /* Don't combine with an insn that sets a register to itself if it has
	 a REG_EQUAL note.  This may be part of a LIBCALL sequence.  */
      || (rtx_equal_p (src, dest) && find_reg_note (insn, REG_EQUAL, NULL_RTX))
      /* Can't merge an ASM_OPERANDS.  */
      || GET_CODE (src) == ASM_OPERANDS
      /* Can't merge a function call.  */
      || GET_CODE (src) == CALL
      /* Don't eliminate a function call argument.  */
      || (CALL_P (i3)
	  && (find_reg_fusage (i3, USE, dest)
	      || (REG_P (dest)
		  && REGNO (dest) < FIRST_PSEUDO_REGISTER
		  && global_regs[REGNO (dest)])))
      /* Don't substitute into an incremented register.  */
      || FIND_REG_INC_NOTE (i3, dest)
      || (succ && FIND_REG_INC_NOTE (succ, dest))
      || (succ2 && FIND_REG_INC_NOTE (succ2, dest))
      /* Don't substitute into a non-local goto, this confuses CFG.  */
      || (JUMP_P (i3) && find_reg_note (i3, REG_NON_LOCAL_GOTO, NULL_RTX))
      /* Make sure that DEST is not used after INSN but before SUCC, or
	 after SUCC and before SUCC2, or after SUCC2 but before I3.  */
      || (!all_adjacent
	  && ((succ2
	       && (reg_used_between_p (dest, succ2, i3)
		   || reg_used_between_p (dest, succ, succ2)))
	      || (!succ2 && succ && reg_used_between_p (dest, succ, i3))
	      || (!succ2 && !succ && reg_used_between_p (dest, insn, i3))
	      || (succ
		  /* SUCC and SUCC2 can be split halves from a PARALLEL; in
		     that case SUCC is not in the insn stream, so use SUCC2
		     instead for this test.  */
		  && reg_used_between_p (dest, insn,
					 succ2
					 && INSN_UID (succ) == INSN_UID (succ2)
					 ? succ2 : succ))))
      /* Make sure that the value that is to be substituted for the register
	 does not use any registers whose values alter in between.  However,
	 If the insns are adjacent, a use can't cross a set even though we
	 think it might (this can happen for a sequence of insns each setting
	 the same destination; last_set of that register might point to
	 a NOTE).  If INSN has a REG_EQUIV note, the register is always
	 equivalent to the memory so the substitution is valid even if there
	 are intervening stores.  Also, don't move a volatile asm or
	 UNSPEC_VOLATILE across any other insns.  */
      || (! all_adjacent
	  && (((!MEM_P (src)
		|| ! find_reg_note (insn, REG_EQUIV, src))
	       && modified_between_p (src, insn, i3))
	      || (GET_CODE (src) == ASM_OPERANDS && MEM_VOLATILE_P (src))
	      || GET_CODE (src) == UNSPEC_VOLATILE))
      /* Don't combine across a CALL_INSN, because that would possibly
	 change whether the life span of some REGs crosses calls or not,
	 and it is a pain to update that information.
	 Exception: if source is a constant, moving it later can't hurt.
	 Accept that as a special case.  */
      || (DF_INSN_LUID (insn) < last_call_luid && ! CONSTANT_P (src)))
    return false;

  /* DEST must be a REG.  */
  if (REG_P (dest))
    {
      /* If register alignment is being enforced for multi-word items in all
	 cases except for parameters, it is possible to have a register copy
	 insn referencing a hard register that is not allowed to contain the
	 mode being copied and which would not be valid as an operand of most
	 insns.  Eliminate this problem by not combining with such an insn.

	 Also, on some machines we don't want to extend the life of a hard
	 register.  */

      if (REG_P (src)
	  && ((REGNO (dest) < FIRST_PSEUDO_REGISTER
	       && !targetm.hard_regno_mode_ok (REGNO (dest), GET_MODE (dest)))
	      /* Don't extend the life of a hard register unless it is
		 user variable (if we have few registers) or it can't
		 fit into the desired register (meaning something special
		 is going on).
		 Also avoid substituting a return register into I3, because
		 reload can't handle a conflict with constraints of other
		 inputs.  */
	      || (REGNO (src) < FIRST_PSEUDO_REGISTER
		  && !targetm.hard_regno_mode_ok (REGNO (src),
						  GET_MODE (src)))))
	return false;
    }
  else
    return false;


  if (GET_CODE (PATTERN (i3)) == PARALLEL)
    for (i = XVECLEN (PATTERN (i3), 0) - 1; i >= 0; i--)
      if (GET_CODE (XVECEXP (PATTERN (i3), 0, i)) == CLOBBER)
	{
	  rtx reg = XEXP (XVECEXP (PATTERN (i3), 0, i), 0);

	  /* If the clobber represents an earlyclobber operand, we must not
	     substitute an expression containing the clobbered register.
	     As we do not analyze the constraint strings here, we have to
	     make the conservative assumption.  However, if the register is
	     a fixed hard reg, the clobber cannot represent any operand;
	     we leave it up to the machine description to either accept or
	     reject use-and-clobber patterns.  */
	  if (!REG_P (reg)
	      || REGNO (reg) >= FIRST_PSEUDO_REGISTER
	      || !fixed_regs[REGNO (reg)])
	    if (reg_overlap_mentioned_p (reg, src))
	      return false;
	}

  /* If INSN contains anything volatile, or is an `asm' (whether volatile
     or not), reject, unless nothing volatile comes between it and I3 */

  if (GET_CODE (src) == ASM_OPERANDS || volatile_refs_p (src))
    {
      /* Make sure neither succ nor succ2 contains a volatile reference.  */
      if (succ2 != 0 && volatile_refs_p (PATTERN (succ2)))
	return false;
      if (succ != 0 && volatile_refs_p (PATTERN (succ)))
	return false;
      /* We'll check insns between INSN and I3 below.  */
    }

  /* If INSN is an asm, and DEST is a hard register, reject, since it has
     to be an explicit register variable, and was chosen for a reason.  */

  if (GET_CODE (src) == ASM_OPERANDS
      && REG_P (dest) && REGNO (dest) < FIRST_PSEUDO_REGISTER)
    return false;

  /* If INSN contains volatile references (specifically volatile MEMs),
     we cannot combine across any other volatile references.
     Even if INSN doesn't contain volatile references, any intervening
     volatile insn might affect machine state.  */

  is_volatile_p = volatile_refs_p (PATTERN (insn))
		  ? volatile_refs_p
		  : volatile_insn_p;

  for (p = NEXT_INSN (insn); p != i3; p = NEXT_INSN (p))
    if (INSN_P (p) && p != succ && p != succ2 && is_volatile_p (PATTERN (p)))
      return false;

  /* If INSN contains an autoincrement or autodecrement, make sure that
     register is not used between there and I3, and not already used in
     I3 either.  Neither must it be used in PRED or SUCC, if they exist.
     Also insist that I3 not be a jump if using LRA; if it were one
     and the incremented register were spilled, we would lose.
     Reload handles this correctly.  */

  if (AUTO_INC_DEC)
    for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
      if (REG_NOTE_KIND (link) == REG_INC
	  && ((JUMP_P (i3) && targetm.lra_p ())
	      || reg_used_between_p (XEXP (link, 0), insn, i3)
	      || (pred != NULL_RTX
		  && reg_overlap_mentioned_p (XEXP (link, 0), PATTERN (pred)))
	      || (pred2 != NULL_RTX
		  && reg_overlap_mentioned_p (XEXP (link, 0), PATTERN (pred2)))
	      || (succ != NULL_RTX
		  && reg_overlap_mentioned_p (XEXP (link, 0), PATTERN (succ)))
	      || (succ2 != NULL_RTX
		  && reg_overlap_mentioned_p (XEXP (link, 0), PATTERN (succ2)))
	      || reg_overlap_mentioned_p (XEXP (link, 0), PATTERN (i3))))
	return false;

  /* If we get here, we have passed all the tests and the combination is
     to be allowed.  */

  *pdest = dest;
  *psrc = src;

  return true;
}
2060 : :
/* LOC is the location within I3 that contains its pattern or the component
   of a PARALLEL of the pattern.  We validate that it is valid for combining.

   One problem is if I3 modifies its output, as opposed to replacing it
   entirely, we can't allow the output to contain I2DEST, I1DEST or I0DEST as
   doing so would produce an insn that is not equivalent to the original insns.

   Consider:

	 (set (reg:DI 101) (reg:DI 100))
	 (set (subreg:SI (reg:DI 101) 0) <foo>)

   This is NOT equivalent to:

	 (parallel [(set (subreg:SI (reg:DI 100) 0) <foo>)
		    (set (reg:DI 101) (reg:DI 100))])

   Not only does this modify 100 (in which case it might still be valid
   if 100 were dead in I2), it sets 101 to the ORIGINAL value of 100.

   We can also run into a problem if I2 sets a register that I1
   uses and I1 gets directly substituted into I3 (not via I2).  In that
   case, we would be getting the wrong value of I2DEST into I3, so we
   must reject the combination.  This case occurs when I2 and I1 both
   feed into I3, rather than when I1 feeds into I2, which feeds into I3.
   If I1_NOT_IN_SRC is nonzero, it means that finding I1 in the source
   of a SET must prevent combination from occurring.  The same situation
   can occur for I0, in which case I0_NOT_IN_SRC is set.

   Before doing the above check, we first try to expand a field assignment
   into a set of logical operations.

   If PI3_DEST_KILLED is nonzero, it is a pointer to a location in which
   we place a register that is both set and used within I3.  If more than one
   such register is detected, we fail.

   Return true if the combination is valid, false otherwise.  */

static bool
combinable_i3pat (rtx_insn *i3, rtx *loc, rtx i2dest, rtx i1dest, rtx i0dest,
		  bool i1_not_in_src, bool i0_not_in_src, rtx *pi3dest_killed)
{
  rtx x = *loc;

  if (GET_CODE (x) == SET)
    {
      rtx set = x ;
      rtx dest = SET_DEST (set);
      rtx src = SET_SRC (set);
      rtx inner_dest = dest;
      rtx subdest;

      /* Strip off wrappers that denote a partial store so we can look at
	 the register (or MEM) actually being written.  */
      while (GET_CODE (inner_dest) == STRICT_LOW_PART
	     || GET_CODE (inner_dest) == SUBREG
	     || GET_CODE (inner_dest) == ZERO_EXTRACT)
	inner_dest = XEXP (inner_dest, 0);

      /* Check for the case where I3 modifies its output, as discussed
	 above.  We don't want to prevent pseudos from being combined
	 into the address of a MEM, so only prevent the combination if
	 i1 or i2 set the same MEM.  */
      if ((inner_dest != dest &&
	   (!MEM_P (inner_dest)
	    || rtx_equal_p (i2dest, inner_dest)
	    || (i1dest && rtx_equal_p (i1dest, inner_dest))
	    || (i0dest && rtx_equal_p (i0dest, inner_dest)))
	   && (reg_overlap_mentioned_p (i2dest, inner_dest)
	       || (i1dest && reg_overlap_mentioned_p (i1dest, inner_dest))
	       || (i0dest && reg_overlap_mentioned_p (i0dest, inner_dest))))

	  /* This is the same test done in can_combine_p except we can't test
	     all_adjacent; we don't have to, since this instruction will stay
	     in place, thus we are not considering increasing the lifetime of
	     INNER_DEST.

	     Also, if this insn sets a function argument, combining it with
	     something that might need a spill could clobber a previous
	     function argument; the all_adjacent test in can_combine_p also
	     checks this; here, we do a more specific test for this case.  */

	  || (REG_P (inner_dest)
	      && REGNO (inner_dest) < FIRST_PSEUDO_REGISTER
	      && !targetm.hard_regno_mode_ok (REGNO (inner_dest),
					      GET_MODE (inner_dest)))
	  || (i1_not_in_src && reg_overlap_mentioned_p (i1dest, src))
	  || (i0_not_in_src && reg_overlap_mentioned_p (i0dest, src)))
	return false;

      /* If DEST is used in I3, it is being killed in this insn, so
	 record that for later.  We have to consider paradoxical
	 subregs here, since they kill the whole register, but we
	 ignore partial subregs, STRICT_LOW_PART, etc.
	 Never add REG_DEAD notes for the FRAME_POINTER_REGNUM or the
	 STACK_POINTER_REGNUM, since these are always considered to be
	 live.  Similarly for ARG_POINTER_REGNUM if it is fixed.  */
      subdest = dest;
      if (GET_CODE (subdest) == SUBREG && !partial_subreg_p (subdest))
	subdest = SUBREG_REG (subdest);
      if (pi3dest_killed
	  && REG_P (subdest)
	  && reg_referenced_p (subdest, PATTERN (i3))
	  && REGNO (subdest) != FRAME_POINTER_REGNUM
	  && (HARD_FRAME_POINTER_IS_FRAME_POINTER
	      || REGNO (subdest) != HARD_FRAME_POINTER_REGNUM)
	  && (FRAME_POINTER_REGNUM == ARG_POINTER_REGNUM
	      || (REGNO (subdest) != ARG_POINTER_REGNUM
		  || ! fixed_regs [REGNO (subdest)]))
	  && REGNO (subdest) != STACK_POINTER_REGNUM)
	{
	  /* Only a single set-and-used register may be recorded.  */
	  if (*pi3dest_killed)
	    return false;

	  *pi3dest_killed = subdest;
	}
    }

  else if (GET_CODE (x) == PARALLEL)
    {
      int i;

      /* Recurse over every arm of the PARALLEL; all must be valid.  */
      for (i = 0; i < XVECLEN (x, 0); i++)
	if (! combinable_i3pat (i3, &XVECEXP (x, 0, i), i2dest, i1dest, i0dest,
				i1_not_in_src, i0_not_in_src, pi3dest_killed))
	  return false;
    }

  return true;
}
2189 : :
2190 : : /* Return true if X is an arithmetic expression that contains a multiplication
2191 : : and division. We don't count multiplications by powers of two here. */
2192 : :
2193 : : static bool
2194 : 17049915 : contains_muldiv (rtx x)
2195 : : {
2196 : 17718627 : switch (GET_CODE (x))
2197 : : {
2198 : : case MOD: case DIV: case UMOD: case UDIV:
2199 : : return true;
2200 : :
2201 : 472632 : case MULT:
2202 : 472632 : return ! (CONST_INT_P (XEXP (x, 1))
2203 : 125352 : && pow2p_hwi (UINTVAL (XEXP (x, 1))));
2204 : 17089152 : default:
2205 : 17089152 : if (BINARY_P (x))
2206 : 5853276 : return contains_muldiv (XEXP (x, 0))
2207 : 5853276 : || contains_muldiv (XEXP (x, 1));
2208 : :
2209 : 11235876 : if (UNARY_P (x))
2210 : 668712 : return contains_muldiv (XEXP (x, 0));
2211 : :
2212 : : return false;
2213 : : }
2214 : : }
2215 : :
/* Determine whether INSN can be used in a combination.  Return true if
   not.  This is used in try_combine to detect early some cases where we
   can't perform combinations.  */

static bool
cant_combine_insn_p (rtx_insn *insn)
{
  rtx set;
  rtx src, dest;

  /* If this isn't really an insn, we can't do anything.
     This can occur when flow deletes an insn that it has merged into an
     auto-increment address.  */
  if (!NONDEBUG_INSN_P (insn))
    return true;

  /* Never combine loads and stores involving hard regs that are likely
     to be spilled.  The register allocator can usually handle such
     reg-reg moves by tying.  If we allow the combiner to make
     substitutions of likely-spilled regs, reload might die.
     As an exception, we allow combinations involving fixed regs; these are
     not available to the register allocator so there's no risk involved.  */

  set = single_set (insn);
  if (! set)
    return false;
  src = SET_SRC (set);
  dest = SET_DEST (set);
  /* Look through SUBREGs to the underlying registers.  */
  if (GET_CODE (src) == SUBREG)
    src = SUBREG_REG (src);
  if (GET_CODE (dest) == SUBREG)
    dest = SUBREG_REG (dest);
  /* Reject a reg-reg move touching a non-fixed hard register (on the
     source side any such register; on the destination side only when its
     class is likely to be spilled).  */
  if (REG_P (src) && REG_P (dest)
      && ((HARD_REGISTER_P (src)
	   && ! TEST_HARD_REG_BIT (fixed_reg_set, REGNO (src))
#ifdef LEAF_REGISTERS
	   && ! LEAF_REGISTERS [REGNO (src)])
#else
	   )
#endif
	  || (HARD_REGISTER_P (dest)
	      && ! TEST_HARD_REG_BIT (fixed_reg_set, REGNO (dest))
	      && targetm.class_likely_spilled_p (REGNO_REG_CLASS (REGNO (dest))))))
    return true;

  return false;
}
2263 : :
/* Bookkeeping for likely_spilled_retval_p / likely_spilled_retval_1:
   describes the window of hard registers holding a function return
   value, plus a bitmask of which of them are still (probably) live.  */
struct likely_spilled_retval_info
{
  /* First hard regno of the return value and how many regs it spans.  */
  unsigned regno, nregs;
  /* Bit I set means register REGNO + I has not been fully overwritten.  */
  unsigned mask;
};
2269 : :
2270 : : /* Called via note_stores by likely_spilled_retval_p. Remove from info->mask
2271 : : hard registers that are known to be written to / clobbered in full. */
2272 : : static void
2273 : 161283 : likely_spilled_retval_1 (rtx x, const_rtx set, void *data)
2274 : : {
2275 : 161283 : struct likely_spilled_retval_info *const info =
2276 : : (struct likely_spilled_retval_info *) data;
2277 : 161283 : unsigned regno, nregs;
2278 : 161283 : unsigned new_mask;
2279 : :
2280 : 161283 : if (!REG_P (XEXP (set, 0)))
2281 : : return;
2282 : 161283 : regno = REGNO (x);
2283 : 161283 : if (regno >= info->regno + info->nregs)
2284 : : return;
2285 : 161283 : nregs = REG_NREGS (x);
2286 : 161283 : if (regno + nregs <= info->regno)
2287 : : return;
2288 : 161283 : new_mask = (2U << (nregs - 1)) - 1;
2289 : 161283 : if (regno < info->regno)
2290 : 0 : new_mask >>= info->regno - regno;
2291 : : else
2292 : 161283 : new_mask <<= regno - info->regno;
2293 : 161283 : info->mask &= ~new_mask;
2294 : : }
2295 : :
/* Return true iff part of the return value is live during INSN, and
   it is likely spilled.  This can happen when more than one insn is needed
   to copy the return value, e.g. when we consider to combine into the
   second copy insn for a complex value.  */

static bool
likely_spilled_retval_p (rtx_insn *insn)
{
  rtx_insn *use = BB_END (this_basic_block);
  rtx reg;
  rtx_insn *p;
  unsigned regno, nregs;
  /* We assume here that no machine mode needs more than
     32 hard registers when the value overlaps with a register
     for which TARGET_FUNCTION_VALUE_REGNO_P is true.  */
  unsigned mask;
  struct likely_spilled_retval_info info;

  /* The pattern of interest is a (use (reg ...)) of the return value at
     the end of the block; anything else means no return value is live.  */
  if (!NONJUMP_INSN_P (use) || GET_CODE (PATTERN (use)) != USE || insn == use)
    return false;
  reg = XEXP (PATTERN (use), 0);
  if (!REG_P (reg) || !targetm.calls.function_value_regno_p (REGNO (reg)))
    return false;
  regno = REGNO (reg);
  nregs = REG_NREGS (reg);
  /* A single-register value is copied by one insn; nothing to worry about.  */
  if (nregs == 1)
    return false;
  mask = (2U << (nregs - 1)) - 1;

  /* Disregard parts of the return value that are set later.  */
  info.regno = regno;
  info.nregs = nregs;
  info.mask = mask;
  for (p = PREV_INSN (use); info.mask && p != insn; p = PREV_INSN (p))
    if (INSN_P (p))
      note_stores (p, likely_spilled_retval_1, &info);
  mask = info.mask;

  /* Check if any of the (probably) live return value registers is
     likely spilled.  */
  nregs --;
  do
    {
      if ((mask & 1 << nregs)
	  && targetm.class_likely_spilled_p (REGNO_REG_CLASS (regno + nregs)))
	return true;
    } while (nregs--);
  return false;
}
2345 : :
2346 : : /* Adjust INSN after we made a change to its destination.
2347 : :
2348 : : Changing the destination can invalidate notes that say something about
2349 : : the results of the insn and a LOG_LINK pointing to the insn. */
2350 : :
2351 : : static void
2352 : 16520 : adjust_for_new_dest (rtx_insn *insn)
2353 : : {
2354 : : /* For notes, be conservative and simply remove them. */
2355 : 16520 : remove_reg_equal_equiv_notes (insn, true);
2356 : :
2357 : : /* The new insn will have a destination that was previously the destination
2358 : : of an insn just above it. Call distribute_links to make a LOG_LINK from
2359 : : the next use of that destination. */
2360 : :
2361 : 16520 : rtx set = single_set (insn);
2362 : 16520 : gcc_assert (set);
2363 : :
2364 : 16520 : rtx reg = SET_DEST (set);
2365 : :
2366 : 16520 : while (GET_CODE (reg) == ZERO_EXTRACT
2367 : 16520 : || GET_CODE (reg) == STRICT_LOW_PART
2368 : 33040 : || GET_CODE (reg) == SUBREG)
2369 : 0 : reg = XEXP (reg, 0);
2370 : 16520 : gcc_assert (REG_P (reg));
2371 : :
2372 : 16520 : distribute_links (alloc_insn_link (insn, REGNO (reg), NULL));
2373 : :
2374 : 16520 : df_insn_rescan (insn);
2375 : 16520 : }
2376 : :
2377 : : /* Return TRUE if combine can reuse reg X in mode MODE.
2378                 :             :    ADDED_SETS is true if the original set is still required.  */
2379 : : static bool
2380 : 2667226 : can_change_dest_mode (rtx x, bool added_sets, machine_mode mode)
2381 : : {
2382 : 2667226 : unsigned int regno;
2383 : :
2384 : 2667226 : if (!REG_P (x))
2385 : : return false;
2386 : :
2387 : : /* Don't change between modes with different underlying register sizes,
2388 : : since this could lead to invalid subregs. */
2389 : 2667226 : if (maybe_ne (REGMODE_NATURAL_SIZE (mode),
2390 : 2667226 : REGMODE_NATURAL_SIZE (GET_MODE (x))))
2391 : : return false;
2392 : :
2393 : 2667226 : regno = REGNO (x);
2394 : : /* Allow hard registers if the new mode is legal, and occupies no more
2395 : : registers than the old mode. */
2396 : 2667226 : if (regno < FIRST_PSEUDO_REGISTER)
2397 : 1183732 : return (targetm.hard_regno_mode_ok (regno, mode)
2398 : 1183732 : && REG_NREGS (x) >= hard_regno_nregs (regno, mode));
2399 : :
2400 : : /* Or a pseudo that is only used once. */
2401 : 1483494 : return (regno < reg_n_sets_max
2402 : 1483492 : && REG_N_SETS (regno) == 1
2403 : 1434173 : && !added_sets
2404 : 2917667 : && !REG_USERVAR_P (x));
2405 : : }
2406 : :
2407 : :
2408 : : /* Check whether X, the destination of a set, refers to part of
2409 : : the register specified by REG. */
2410 : :
2411 : : static bool
2412 : 16757 : reg_subword_p (rtx x, rtx reg)
2413 : : {
2414 : : /* Check that reg is an integer mode register. */
2415 : 16757 : if (!REG_P (reg) || GET_MODE_CLASS (GET_MODE (reg)) != MODE_INT)
2416 : : return false;
2417 : :
2418 : 16287 : if (GET_CODE (x) == STRICT_LOW_PART
2419 : 15762 : || GET_CODE (x) == ZERO_EXTRACT)
2420 : 548 : x = XEXP (x, 0);
2421 : :
2422 : 16287 : return GET_CODE (x) == SUBREG
2423 : 16088 : && !paradoxical_subreg_p (x)
2424 : 16088 : && SUBREG_REG (x) == reg
2425 : 32375 : && GET_MODE_CLASS (GET_MODE (x)) == MODE_INT;
2426 : : }
2427 : :
2428 : : /* Return whether PAT is a PARALLEL of exactly N register SETs followed
2429 : : by an arbitrary number of CLOBBERs. */
2430 : : static bool
2431 : 100211855 : is_parallel_of_n_reg_sets (rtx pat, int n)
2432 : : {
2433 : 100211855 : if (GET_CODE (pat) != PARALLEL)
2434 : : return false;
2435 : :
2436 : 26799854 : int len = XVECLEN (pat, 0);
2437 : 26799854 : if (len < n)
2438 : : return false;
2439 : :
2440 : : int i;
2441 : 53239361 : for (i = 0; i < n; i++)
2442 : 50483845 : if (GET_CODE (XVECEXP (pat, 0, i)) != SET
2443 : 29853453 : || !REG_P (SET_DEST (XVECEXP (pat, 0, i))))
2444 : : return false;
2445 : 3117980 : for ( ; i < len; i++)
2446 : 953410 : switch (GET_CODE (XVECEXP (pat, 0, i)))
2447 : : {
2448 : 362465 : case CLOBBER:
2449 : 362465 : if (XEXP (XVECEXP (pat, 0, i), 0) == const0_rtx)
2450 : : return false;
2451 : 362464 : break;
2452 : : default:
2453 : : return false;
2454 : : }
2455 : : return true;
2456 : : }
2457 : :
2458 : : /* Return whether INSN, a PARALLEL of N register SETs (and maybe some
2459 : : CLOBBERs), can be split into individual SETs in that order, without
2460 : : changing semantics. */
2461 : : static bool
2462 : 374175 : can_split_parallel_of_n_reg_sets (rtx_insn *insn, int n)
2463 : : {
2464 : 374175 : if (!insn_nothrow_p (insn))
2465 : : return false;
2466 : :
2467 : 372666 : rtx pat = PATTERN (insn);
2468 : :
2469 : 372666 : int i, j;
2470 : 1004298 : for (i = 0; i < n; i++)
2471 : : {
2472 : 688482 : if (side_effects_p (SET_SRC (XVECEXP (pat, 0, i))))
2473 : : return false;
2474 : :
2475 : 685374 : rtx reg = SET_DEST (XVECEXP (pat, 0, i));
2476 : :
2477 : 1001190 : for (j = i + 1; j < n; j++)
2478 : 369558 : if (reg_referenced_p (reg, XVECEXP (pat, 0, j)))
2479 : : return false;
2480 : : }
2481 : :
2482 : : return true;
2483 : : }
2484 : :
2485 : : /* Return whether X is just a single_set, with the source
2486 : : a general_operand. */
2487 : : static bool
2488 : 66009798 : is_just_move (rtx_insn *x)
2489 : : {
2490 : 66009798 : rtx set = single_set (x);
2491 : 66009798 : if (!set)
2492 : : return false;
2493 : :
2494 : 65585258 : return general_operand (SET_SRC (set), VOIDmode);
2495 : : }
2496 : :
2497 : : /* Callback function to count autoincs. */
2498 : :
2499 : : static int
2500 : 1028154 : count_auto_inc (rtx, rtx, rtx, rtx, rtx, void *arg)
2501 : : {
2502 : 1028154 : (*((int *) arg))++;
2503 : :
2504 : 1028154 : return 0;
2505 : : }
2506 : :
2507 : : /* Try to combine the insns I0, I1 and I2 into I3.
2508 : : Here I0, I1 and I2 appear earlier than I3.
2509 : : I0 and I1 can be zero; then we combine just I2 into I3, or I1 and I2 into
2510 : : I3.
2511 : :
2512 : : If we are combining more than two insns and the resulting insn is not
2513 : : recognized, try splitting it into two insns. If that happens, I2 and I3
2514 : : are retained and I1/I0 are pseudo-deleted by turning them into a NOTE.
2515 : : Otherwise, I0, I1 and I2 are pseudo-deleted.
2516 : :
2517 : : Return 0 if the combination does not work. Then nothing is changed.
2518 : : If we did the combination, return the insn at which combine should
2519 : : resume scanning.
2520 : :
2521 : : Set NEW_DIRECT_JUMP_P to true if try_combine creates a
2522 : : new direct jump instruction.
2523 : :
2524 : : LAST_COMBINED_INSN is either I3, or some insn after I3 that has
2525 : : been I3 passed to an earlier try_combine within the same basic
2526 : : block. */
2527 : :
2528 : : static rtx_insn *
2529 : 94597580 : try_combine (rtx_insn *i3, rtx_insn *i2, rtx_insn *i1, rtx_insn *i0,
2530 : : bool *new_direct_jump_p, rtx_insn *last_combined_insn)
2531 : : {
2532 : : /* New patterns for I3 and I2, respectively. */
2533 : 94597580 : rtx newpat, newi2pat = 0;
2534 : 94597580 : rtvec newpat_vec_with_clobbers = 0;
2535 : 94597580 : bool substed_i2 = false, substed_i1 = false, substed_i0 = false;
2536 : : /* Indicates need to preserve SET in I0, I1 or I2 in I3 if it is not
2537 : : dead. */
2538 : 94597580 : bool added_sets_0, added_sets_1, added_sets_2;
2539 : : /* Total number of SETs to put into I3. */
2540 : 94597580 : int total_sets;
2541 : : /* Nonzero if I2's or I1's body now appears in I3. */
2542 : 94597580 : int i2_is_used = 0, i1_is_used = 0;
2543 : : /* INSN_CODEs for new I3, new I2, and user of condition code. */
2544 : 94597580 : int insn_code_number, i2_code_number = 0, other_code_number = 0;
2545 : : /* Contains I3 if the destination of I3 is used in its source, which means
2546 : : that the old life of I3 is being killed. If that usage is placed into
2547 : : I2 and not in I3, a REG_DEAD note must be made. */
2548 : 94597580 : rtx i3dest_killed = 0;
2549 : : /* SET_DEST and SET_SRC of I2, I1 and I0. */
2550 : 94597580 : rtx i2dest = 0, i2src = 0, i1dest = 0, i1src = 0, i0dest = 0, i0src = 0;
2551 : : /* Copy of SET_SRC of I1 and I0, if needed. */
2552 : 94597580 : rtx i1src_copy = 0, i0src_copy = 0, i0src_copy2 = 0;
2553 : : /* Set if I2DEST was reused as a scratch register. */
2554 : 94597580 : bool i2scratch = false;
2555 : : /* The PATTERNs of I0, I1, and I2, or a copy of them in certain cases. */
2556 : 94597580 : rtx i0pat = 0, i1pat = 0, i2pat = 0;
2557 : : /* Indicates if I2DEST or I1DEST is in I2SRC or I1_SRC. */
2558 : 94597580 : bool i2dest_in_i2src = false, i1dest_in_i1src = false;
2559 : 94597580 : bool i2dest_in_i1src = false, i0dest_in_i0src = false;
2560 : 94597580 : bool i1dest_in_i0src = false, i2dest_in_i0src = false;;
2561 : 94597580 : bool i2dest_killed = false, i1dest_killed = false, i0dest_killed = false;
2562 : 94597580 : bool i1_feeds_i2_n = false, i0_feeds_i2_n = false, i0_feeds_i1_n = false;
2563 : : /* Notes that must be added to REG_NOTES in I3 and I2. */
2564 : 94597580 : rtx new_i3_notes, new_i2_notes;
2565 : : /* Notes that we substituted I3 into I2 instead of the normal case. */
2566 : 94597580 : bool i3_subst_into_i2 = false;
2567 : : /* Notes that I1, I2 or I3 is a MULT operation. */
2568 : 94597580 : bool have_mult = false;
2569 : 94597580 : bool swap_i2i3 = false;
2570 : 94597580 : bool split_i2i3 = false;
2571 : 94597580 : bool changed_i3_dest = false;
2572 : 94597580 : bool i2_was_move = false, i3_was_move = false;
2573 : 94597580 : int n_auto_inc = 0;
2574 : :
2575 : 94597580 : int maxreg;
2576 : 94597580 : rtx_insn *temp_insn;
2577 : 94597580 : rtx temp_expr;
2578 : 94597580 : struct insn_link *link;
2579 : 94597580 : rtx other_pat = 0;
2580 : 94597580 : rtx new_other_notes;
2581 : 94597580 : int i;
2582 : 94597580 : scalar_int_mode dest_mode, temp_mode;
2583 : 94597580 : bool has_non_call_exception = false;
2584 : :
2585 : : /* Immediately return if any of I0,I1,I2 are the same insn (I3 can
2586 : : never be). */
2587 : 94597580 : if (i1 == i2 || i0 == i2 || (i0 && i0 == i1))
2588 : : return 0;
2589 : :
2590 : : /* Only try four-insn combinations when there's high likelihood of
2591 : : success. Look for simple insns, such as loads of constants or
2592 : : binary operations involving a constant. */
2593 : 21695871 : if (i0)
2594 : : {
2595 : 21695871 : int i;
2596 : 21695871 : int ngood = 0;
2597 : 21695871 : int nshift = 0;
2598 : 21695871 : rtx set0, set3;
2599 : :
2600 : 21695871 : if (!flag_expensive_optimizations)
2601 : : return 0;
2602 : :
2603 : 86516322 : for (i = 0; i < 4; i++)
2604 : : {
2605 : 70717167 : rtx_insn *insn = i == 0 ? i0 : i == 1 ? i1 : i == 2 ? i2 : i3;
2606 : 70717167 : rtx set = single_set (insn);
2607 : 70717167 : rtx src;
2608 : 70717167 : if (!set)
2609 : 2344217 : continue;
2610 : 68372950 : src = SET_SRC (set);
2611 : 68372950 : if (CONSTANT_P (src))
2612 : : {
2613 : 4541831 : ngood += 2;
2614 : 4541831 : break;
2615 : : }
2616 : 63831119 : else if (BINARY_P (src) && CONSTANT_P (XEXP (src, 1)))
2617 : 7947200 : ngood++;
2618 : 55883919 : else if (GET_CODE (src) == IF_THEN_ELSE)
2619 : 2066958 : ngood++;
2620 : 53816961 : else if (GET_CODE (src) == ASHIFT || GET_CODE (src) == ASHIFTRT
2621 : 53731806 : || GET_CODE (src) == LSHIFTRT)
2622 : 114974 : nshift++;
2623 : : }
2624 : :
2625 : : /* If I0 loads a memory and I3 sets the same memory, then I1 and I2
2626 : : are likely manipulating its value. Ideally we'll be able to combine
2627 : : all four insns into a bitfield insertion of some kind.
2628 : :
2629 : : Note the source in I0 might be inside a sign/zero extension and the
2630 : : memory modes in I0 and I3 might be different. So extract the address
2631 : : from the destination of I3 and search for it in the source of I0.
2632 : :
2633 : : In the event that there's a match but the source/dest do not actually
2634 : : refer to the same memory, the worst that happens is we try some
2635 : : combinations that we wouldn't have otherwise. */
2636 : 20340986 : if ((set0 = single_set (i0))
2637 : : /* Ensure the source of SET0 is a MEM, possibly buried inside
2638 : : an extension. */
2639 : 20209738 : && (GET_CODE (SET_SRC (set0)) == MEM
2640 : 16938502 : || ((GET_CODE (SET_SRC (set0)) == ZERO_EXTEND
2641 : 16938502 : || GET_CODE (SET_SRC (set0)) == SIGN_EXTEND)
2642 : 565655 : && GET_CODE (XEXP (SET_SRC (set0), 0)) == MEM))
2643 : 3382086 : && (set3 = single_set (i3))
2644 : : /* Ensure the destination of SET3 is a MEM. */
2645 : 2929416 : && GET_CODE (SET_DEST (set3)) == MEM
2646 : : /* Would it be better to extract the base address for the MEM
2647 : : in SET3 and look for that? I don't have cases where it matters
2648 : : but I could envision such cases. */
2649 : 20685192 : && rtx_referenced_p (XEXP (SET_DEST (set3), 0), SET_SRC (set0)))
2650 : 22789 : ngood += 2;
2651 : :
2652 : 20340986 : if (ngood < 2 && nshift < 2)
2653 : : return 0;
2654 : : }
2655 : :
2656 : : /* Exit early if one of the insns involved can't be used for
2657 : : combinations. */
2658 : 79575390 : if (CALL_P (i2)
2659 : 74562902 : || (i1 && CALL_P (i1))
2660 : 71173007 : || (i0 && CALL_P (i0))
2661 : 70713771 : || cant_combine_insn_p (i3)
2662 : 67443979 : || cant_combine_insn_p (i2)
2663 : 51872197 : || (i1 && cant_combine_insn_p (i1))
2664 : 47003516 : || (i0 && cant_combine_insn_p (i0))
2665 : 126348979 : || likely_spilled_retval_p (i3))
2666 : 32801811 : return 0;
2667 : :
2668 : 46773579 : combine_attempts++;
2669 : 46773579 : undobuf.other_insn = 0;
2670 : :
2671 : : /* Reset the hard register usage information. */
2672 : 46773579 : CLEAR_HARD_REG_SET (newpat_used_regs);
2673 : :
2674 : 46773579 : if (dump_file && (dump_flags & TDF_DETAILS))
2675 : : {
2676 : 174 : if (i0)
2677 : 20 : fprintf (dump_file, "\nTrying %d, %d, %d -> %d:\n",
2678 : 20 : INSN_UID (i0), INSN_UID (i1), INSN_UID (i2), INSN_UID (i3));
2679 : 154 : else if (i1)
2680 : 26 : fprintf (dump_file, "\nTrying %d, %d -> %d:\n",
2681 : 26 : INSN_UID (i1), INSN_UID (i2), INSN_UID (i3));
2682 : : else
2683 : 128 : fprintf (dump_file, "\nTrying %d -> %d:\n",
2684 : 128 : INSN_UID (i2), INSN_UID (i3));
2685 : :
2686 : 174 : if (i0)
2687 : 20 : dump_insn_slim (dump_file, i0);
2688 : 174 : if (i1)
2689 : 46 : dump_insn_slim (dump_file, i1);
2690 : 174 : dump_insn_slim (dump_file, i2);
2691 : 174 : dump_insn_slim (dump_file, i3);
2692 : : }
2693 : :
2694 : : /* If multiple insns feed into one of I2 or I3, they can be in any
2695 : : order. To simplify the code below, reorder them in sequence. */
2696 : 46773579 : if (i0 && DF_INSN_LUID (i0) > DF_INSN_LUID (i2))
2697 : : std::swap (i0, i2);
2698 : 46773579 : if (i0 && DF_INSN_LUID (i0) > DF_INSN_LUID (i1))
2699 : : std::swap (i0, i1);
2700 : 46773579 : if (i1 && DF_INSN_LUID (i1) > DF_INSN_LUID (i2))
2701 : : std::swap (i1, i2);
2702 : :
2703 : 46773579 : added_links_insn = 0;
2704 : 46773579 : added_notes_insn = 0;
2705 : :
2706 : : /* First check for one important special case that the code below will
2707 : : not handle. Namely, the case where I1 is zero, I2 is a PARALLEL
2708 : : and I3 is a SET whose SET_SRC is a SET_DEST in I2. In that case,
2709 : : we may be able to replace that destination with the destination of I3.
2710 : : This occurs in the common code where we compute both a quotient and
2711 : : remainder into a structure, in which case we want to do the computation
2712 : : directly into the structure to avoid register-register copies.
2713 : :
2714 : : Note that this case handles both multiple sets in I2 and also cases
2715 : : where I2 has a number of CLOBBERs inside the PARALLEL.
2716 : :
2717 : : We make very conservative checks below and only try to handle the
2718 : : most common cases of this. For example, we only handle the case
2719 : : where I2 and I3 are adjacent to avoid making difficult register
2720 : : usage tests. */
2721 : :
2722 : 29233845 : if (i1 == 0 && NONJUMP_INSN_P (i3) && GET_CODE (PATTERN (i3)) == SET
2723 : 15292116 : && REG_P (SET_SRC (PATTERN (i3)))
2724 : 5149425 : && REGNO (SET_SRC (PATTERN (i3))) >= FIRST_PSEUDO_REGISTER
2725 : 4929171 : && find_reg_note (i3, REG_DEAD, SET_SRC (PATTERN (i3)))
2726 : 4055059 : && GET_CODE (PATTERN (i2)) == PARALLEL
2727 : 1063822 : && ! side_effects_p (SET_DEST (PATTERN (i3)))
2728 : : /* If the dest of I3 is a ZERO_EXTRACT or STRICT_LOW_PART, the code
2729 : : below would need to check what is inside (and reg_overlap_mentioned_p
2730 : : doesn't support those codes anyway). Don't allow those destinations;
2731 : : the resulting insn isn't likely to be recognized anyway. */
2732 : 576191 : && GET_CODE (SET_DEST (PATTERN (i3))) != ZERO_EXTRACT
2733 : 576163 : && GET_CODE (SET_DEST (PATTERN (i3))) != STRICT_LOW_PART
2734 : 575245 : && ! reg_overlap_mentioned_p (SET_SRC (PATTERN (i3)),
2735 : 575245 : SET_DEST (PATTERN (i3)))
2736 : 47348709 : && next_active_insn (i2) == i3)
2737 : : {
2738 : 356398 : rtx p2 = PATTERN (i2);
2739 : :
2740 : : /* Make sure that the destination of I3,
2741 : : which we are going to substitute into one output of I2,
2742 : : is not used within another output of I2. We must avoid making this:
2743 : : (parallel [(set (mem (reg 69)) ...)
2744 : : (set (reg 69) ...)])
2745 : : which is not well-defined as to order of actions.
2746 : : (Besides, reload can't handle output reloads for this.)
2747 : :
2748 : : The problem can also happen if the dest of I3 is a memory ref,
2749 : : if another dest in I2 is an indirect memory ref.
2750 : :
2751 : : Neither can this PARALLEL be an asm. We do not allow combining
2752 : : that usually (see can_combine_p), so do not here either. */
2753 : 356398 : bool ok = true;
2754 : 1081293 : for (i = 0; ok && i < XVECLEN (p2, 0); i++)
2755 : : {
2756 : 724895 : if ((GET_CODE (XVECEXP (p2, 0, i)) == SET
2757 : 355928 : || GET_CODE (XVECEXP (p2, 0, i)) == CLOBBER)
2758 : 1448391 : && reg_overlap_mentioned_p (SET_DEST (PATTERN (i3)),
2759 : 723496 : SET_DEST (XVECEXP (p2, 0, i))))
2760 : : ok = false;
2761 : 724126 : else if (GET_CODE (XVECEXP (p2, 0, i)) == SET
2762 : 368200 : && GET_CODE (SET_SRC (XVECEXP (p2, 0, i))) == ASM_OPERANDS)
2763 : 1894 : ok = false;
2764 : : }
2765 : :
2766 : 356398 : if (ok)
2767 : 420816 : for (i = 0; i < XVECLEN (p2, 0); i++)
2768 : 389180 : if (GET_CODE (XVECEXP (p2, 0, i)) == SET
2769 : 389180 : && SET_DEST (XVECEXP (p2, 0, i)) == SET_SRC (PATTERN (i3)))
2770 : : {
2771 : 322868 : combine_merges++;
2772 : :
2773 : 322868 : subst_insn = i3;
2774 : 322868 : subst_low_luid = DF_INSN_LUID (i2);
2775 : :
2776 : 322868 : added_sets_2 = added_sets_1 = added_sets_0 = false;
2777 : 322868 : i2src = SET_SRC (XVECEXP (p2, 0, i));
2778 : 322868 : i2dest = SET_DEST (XVECEXP (p2, 0, i));
2779 : 322868 : i2dest_killed = dead_or_set_p (i2, i2dest);
2780 : :
2781 : : /* Replace the dest in I2 with our dest and make the resulting
2782 : : insn the new pattern for I3. Then skip to where we validate
2783 : : the pattern. Everything was set up above. */
2784 : 322868 : SUBST (SET_DEST (XVECEXP (p2, 0, i)), SET_DEST (PATTERN (i3)));
2785 : 322868 : newpat = p2;
2786 : 322868 : i3_subst_into_i2 = true;
2787 : 322868 : goto validate_replacement;
2788 : : }
2789 : : }
2790 : :
2791 : : /* If I2 is setting a pseudo to a constant and I3 is setting some
2792 : : sub-part of it to another constant, merge them by making a new
2793 : : constant. */
2794 : 46450711 : if (i1 == 0
2795 : 28910977 : && (temp_expr = single_set (i2)) != 0
2796 : 28635825 : && is_a <scalar_int_mode> (GET_MODE (SET_DEST (temp_expr)), &temp_mode)
2797 : 18841421 : && CONST_SCALAR_INT_P (SET_SRC (temp_expr))
2798 : 2770354 : && GET_CODE (PATTERN (i3)) == SET
2799 : 1370152 : && CONST_SCALAR_INT_P (SET_SRC (PATTERN (i3)))
2800 : 46467468 : && reg_subword_p (SET_DEST (PATTERN (i3)), SET_DEST (temp_expr)))
2801 : : {
2802 : 16088 : rtx dest = SET_DEST (PATTERN (i3));
2803 : 16088 : rtx temp_dest = SET_DEST (temp_expr);
2804 : 16088 : int offset = -1;
2805 : 16088 : int width = 0;
2806 : :
2807 : 16088 : if (GET_CODE (dest) == ZERO_EXTRACT)
2808 : : {
2809 : 1 : if (CONST_INT_P (XEXP (dest, 1))
2810 : 1 : && CONST_INT_P (XEXP (dest, 2))
2811 : 2 : && is_a <scalar_int_mode> (GET_MODE (XEXP (dest, 0)),
2812 : : &dest_mode))
2813 : : {
2814 : 1 : width = INTVAL (XEXP (dest, 1));
2815 : 1 : offset = INTVAL (XEXP (dest, 2));
2816 : 1 : dest = XEXP (dest, 0);
2817 : 1 : if (BITS_BIG_ENDIAN)
2818 : : offset = GET_MODE_PRECISION (dest_mode) - width - offset;
2819 : : }
2820 : : }
2821 : : else
2822 : : {
2823 : 16087 : if (GET_CODE (dest) == STRICT_LOW_PART)
2824 : 525 : dest = XEXP (dest, 0);
2825 : 16087 : if (is_a <scalar_int_mode> (GET_MODE (dest), &dest_mode))
2826 : : {
2827 : 16087 : width = GET_MODE_PRECISION (dest_mode);
2828 : 16087 : offset = 0;
2829 : : }
2830 : : }
2831 : :
2832 : 16088 : if (offset >= 0)
2833 : : {
2834 : : /* If this is the low part, we're done. */
2835 : 16088 : if (subreg_lowpart_p (dest))
2836 : : ;
2837 : : /* Handle the case where inner is twice the size of outer. */
2838 : 4649 : else if (GET_MODE_PRECISION (temp_mode)
2839 : 4649 : == 2 * GET_MODE_PRECISION (dest_mode))
2840 : 4646 : offset += GET_MODE_PRECISION (dest_mode);
2841 : : /* Otherwise give up for now. */
2842 : : else
2843 : : offset = -1;
2844 : : }
2845 : :
2846 : 16085 : if (offset >= 0)
2847 : : {
2848 : 16085 : rtx inner = SET_SRC (PATTERN (i3));
2849 : 16085 : rtx outer = SET_SRC (temp_expr);
2850 : :
2851 : 32170 : wide_int o = wi::insert (rtx_mode_t (outer, temp_mode),
2852 : 16085 : rtx_mode_t (inner, dest_mode),
2853 : 32170 : offset, width);
2854 : :
2855 : 16085 : combine_merges++;
2856 : 16085 : subst_insn = i3;
2857 : 16085 : subst_low_luid = DF_INSN_LUID (i2);
2858 : 16085 : added_sets_2 = added_sets_1 = added_sets_0 = false;
2859 : 16085 : i2dest = temp_dest;
2860 : 16085 : i2dest_killed = dead_or_set_p (i2, i2dest);
2861 : :
2862 : : /* Replace the source in I2 with the new constant and make the
2863 : : resulting insn the new pattern for I3. Then skip to where we
2864 : : validate the pattern. Everything was set up above. */
2865 : 16085 : SUBST (SET_SRC (temp_expr),
2866 : : immed_wide_int_const (o, temp_mode));
2867 : :
2868 : 16085 : newpat = PATTERN (i2);
2869 : :
2870 : : /* The dest of I3 has been replaced with the dest of I2. */
2871 : 16085 : changed_i3_dest = true;
2872 : 16085 : goto validate_replacement;
2873 : 16085 : }
2874 : : }
2875 : :
2876 : : /* If we have no I1 and I2 looks like:
2877 : : (parallel [(set (reg:CC X) (compare:CC OP (const_int 0)))
2878 : : (set Y OP)])
2879 : : make up a dummy I1 that is
2880 : : (set Y OP)
2881 : : and change I2 to be
2882 : : (set (reg:CC X) (compare:CC Y (const_int 0)))
2883 : :
2884 : : (We can ignore any trailing CLOBBERs.)
2885 : :
2886 : : This undoes a previous combination and allows us to match a branch-and-
2887 : : decrement insn. */
2888 : :
2889 : 46434626 : if (i1 == 0
2890 : 28894892 : && is_parallel_of_n_reg_sets (PATTERN (i2), 2)
2891 : 227146 : && (GET_MODE_CLASS (GET_MODE (SET_DEST (XVECEXP (PATTERN (i2), 0, 0))))
2892 : : == MODE_CC)
2893 : 146066 : && GET_CODE (SET_SRC (XVECEXP (PATTERN (i2), 0, 0))) == COMPARE
2894 : 119858 : && XEXP (SET_SRC (XVECEXP (PATTERN (i2), 0, 0)), 1) == const0_rtx
2895 : 79973 : && rtx_equal_p (XEXP (SET_SRC (XVECEXP (PATTERN (i2), 0, 0)), 0),
2896 : 79973 : SET_SRC (XVECEXP (PATTERN (i2), 0, 1)))
2897 : 73510 : && !reg_used_between_p (SET_DEST (XVECEXP (PATTERN (i2), 0, 0)), i2, i3)
2898 : 46508136 : && !reg_used_between_p (SET_DEST (XVECEXP (PATTERN (i2), 0, 1)), i2, i3))
2899 : : {
2900 : : /* We make I1 with the same INSN_UID as I2. This gives it
2901 : : the same DF_INSN_LUID for value tracking. Our fake I1 will
2902 : : never appear in the insn stream so giving it the same INSN_UID
2903 : : as I2 will not cause a problem. */
2904 : :
2905 : 146584 : i1 = gen_rtx_INSN (VOIDmode, NULL, i2, BLOCK_FOR_INSN (i2),
2906 : 73292 : XVECEXP (PATTERN (i2), 0, 1), INSN_LOCATION (i2),
2907 : : -1, NULL_RTX);
2908 : 73292 : INSN_UID (i1) = INSN_UID (i2);
2909 : :
2910 : 73292 : SUBST (PATTERN (i2), XVECEXP (PATTERN (i2), 0, 0));
2911 : 73292 : SUBST (XEXP (SET_SRC (PATTERN (i2)), 0),
2912 : : SET_DEST (PATTERN (i1)));
2913 : 73292 : unsigned int regno = REGNO (SET_DEST (PATTERN (i1)));
2914 : 73292 : SUBST_LINK (LOG_LINKS (i2),
2915 : : alloc_insn_link (i1, regno, LOG_LINKS (i2)));
2916 : : }
2917 : :
2918 : : /* If I2 is a PARALLEL of two SETs of REGs (and perhaps some CLOBBERs),
2919 : : make those two SETs separate I1 and I2 insns, and make an I0 that is
2920 : : the original I1. */
2921 : 46434626 : if (i0 == 0
2922 : 43864242 : && is_parallel_of_n_reg_sets (PATTERN (i2), 2)
2923 : 374175 : && can_split_parallel_of_n_reg_sets (i2, 2)
2924 : 315816 : && !reg_used_between_p (SET_DEST (XVECEXP (PATTERN (i2), 0, 0)), i2, i3)
2925 : 277470 : && !reg_used_between_p (SET_DEST (XVECEXP (PATTERN (i2), 0, 1)), i2, i3)
2926 : 259553 : && !reg_set_between_p (SET_DEST (XVECEXP (PATTERN (i2), 0, 0)), i2, i3)
2927 : 46694170 : && !reg_set_between_p (SET_DEST (XVECEXP (PATTERN (i2), 0, 1)), i2, i3))
2928 : : {
2929 : : /* If there is no I1, there is no I0 either. */
2930 : 259544 : i0 = i1;
2931 : :
2932 : : /* We make I1 with the same INSN_UID as I2. This gives it
2933 : : the same DF_INSN_LUID for value tracking. Our fake I1 will
2934 : : never appear in the insn stream so giving it the same INSN_UID
2935 : : as I2 will not cause a problem. */
2936 : :
2937 : 519088 : i1 = gen_rtx_INSN (VOIDmode, NULL, i2, BLOCK_FOR_INSN (i2),
2938 : 259544 : XVECEXP (PATTERN (i2), 0, 0), INSN_LOCATION (i2),
2939 : : -1, NULL_RTX);
2940 : 259544 : INSN_UID (i1) = INSN_UID (i2);
2941 : :
2942 : 259544 : SUBST (PATTERN (i2), XVECEXP (PATTERN (i2), 0, 1));
2943 : : }
2944 : :
2945 : : /* Verify that I2 and maybe I1 and I0 can be combined into I3. */
2946 : 46434626 : if (!can_combine_p (i2, i3, i0, i1, NULL, NULL, &i2dest, &i2src))
2947 : : {
2948 : 11827911 : if (dump_file && (dump_flags & TDF_DETAILS))
2949 : 8 : fprintf (dump_file, "Can't combine i2 into i3\n");
2950 : 11827911 : undo_all ();
2951 : 11827911 : return 0;
2952 : : }
2953 : 34606715 : if (i1 && !can_combine_p (i1, i3, i0, NULL, i2, NULL, &i1dest, &i1src))
2954 : : {
2955 : 1371926 : if (dump_file && (dump_flags & TDF_DETAILS))
2956 : 0 : fprintf (dump_file, "Can't combine i1 into i3\n");
2957 : 1371926 : undo_all ();
2958 : 1371926 : return 0;
2959 : : }
2960 : 33234789 : if (i0 && !can_combine_p (i0, i3, NULL, NULL, i1, i2, &i0dest, &i0src))
2961 : : {
2962 : 229718 : if (dump_file && (dump_flags & TDF_DETAILS))
2963 : 0 : fprintf (dump_file, "Can't combine i0 into i3\n");
2964 : 229718 : undo_all ();
2965 : 229718 : return 0;
2966 : : }
2967 : :
2968 : : /* With non-call exceptions we can end up trying to combine multiple
2969 : : insns with possible EH side effects. Make sure we can combine
2970 : : that to a single insn which means there must be at most one insn
2971 : : in the combination with an EH side effect. */
2972 : 33005071 : if (cfun->can_throw_non_call_exceptions)
2973 : : {
2974 : 6085146 : if (find_reg_note (i3, REG_EH_REGION, NULL_RTX)
2975 : 6061495 : || find_reg_note (i2, REG_EH_REGION, NULL_RTX)
2976 : 6061413 : || (i1 && find_reg_note (i1, REG_EH_REGION, NULL_RTX))
2977 : 12146557 : || (i0 && find_reg_note (i0, REG_EH_REGION, NULL_RTX)))
2978 : : {
2979 : 23735 : has_non_call_exception = true;
2980 : 23735 : if (insn_could_throw_p (i3)
2981 : 23735 : + insn_could_throw_p (i2)
2982 : 23735 : + (i1 ? insn_could_throw_p (i1) : 0)
2983 : 23735 : + (i0 ? insn_could_throw_p (i0) : 0) > 1)
2984 : : {
2985 : 172 : if (dump_file && (dump_flags & TDF_DETAILS))
2986 : 0 : fprintf (dump_file, "Can't combine multiple insns with EH "
2987 : : "side-effects\n");
2988 : 172 : undo_all ();
2989 : 172 : return 0;
2990 : : }
2991 : : }
2992 : : }
2993 : :
2994 : : /* Record whether i2 and i3 are trivial moves. */
2995 : 33004899 : i2_was_move = is_just_move (i2);
2996 : 33004899 : i3_was_move = is_just_move (i3);
2997 : :
2998 : : /* Record whether I2DEST is used in I2SRC and similarly for the other
2999 : : cases. Knowing this will help in register status updating below. */
3000 : 33004899 : i2dest_in_i2src = reg_overlap_mentioned_p (i2dest, i2src);
3001 : 33004899 : i1dest_in_i1src = i1 && reg_overlap_mentioned_p (i1dest, i1src);
3002 : 10521479 : i2dest_in_i1src = i1 && reg_overlap_mentioned_p (i2dest, i1src);
3003 : 33004899 : i0dest_in_i0src = i0 && reg_overlap_mentioned_p (i0dest, i0src);
3004 : 1850468 : i1dest_in_i0src = i0 && reg_overlap_mentioned_p (i1dest, i0src);
3005 : 1850468 : i2dest_in_i0src = i0 && reg_overlap_mentioned_p (i2dest, i0src);
3006 : 33004899 : i2dest_killed = dead_or_set_p (i2, i2dest);
3007 : 33004899 : i1dest_killed = i1 && dead_or_set_p (i1, i1dest);
3008 : 33004899 : i0dest_killed = i0 && dead_or_set_p (i0, i0dest);
3009 : :
3010 : : /* For the earlier insns, determine which of the subsequent ones they
3011 : : feed. */
3012 : 33004899 : i1_feeds_i2_n = i1 && insn_a_feeds_b (i1, i2);
3013 : 33004899 : i0_feeds_i1_n = i0 && insn_a_feeds_b (i0, i1);
3014 : 3218891 : i0_feeds_i2_n = (i0 && (!i0_feeds_i1_n ? insn_a_feeds_b (i0, i2)
3015 : 1368423 : : (!reg_overlap_mentioned_p (i1dest, i0dest)
3016 : 1346437 : && reg_overlap_mentioned_p (i0dest, i2src))));
3017 : :
3018 : : /* Ensure that I3's pattern can be the destination of combines. */
3019 : 33004899 : if (! combinable_i3pat (i3, &PATTERN (i3), i2dest, i1dest, i0dest,
3020 : 33004899 : i1 && i2dest_in_i1src && !i1_feeds_i2_n,
3021 : 1850468 : i0 && ((i2dest_in_i0src && !i0_feeds_i2_n)
3022 : 1823623 : || (i1dest_in_i0src && !i0_feeds_i1_n)),
3023 : : &i3dest_killed))
3024 : : {
3025 : 174897 : undo_all ();
3026 : 174897 : return 0;
3027 : : }
3028 : :
3029 : : /* See if any of the insns is a MULT operation. Unless one is, we will
3030 : : reject a combination that is, since it must be slower. Be conservative
3031 : : here. */
3032 : 32830002 : if (GET_CODE (i2src) == MULT
3033 : 32038742 : || (i1 != 0 && GET_CODE (i1src) == MULT)
3034 : 31699905 : || (i0 != 0 && GET_CODE (i0src) == MULT)
3035 : 64483844 : || (GET_CODE (PATTERN (i3)) == SET
3036 : 24924766 : && GET_CODE (SET_SRC (PATTERN (i3))) == MULT))
3037 : : have_mult = true;
3038 : :
3039 : : /* If I3 has an inc, then give up if I1 or I2 uses the reg that is inc'd.
3040 : : We used to do this EXCEPT in one case: I3 has a post-inc in an
3041 : : output operand. However, that exception can give rise to insns like
3042 : : mov r3,(r3)+
3043 : : which is a famous insn on the PDP-11 where the value of r3 used as the
3044 : : source was model-dependent. Avoid this sort of thing. */
3045 : :
3046 : : #if 0
3047 : : if (!(GET_CODE (PATTERN (i3)) == SET
3048 : : && REG_P (SET_SRC (PATTERN (i3)))
3049 : : && MEM_P (SET_DEST (PATTERN (i3)))
3050 : : && (GET_CODE (XEXP (SET_DEST (PATTERN (i3)), 0)) == POST_INC
3051 : : || GET_CODE (XEXP (SET_DEST (PATTERN (i3)), 0)) == POST_DEC)))
3052 : : /* It's not the exception. */
3053 : : #endif
3054 : 32830002 : if (AUTO_INC_DEC)
3055 : : {
3056 : : rtx link;
3057 : : for (link = REG_NOTES (i3); link; link = XEXP (link, 1))
3058 : : if (REG_NOTE_KIND (link) == REG_INC
3059 : : && (reg_overlap_mentioned_p (XEXP (link, 0), PATTERN (i2))
3060 : : || (i1 != 0
3061 : : && reg_overlap_mentioned_p (XEXP (link, 0), PATTERN (i1)))))
3062 : : {
3063 : : undo_all ();
3064 : : return 0;
3065 : : }
3066 : : }
3067 : :
3068 : : /* See if the SETs in I1 or I2 need to be kept around in the merged
3069 : : instruction: whenever the value set there is still needed past I3.
3070 : : For the SET in I2, this is easy: we see if I2DEST dies or is set in I3.
3071 : :
3072 : : For the SET in I1, we have two cases: if I1 and I2 independently feed
3073 : : into I3, the set in I1 needs to be kept around unless I1DEST dies
3074 : : or is set in I3. Otherwise (if I1 feeds I2 which feeds I3), the set
3075 : : in I1 needs to be kept around unless I1DEST dies or is set in either
3076 : : I2 or I3. The same considerations apply to I0. */
3077 : :
3078 : 32830002 : added_sets_2 = !dead_or_set_p (i3, i2dest);
3079 : :
3080 : 32830002 : if (i1)
3081 : 10463978 : added_sets_1 = !(dead_or_set_p (i3, i1dest)
3082 : 8022886 : || (i1_feeds_i2_n && dead_or_set_p (i2, i1dest)));
3083 : : else
3084 : : added_sets_1 = false;
3085 : :
3086 : 32830002 : if (i0)
3087 : 2690231 : added_sets_0 = !(dead_or_set_p (i3, i0dest)
3088 : 1649371 : || (i0_feeds_i1_n && dead_or_set_p (i1, i0dest))
3089 : 336099 : || ((i0_feeds_i2_n || (i0_feeds_i1_n && i1_feeds_i2_n))
3090 : 811216 : && dead_or_set_p (i2, i0dest)));
3091 : : else
3092 : : added_sets_0 = false;
3093 : :
3094 : : /* We are about to copy insns for the case where they need to be kept
3095 : : around. Check that they can be copied in the merged instruction. */
3096 : :
3097 : 32830002 : if (targetm.cannot_copy_insn_p
3098 : 32830002 : && ((added_sets_2 && targetm.cannot_copy_insn_p (i2))
3099 : 0 : || (i1 && added_sets_1 && targetm.cannot_copy_insn_p (i1))
3100 : 0 : || (i0 && added_sets_0 && targetm.cannot_copy_insn_p (i0))))
3101 : : {
3102 : 0 : undo_all ();
3103 : 0 : return 0;
3104 : : }
3105 : :
3106 : : /* We cannot safely duplicate volatile references in any case. */
3107 : :
3108 : 7317215 : if ((added_sets_2 && volatile_refs_p (PATTERN (i2)))
3109 : 32793243 : || (added_sets_1 && volatile_refs_p (PATTERN (i1)))
3110 : 65594939 : || (added_sets_0 && volatile_refs_p (PATTERN (i0))))
3111 : : {
3112 : 67402 : undo_all ();
3113 : 67402 : return 0;
3114 : : }
3115 : :
3116 : : /* Count how many auto_inc expressions there were in the original insns;
3117 : : we need to have the same number in the resulting patterns. */
3118 : :
3119 : 32762600 : if (i0)
3120 : 1813598 : for_each_inc_dec (PATTERN (i0), count_auto_inc, &n_auto_inc);
3121 : 32762600 : if (i1)
3122 : 10431865 : for_each_inc_dec (PATTERN (i1), count_auto_inc, &n_auto_inc);
3123 : 32762600 : for_each_inc_dec (PATTERN (i2), count_auto_inc, &n_auto_inc);
3124 : 32762600 : for_each_inc_dec (PATTERN (i3), count_auto_inc, &n_auto_inc);
3125 : :
3126 : : /* If the set in I2 needs to be kept around, we must make a copy of
3127 : : PATTERN (I2), so that when we substitute I1SRC for I1DEST in
3128 : : PATTERN (I2), we are only substituting for the original I1DEST, not into
3129 : : an already-substituted copy. This also prevents making self-referential
3130 : : rtx. If I2 is a PARALLEL, we just need the piece that assigns I2SRC to
3131 : : I2DEST. */
3132 : :
3133 : 32762600 : if (added_sets_2)
3134 : : {
3135 : 7277584 : if (GET_CODE (PATTERN (i2)) == PARALLEL)
3136 : 2267372 : i2pat = gen_rtx_SET (i2dest, copy_rtx (i2src));
3137 : : else
3138 : 5010212 : i2pat = copy_rtx (PATTERN (i2));
3139 : : }
3140 : :
3141 : 32762600 : if (added_sets_1)
3142 : : {
3143 : 4001188 : if (GET_CODE (PATTERN (i1)) == PARALLEL)
3144 : 1291550 : i1pat = gen_rtx_SET (i1dest, copy_rtx (i1src));
3145 : : else
3146 : 2709638 : i1pat = copy_rtx (PATTERN (i1));
3147 : : }
3148 : :
3149 : 32762600 : if (added_sets_0)
3150 : : {
3151 : 503310 : if (GET_CODE (PATTERN (i0)) == PARALLEL)
3152 : 183594 : i0pat = gen_rtx_SET (i0dest, copy_rtx (i0src));
3153 : : else
3154 : 319716 : i0pat = copy_rtx (PATTERN (i0));
3155 : : }
3156 : :
3157 : 32762600 : combine_merges++;
3158 : :
3159 : : /* Substitute in the latest insn for the regs set by the earlier ones. */
3160 : :
3161 : 32762600 : maxreg = max_reg_num ();
3162 : :
3163 : 32762600 : subst_insn = i3;
3164 : :
3165 : : /* Many machines have insns that can both perform an
3166 : : arithmetic operation and set the condition code. These operations will
3167 : : be represented as a PARALLEL with the first element of the vector
3168 : : being a COMPARE of an arithmetic operation with the constant zero.
3169 : : The second element of the vector will set some pseudo to the result
3170 : : of the same arithmetic operation. If we simplify the COMPARE, we won't
3171 : : match such a pattern and so will generate an extra insn. Here we test
3172 : : for this case, where both the comparison and the operation result are
3173 : : needed, and make the PARALLEL by just replacing I2DEST in I3SRC with
3174 : : I2SRC. Later we will make the PARALLEL that contains I2. */
3175 : :
3176 : 22330735 : if (i1 == 0 && added_sets_2 && GET_CODE (PATTERN (i3)) == SET
3177 : 4297729 : && GET_CODE (SET_SRC (PATTERN (i3))) == COMPARE
3178 : 1849280 : && CONST_INT_P (XEXP (SET_SRC (PATTERN (i3)), 1))
3179 : 33658441 : && rtx_equal_p (XEXP (SET_SRC (PATTERN (i3)), 0), i2dest))
3180 : : {
3181 : 826546 : rtx newpat_dest;
3182 : 826546 : rtx *cc_use_loc = NULL;
3183 : 826546 : rtx_insn *cc_use_insn = NULL;
3184 : 826546 : rtx op0 = i2src, op1 = XEXP (SET_SRC (PATTERN (i3)), 1);
3185 : 826546 : machine_mode compare_mode, orig_compare_mode;
3186 : 826546 : enum rtx_code compare_code = UNKNOWN, orig_compare_code = UNKNOWN;
3187 : 826546 : scalar_int_mode mode;
3188 : :
3189 : 826546 : newpat = PATTERN (i3);
3190 : 826546 : newpat_dest = SET_DEST (newpat);
3191 : 826546 : compare_mode = orig_compare_mode = GET_MODE (newpat_dest);
3192 : :
3193 : 826546 : if (undobuf.other_insn == 0
3194 : 826546 : && (cc_use_loc = find_single_use (SET_DEST (newpat), i3,
3195 : : &cc_use_insn)))
3196 : : {
3197 : 820217 : compare_code = orig_compare_code = GET_CODE (*cc_use_loc);
3198 : 820217 : if (is_a <scalar_int_mode> (GET_MODE (i2dest), &mode))
3199 : 820217 : compare_code = simplify_compare_const (compare_code, mode,
3200 : : &op0, &op1);
3201 : 820217 : target_canonicalize_comparison (&compare_code, &op0, &op1, 1);
3202 : : }
3203 : :
3204 : : /* Do the rest only if op1 is const0_rtx, which may be the
3205 : : result of simplification. */
3206 : 826546 : if (op1 == const0_rtx)
3207 : : {
3208 : : /* If a single use of the CC is found, prepare to modify it
3209 : : when SELECT_CC_MODE returns a new CC-class mode, or when
3210 : : the above simplify_compare_const() returned a new comparison
3211 : : operator. undobuf.other_insn is assigned the CC use insn
3212 : : when modifying it. */
3213 : 511592 : if (cc_use_loc)
3214 : : {
3215 : : #ifdef SELECT_CC_MODE
3216 : 508953 : machine_mode new_mode
3217 : 508953 : = SELECT_CC_MODE (compare_code, op0, op1);
3218 : 508953 : if (new_mode != orig_compare_mode
3219 : 508953 : && can_change_dest_mode (SET_DEST (newpat),
3220 : : added_sets_2, new_mode))
3221 : : {
3222 : 400 : unsigned int regno = REGNO (newpat_dest);
3223 : 400 : compare_mode = new_mode;
3224 : 400 : if (regno < FIRST_PSEUDO_REGISTER)
3225 : 400 : newpat_dest = gen_rtx_REG (compare_mode, regno);
3226 : : else
3227 : : {
3228 : 0 : subst_mode (regno, compare_mode);
3229 : 0 : newpat_dest = regno_reg_rtx[regno];
3230 : : }
3231 : : }
3232 : : #endif
3233 : : /* Cases for modifying the CC-using comparison. */
3234 : 508953 : if (compare_code != orig_compare_code
3235 : 414 : && COMPARISON_P (*cc_use_loc))
3236 : : {
3237 : : /* Replace cc_use_loc with entire new RTX. */
3238 : 414 : SUBST (*cc_use_loc,
3239 : : gen_rtx_fmt_ee (compare_code, GET_MODE (*cc_use_loc),
3240 : : newpat_dest, const0_rtx));
3241 : 414 : undobuf.other_insn = cc_use_insn;
3242 : : }
3243 : 508539 : else if (compare_mode != orig_compare_mode)
3244 : : {
3245 : 1 : subrtx_ptr_iterator::array_type array;
3246 : :
3247 : : /* Just replace the CC reg with a new mode. */
3248 : 4 : FOR_EACH_SUBRTX_PTR (iter, array, cc_use_loc, NONCONST)
3249 : : {
3250 : 3 : rtx *loc = *iter;
3251 : 3 : if (REG_P (*loc)
3252 : 3 : && REGNO (*loc) == REGNO (newpat_dest))
3253 : : {
3254 : 1 : SUBST (*loc, newpat_dest);
3255 : 1 : iter.skip_subrtxes ();
3256 : : }
3257 : : }
3258 : 1 : undobuf.other_insn = cc_use_insn;
3259 : 1 : }
3260 : : }
3261 : :
3262 : : /* Now we modify the current newpat:
3263 : : First, SET_DEST(newpat) is updated if the CC mode has been
3264 : : altered. For targets without SELECT_CC_MODE, this should be
3265 : : optimized away. */
3266 : 511592 : if (compare_mode != orig_compare_mode)
3267 : 400 : SUBST (SET_DEST (newpat), newpat_dest);
3268 : : /* This is always done to propagate i2src into newpat. */
3269 : 511592 : SUBST (SET_SRC (newpat),
3270 : : gen_rtx_COMPARE (compare_mode, op0, op1));
3271 : : /* Create new version of i2pat if needed; the below PARALLEL
3272 : : creation needs this to work correctly. */
3273 : 511592 : if (! rtx_equal_p (i2src, op0))
3274 : 31 : i2pat = gen_rtx_SET (i2dest, op0);
3275 : 511592 : i2_is_used = 1;
3276 : : }
3277 : : }
3278 : :
3279 : 826546 : if (i2_is_used == 0)
3280 : : {
3281 : : /* It is possible that the source of I2 or I1 may be performing
3282 : : an unneeded operation, such as a ZERO_EXTEND of something
3283 : : that is known to have the high part zero. Handle that case
3284 : : by letting subst look at the inner insns.
3285 : :
3286 : : Another way to do this would be to have a function that tries
3287 : : to simplify a single insn instead of merging two or more
3288 : : insns. We don't do this because of the potential of infinite
3289 : : loops and because of the potential extra memory required.
3290 : : However, doing it the way we are is a bit of a kludge and
3291 : : doesn't catch all cases.
3292 : :
3293 : : But only do this if -fexpensive-optimizations since it slows
3294 : : things down and doesn't usually win.
3295 : :
3296 : : This is not done in the COMPARE case above because the
3297 : : unmodified I2PAT is used in the PARALLEL and so a pattern
3298 : : with a modified I2SRC would not match. */
3299 : :
3300 : 32251008 : if (flag_expensive_optimizations)
3301 : : {
3302 : : /* Pass pc_rtx so no substitutions are done, just
3303 : : simplifications. */
3304 : 30074418 : if (i1)
3305 : : {
3306 : 9793553 : subst_low_luid = DF_INSN_LUID (i1);
3307 : 9793553 : i1src = subst (i1src, pc_rtx, pc_rtx, false, false, false);
3308 : : }
3309 : :
3310 : 30074418 : subst_low_luid = DF_INSN_LUID (i2);
3311 : 30074418 : i2src = subst (i2src, pc_rtx, pc_rtx, false, false, false);
3312 : : }
3313 : :
3314 : 32251008 : n_occurrences = 0; /* `subst' counts here */
3315 : 32251008 : subst_low_luid = DF_INSN_LUID (i2);
3316 : :
3317 : : /* If I1 feeds into I2 and I1DEST is in I1SRC, we need to make a unique
3318 : : copy of I2SRC each time we substitute it, in order to avoid creating
3319 : : self-referential RTL when we will be substituting I1SRC for I1DEST
3320 : : later. Likewise if I0 feeds into I2, either directly or indirectly
3321 : : through I1, and I0DEST is in I0SRC. */
3322 : 32251008 : newpat = subst (PATTERN (i3), i2dest, i2src, false, false,
3323 : 32251008 : (i1_feeds_i2_n && i1dest_in_i1src)
3324 : 32251008 : || ((i0_feeds_i2_n || (i0_feeds_i1_n && i1_feeds_i2_n))
3325 : : && i0dest_in_i0src));
3326 : 32251008 : substed_i2 = true;
3327 : :
3328 : : /* Record whether I2's body now appears within I3's body. */
3329 : 32251008 : i2_is_used = n_occurrences;
3330 : : }
3331 : :
3332 : : /* If we already got a failure, don't try to do more. Otherwise, try to
3333 : : substitute I1 if we have it. */
3334 : :
3335 : 32762600 : if (i1 && GET_CODE (newpat) != CLOBBER)
3336 : : {
3337 : : /* Before we can do this substitution, we must redo the test done
3338 : : above (see detailed comments there) that ensures I1DEST isn't
3339 : : mentioned in any SETs in NEWPAT that are field assignments. */
3340 : 10388955 : if (!combinable_i3pat (NULL, &newpat, i1dest, NULL_RTX, NULL_RTX,
3341 : : false, false, 0))
3342 : : {
3343 : 38 : undo_all ();
3344 : 38 : return 0;
3345 : : }
3346 : :
3347 : 10388917 : n_occurrences = 0;
3348 : 10388917 : subst_low_luid = DF_INSN_LUID (i1);
3349 : :
3350 : : /* If the following substitution will modify I1SRC, make a copy of it
3351 : : for the case where it is substituted for I1DEST in I2PAT later. */
3352 : 10388917 : if (added_sets_2 && i1_feeds_i2_n)
3353 : 1424967 : i1src_copy = copy_rtx (i1src);
3354 : :
3355 : : /* If I0 feeds into I1 and I0DEST is in I0SRC, we need to make a unique
3356 : : copy of I1SRC each time we substitute it, in order to avoid creating
3357 : : self-referential RTL when we will be substituting I0SRC for I0DEST
3358 : : later. */
3359 : 20777834 : newpat = subst (newpat, i1dest, i1src, false, false,
3360 : 10388917 : i0_feeds_i1_n && i0dest_in_i0src);
3361 : 10388917 : substed_i1 = true;
3362 : :
3363 : : /* Record whether I1's body now appears within I3's body. */
3364 : 10388917 : i1_is_used = n_occurrences;
3365 : : }
3366 : :
3367 : : /* Likewise for I0 if we have it. */
3368 : :
3369 : 32762562 : if (i0 && GET_CODE (newpat) != CLOBBER)
3370 : : {
3371 : 1795812 : if (!combinable_i3pat (NULL, &newpat, i0dest, NULL_RTX, NULL_RTX,
3372 : : false, false, 0))
3373 : : {
3374 : 2 : undo_all ();
3375 : 2 : return 0;
3376 : : }
3377 : :
3378 : : /* If the following substitution will modify I0SRC, make a copy of it
3379 : : for the case where it is substituted for I0DEST in I1PAT later. */
3380 : 1795810 : if (added_sets_1 && i0_feeds_i1_n)
3381 : 356520 : i0src_copy = copy_rtx (i0src);
3382 : : /* And a copy for I0DEST in I2PAT substitution. */
3383 : 1795810 : if (added_sets_2 && ((i0_feeds_i1_n && i1_feeds_i2_n)
3384 : 205081 : || (i0_feeds_i2_n)))
3385 : 319486 : i0src_copy2 = copy_rtx (i0src);
3386 : :
3387 : 1795810 : n_occurrences = 0;
3388 : 1795810 : subst_low_luid = DF_INSN_LUID (i0);
3389 : 1795810 : newpat = subst (newpat, i0dest, i0src, false, false, false);
3390 : 1795810 : substed_i0 = true;
3391 : : }
3392 : :
3393 : 32762560 : if (n_auto_inc)
3394 : : {
3395 : 514638 : int new_n_auto_inc = 0;
3396 : 514638 : for_each_inc_dec (newpat, count_auto_inc, &new_n_auto_inc);
3397 : :
3398 : 514638 : if (n_auto_inc != new_n_auto_inc)
3399 : : {
3400 : 1126 : if (dump_file && (dump_flags & TDF_DETAILS))
3401 : 0 : fprintf (dump_file, "Number of auto_inc expressions changed\n");
3402 : 1126 : undo_all ();
3403 : 1126 : return 0;
3404 : : }
3405 : : }
3406 : :
3407 : : /* Fail if an autoincrement side-effect has been duplicated. Be careful
3408 : : to count all the ways that I2SRC and I1SRC can be used. */
3409 : 32761434 : if ((FIND_REG_INC_NOTE (i2, NULL_RTX) != 0
3410 : : && i2_is_used + added_sets_2 > 1)
3411 : : || (i1 != 0 && FIND_REG_INC_NOTE (i1, NULL_RTX) != 0
3412 : : && (i1_is_used + added_sets_1 + (added_sets_2 && i1_feeds_i2_n) > 1))
3413 : : || (i0 != 0 && FIND_REG_INC_NOTE (i0, NULL_RTX) != 0
3414 : : && (n_occurrences + added_sets_0
3415 : : + (added_sets_1 && i0_feeds_i1_n)
3416 : : + (added_sets_2 && i0_feeds_i2_n) > 1))
3417 : : /* Fail if we tried to make a new register. */
3418 : 32761434 : || max_reg_num () != maxreg
3419 : : /* Fail if we couldn't do something and have a CLOBBER. */
3420 : 32761434 : || GET_CODE (newpat) == CLOBBER
3421 : : /* Fail if this new pattern is a MULT and we didn't have one before
3422 : : at the outer level. */
3423 : 65210980 : || (GET_CODE (newpat) == SET && GET_CODE (SET_SRC (newpat)) == MULT
3424 : 270818 : && ! have_mult))
3425 : : {
3426 : 334300 : undo_all ();
3427 : 334300 : return 0;
3428 : : }
3429 : :
3430 : : /* If the actions of the earlier insns must be kept
3431 : : in addition to substituting them into the latest one,
3432 : : we must make a new PARALLEL for the latest insn
3433 : : to hold the additional SETs. */
3434 : :
3435 : 32427134 : if (added_sets_0 || added_sets_1 || added_sets_2)
3436 : : {
3437 : 10780249 : int extra_sets = added_sets_0 + added_sets_1 + added_sets_2;
3438 : 10780249 : combine_extras++;
3439 : :
3440 : 10780249 : if (GET_CODE (newpat) == PARALLEL)
3441 : : {
3442 : 2067428 : rtvec old = XVEC (newpat, 0);
3443 : 2067428 : total_sets = XVECLEN (newpat, 0) + extra_sets;
3444 : 2067428 : newpat = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (total_sets));
3445 : 2067428 : memcpy (XVEC (newpat, 0)->elem, &old->elem[0],
3446 : 2067428 : sizeof (old->elem[0]) * old->num_elem);
3447 : : }
3448 : : else
3449 : : {
3450 : 8712821 : rtx old = newpat;
3451 : 8712821 : total_sets = 1 + extra_sets;
3452 : 8712821 : newpat = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (total_sets));
3453 : 8712821 : XVECEXP (newpat, 0, 0) = old;
3454 : : }
3455 : :
3456 : 10780249 : if (added_sets_0)
3457 : 488160 : XVECEXP (newpat, 0, --total_sets) = i0pat;
3458 : :
3459 : 10780249 : if (added_sets_1)
3460 : : {
3461 : 3958981 : rtx t = i1pat;
3462 : 3958981 : if (i0_feeds_i1_n)
3463 : 354790 : t = subst (t, i0dest, i0src_copy ? i0src_copy : i0src,
3464 : : false, false, false);
3465 : :
3466 : 3958981 : XVECEXP (newpat, 0, --total_sets) = t;
3467 : : }
3468 : 10780249 : if (added_sets_2)
3469 : : {
3470 : 7233085 : rtx t = i2pat;
3471 : 7233085 : if (i1_feeds_i2_n)
3472 : 1413652 : t = subst (t, i1dest, i1src_copy ? i1src_copy : i1src, false, false,
3473 : 1413652 : i0_feeds_i1_n && i0dest_in_i0src);
3474 : 7233085 : if ((i0_feeds_i1_n && i1_feeds_i2_n) || i0_feeds_i2_n)
3475 : 318493 : t = subst (t, i0dest, i0src_copy2 ? i0src_copy2 : i0src,
3476 : : false, false, false);
3477 : :
3478 : 7233085 : XVECEXP (newpat, 0, --total_sets) = t;
3479 : : }
3480 : : }
3481 : :
3482 : 25194049 : validate_replacement:
3483 : :
3484 : : /* Note which hard regs this insn has as inputs. */
3485 : 32766087 : mark_used_regs_combine (newpat);
3486 : :
3487 : : /* If recog_for_combine fails, it strips existing clobbers. If we'll
3488 : : consider splitting this pattern, we might need these clobbers. */
3489 : 32766087 : if (i1 && GET_CODE (newpat) == PARALLEL
3490 : 7189495 : && GET_CODE (XVECEXP (newpat, 0, XVECLEN (newpat, 0) - 1)) == CLOBBER)
3491 : : {
3492 : 1654595 : int len = XVECLEN (newpat, 0);
3493 : :
3494 : 1654595 : newpat_vec_with_clobbers = rtvec_alloc (len);
3495 : 6668121 : for (i = 0; i < len; i++)
3496 : 3358931 : RTVEC_ELT (newpat_vec_with_clobbers, i) = XVECEXP (newpat, 0, i);
3497 : : }
3498 : :
3499 : : /* We have recognized nothing yet. */
3500 : 32766087 : insn_code_number = -1;
3501 : :
3502 : : /* See if this is a PARALLEL of two SETs where one SET's destination is
3503 : : a register that is unused and this isn't marked as an instruction that
3504 : : might trap in an EH region. In that case, we just need the other SET.
3505 : : We prefer this over the PARALLEL.
3506 : :
3507 : : This can occur when simplifying a divmod insn. We *must* test for this
3508 : : case here because the code below that splits two independent SETs doesn't
3509 : : handle this case correctly when it updates the register status.
3510 : :
3511 : : It's pointless doing this if we originally had two sets, one from
3512 : : i3, and one from i2. Combining then splitting the parallel results
3513 : : in the original i2 again plus an invalid insn (which we delete).
3514 : : The net effect is only to move instructions around, which makes
3515 : : debug info less accurate.
3516 : :
3517 : : If the remaining SET came from I2 its destination should not be used
3518 : : between I2 and I3. See PR82024. */
3519 : :
3520 : 7233085 : if (!(added_sets_2 && i1 == 0)
3521 : 27452721 : && is_parallel_of_n_reg_sets (newpat, 2)
3522 : 34329336 : && asm_noperands (newpat) < 0)
3523 : : {
3524 : 1562362 : rtx set0 = XVECEXP (newpat, 0, 0);
3525 : 1562362 : rtx set1 = XVECEXP (newpat, 0, 1);
3526 : 1562362 : rtx oldpat = newpat;
3527 : :
3528 : 1562362 : if (((REG_P (SET_DEST (set1))
3529 : 1562362 : && find_reg_note (i3, REG_UNUSED, SET_DEST (set1)))
3530 : 1521766 : || (GET_CODE (SET_DEST (set1)) == SUBREG
3531 : 0 : && find_reg_note (i3, REG_UNUSED, SUBREG_REG (SET_DEST (set1)))))
3532 : 40596 : && insn_nothrow_p (i3)
3533 : 1601709 : && !side_effects_p (SET_SRC (set1)))
3534 : : {
3535 : 39113 : newpat = set0;
3536 : 39113 : insn_code_number = recog_for_combine (&newpat, i3, &new_i3_notes);
3537 : : }
3538 : :
3539 : 1523249 : else if (((REG_P (SET_DEST (set0))
3540 : 1523249 : && find_reg_note (i3, REG_UNUSED, SET_DEST (set0)))
3541 : 1499744 : || (GET_CODE (SET_DEST (set0)) == SUBREG
3542 : 0 : && find_reg_note (i3, REG_UNUSED,
3543 : 0 : SUBREG_REG (SET_DEST (set0)))))
3544 : 23505 : && insn_nothrow_p (i3)
3545 : 1546174 : && !side_effects_p (SET_SRC (set0)))
3546 : : {
3547 : 22882 : rtx dest = SET_DEST (set1);
3548 : 22882 : if (GET_CODE (dest) == SUBREG)
3549 : 0 : dest = SUBREG_REG (dest);
3550 : 22882 : if (!reg_used_between_p (dest, i2, i3))
3551 : : {
3552 : 22881 : newpat = set1;
3553 : 22881 : insn_code_number = recog_for_combine (&newpat, i3, &new_i3_notes);
3554 : :
3555 : 22881 : if (insn_code_number >= 0)
3556 : : changed_i3_dest = true;
3557 : : }
3558 : : }
3559 : :
3560 : 39113 : if (insn_code_number < 0)
3561 : 1556826 : newpat = oldpat;
3562 : : }
3563 : :
3564 : : /* Is the result of combination a valid instruction? */
3565 : 1556826 : if (insn_code_number < 0)
3566 : 32760551 : insn_code_number = recog_for_combine (&newpat, i3, &new_i3_notes);
3567 : :
3568 : : /* If we were combining three insns and the result is a simple SET
3569 : : with no ASM_OPERANDS that wasn't recognized, try to split it into two
3570 : : insns. There are two ways to do this. It can be split using a
3571 : : machine-specific method (like when you have an addition of a large
3572 : : constant) or by combine in the function find_split_point. */
3573 : :
3574 : 10247519 : if (i1 && insn_code_number < 0 && GET_CODE (newpat) == SET
3575 : 37392769 : && asm_noperands (newpat) < 0)
3576 : : {
3577 : 4626205 : rtx parallel, *split;
3578 : 4626205 : rtx_insn *m_split_insn;
3579 : 4626205 : unsigned int old_nregs, new_nregs;
3580 : :
3581 : : /* See if the MD file can split NEWPAT. If it can't, see if letting it
3582 : : use I2DEST as a scratch register will help. In the latter case,
3583 : : convert I2DEST to the mode of the source of NEWPAT if we can. */
3584 : :
3585 : 4626205 : m_split_insn = combine_split_insns (newpat, i3, &old_nregs, &new_nregs);
3586 : :
3587 : : /* We can only use I2DEST as a scratch reg if it doesn't overlap any
3588 : : inputs of NEWPAT. */
3589 : :
3590 : : /* ??? If I2DEST is not safe, and I1DEST exists, then it would be
3591 : : possible to try that as a scratch reg. This would require adding
3592 : : more code to make it work though. */
3593 : :
3594 : 4626205 : if (m_split_insn == 0 && ! reg_overlap_mentioned_p (i2dest, newpat))
3595 : : {
3596 : 4491000 : machine_mode new_mode = GET_MODE (SET_DEST (newpat));
3597 : :
3598 : : /* ??? Reusing i2dest without resetting the reg_stat entry for it
3599 : : (temporarily, until we are committed to this instruction
3600 : : combination) does not work: for example, any call to nonzero_bits
3601 : : on the register (from a splitter in the MD file, for example)
3602 : : will get the old information, which is invalid.
3603 : :
3604 : : Since nowadays we can create registers during combine just fine,
3605 : : we should just create a new one here, not reuse i2dest. */
3606 : :
3607 : : /* First try to split using the original register as a
3608 : : scratch register. */
3609 : 4491000 : parallel = gen_rtx_PARALLEL (VOIDmode,
3610 : : gen_rtvec (2, newpat,
3611 : : gen_rtx_CLOBBER (VOIDmode,
3612 : : i2dest)));
3613 : 4491000 : m_split_insn = combine_split_insns (parallel, i3, &old_nregs, &new_nregs);
3614 : :
3615 : : /* If that didn't work, try changing the mode of I2DEST if
3616 : : we can. */
3617 : 4491000 : if (m_split_insn == 0
3618 : 4491000 : && new_mode != GET_MODE (i2dest)
3619 : 1727888 : && new_mode != VOIDmode
3620 : 5644341 : && can_change_dest_mode (i2dest, added_sets_2, new_mode))
3621 : : {
3622 : 859111 : machine_mode old_mode = GET_MODE (i2dest);
3623 : 859111 : rtx ni2dest;
3624 : :
3625 : 859111 : if (REGNO (i2dest) < FIRST_PSEUDO_REGISTER)
3626 : 9942 : ni2dest = gen_rtx_REG (new_mode, REGNO (i2dest));
3627 : : else
3628 : : {
3629 : 849169 : subst_mode (REGNO (i2dest), new_mode);
3630 : 849169 : ni2dest = regno_reg_rtx[REGNO (i2dest)];
3631 : : }
3632 : :
3633 : 859111 : parallel = (gen_rtx_PARALLEL
3634 : : (VOIDmode,
3635 : : gen_rtvec (2, newpat,
3636 : : gen_rtx_CLOBBER (VOIDmode,
3637 : : ni2dest))));
3638 : 859111 : m_split_insn = combine_split_insns (parallel, i3, &old_nregs, &new_nregs);
3639 : :
3640 : 859111 : if (m_split_insn == 0
3641 : 859111 : && REGNO (i2dest) >= FIRST_PSEUDO_REGISTER)
3642 : : {
3643 : 849169 : struct undo *buf;
3644 : :
3645 : 849169 : adjust_reg_mode (regno_reg_rtx[REGNO (i2dest)], old_mode);
3646 : 849169 : buf = undobuf.undos;
3647 : 849169 : undobuf.undos = buf->next;
3648 : 849169 : buf->next = undobuf.frees;
3649 : 849169 : undobuf.frees = buf;
3650 : : }
3651 : : }
3652 : :
3653 : 4491000 : i2scratch = m_split_insn != 0;
3654 : : }
3655 : :
3656 : : /* If recog_for_combine has discarded clobbers, try to use them
3657 : : again for the split. */
3658 : 4626205 : if (m_split_insn == 0 && newpat_vec_with_clobbers)
3659 : : {
3660 : 1603070 : parallel = gen_rtx_PARALLEL (VOIDmode, newpat_vec_with_clobbers);
3661 : 1603070 : m_split_insn = combine_split_insns (parallel, i3, &old_nregs, &new_nregs);
3662 : : }
3663 : :
3664 : 4637874 : if (m_split_insn && NEXT_INSN (m_split_insn) == NULL_RTX)
3665 : : {
3666 : 1476 : rtx m_split_pat = PATTERN (m_split_insn);
3667 : 1476 : insn_code_number = recog_for_combine (&m_split_pat, i3, &new_i3_notes,
3668 : : old_nregs, new_nregs);
3669 : 1476 : if (insn_code_number >= 0)
3670 : 177 : newpat = m_split_pat;
3671 : : }
3672 : 10193 : else if (m_split_insn && NEXT_INSN (NEXT_INSN (m_split_insn)) == NULL_RTX
3673 : 4634922 : && (next_nonnote_nondebug_insn (i2) == i3
3674 : 6 : || !modified_between_p (PATTERN (m_split_insn), i2, i3)))
3675 : : {
3676 : 10193 : rtx i2set, i3set;
3677 : 10193 : rtx newi3pat = PATTERN (NEXT_INSN (m_split_insn));
3678 : 10193 : newi2pat = PATTERN (m_split_insn);
3679 : :
3680 : 10193 : i3set = single_set (NEXT_INSN (m_split_insn));
3681 : 10193 : i2set = single_set (m_split_insn);
3682 : :
3683 : 10193 : i2_code_number = recog_for_combine (&newi2pat, i2, &new_i2_notes);
3684 : :
3685 : : /* If I2 or I3 has multiple SETs, we won't know how to track
3686 : : register status, so don't use these insns. If I2's destination
3687 : : is used between I2 and I3, we also can't use these insns. */
3688 : :
3689 : 10193 : if (i2_code_number >= 0 && i2set && i3set
3690 : 20386 : && (next_nonnote_nondebug_insn (i2) == i3
3691 : 6 : || ! reg_used_between_p (SET_DEST (i2set), i2, i3)))
3692 : 10193 : insn_code_number = recog_for_combine (&newi3pat, i3,
3693 : : &new_i3_notes,
3694 : : old_nregs, new_nregs);
3695 : 10193 : if (insn_code_number >= 0)
3696 : 10193 : newpat = newi3pat;
3697 : :
3698 : : /* It is possible that both insns now set the destination of I3.
3699 : : If so, we must show an extra use of it. */
3700 : :
3701 : 10193 : if (insn_code_number >= 0)
3702 : : {
3703 : 10193 : rtx new_i3_dest = SET_DEST (i3set);
3704 : 10193 : rtx new_i2_dest = SET_DEST (i2set);
3705 : :
3706 : 10193 : while (GET_CODE (new_i3_dest) == ZERO_EXTRACT
3707 : 10233 : || GET_CODE (new_i3_dest) == STRICT_LOW_PART
3708 : 20448 : || GET_CODE (new_i3_dest) == SUBREG)
3709 : 40 : new_i3_dest = XEXP (new_i3_dest, 0);
3710 : :
3711 : 10193 : while (GET_CODE (new_i2_dest) == ZERO_EXTRACT
3712 : 10193 : || GET_CODE (new_i2_dest) == STRICT_LOW_PART
3713 : 20386 : || GET_CODE (new_i2_dest) == SUBREG)
3714 : 0 : new_i2_dest = XEXP (new_i2_dest, 0);
3715 : :
3716 : 10193 : if (REG_P (new_i3_dest)
3717 : 6231 : && REG_P (new_i2_dest)
3718 : 6231 : && REGNO (new_i3_dest) == REGNO (new_i2_dest)
3719 : 10193 : && REGNO (new_i2_dest) < reg_n_sets_max)
3720 : 0 : INC_REG_N_SETS (REGNO (new_i2_dest), 1);
3721 : : }
3722 : : }
3723 : :
3724 : : /* If we can split it and use I2DEST, go ahead and see if that
3725 : : helps things be recognized. Verify that none of the registers
3726 : : are set between I2 and I3. */
3727 : 1299 : if (insn_code_number < 0
3728 : 4615835 : && (split = find_split_point (&newpat, i3, false)) != 0
3729 : : /* We need I2DEST in the proper mode. If it is a hard register
3730 : : or the only use of a pseudo, we can change its mode.
3731 : : Make sure we don't change a hard register to have a mode that
3732 : : isn't valid for it, or change the number of registers. */
3733 : 4357709 : && (GET_MODE (*split) == GET_MODE (i2dest)
3734 : 1645257 : || GET_MODE (*split) == VOIDmode
3735 : 1293569 : || can_change_dest_mode (i2dest, added_sets_2,
3736 : : GET_MODE (*split)))
3737 : 3641611 : && (next_nonnote_nondebug_insn (i2) == i3
3738 : 580200 : || !modified_between_p (*split, i2, i3))
3739 : : /* We can't overwrite I2DEST if its value is still used by
3740 : : NEWPAT. */
3741 : 3611744 : && ! reg_referenced_p (i2dest, newpat)
3742 : : /* We should not split a possibly trapping part when we
3743 : : care about non-call EH and have REG_EH_REGION notes
3744 : : to distribute. */
3745 : 8166823 : && ! (cfun->can_throw_non_call_exceptions
3746 : 381420 : && has_non_call_exception
3747 : 118 : && may_trap_p (*split)))
3748 : : {
3749 : 3541799 : rtx newdest = i2dest;
3750 : 3541799 : enum rtx_code split_code = GET_CODE (*split);
3751 : 3541799 : machine_mode split_mode = GET_MODE (*split);
3752 : 3541799 : bool subst_done = false;
3753 : 3541799 : newi2pat = NULL_RTX;
3754 : :
3755 : 3541799 : i2scratch = true;
3756 : :
3757 : : /* *SPLIT may be part of I2SRC, so make sure we have the
3758 : : original expression around for later debug processing.
3759 : : We should not need I2SRC any more in other cases. */
3760 : 3541799 : if (MAY_HAVE_DEBUG_BIND_INSNS)
3761 : 1730106 : i2src = copy_rtx (i2src);
3762 : : else
3763 : 1811693 : i2src = NULL;
3764 : :
3765 : : /* Get NEWDEST as a register in the proper mode. We have already
3766 : : validated that we can do this. */
3767 : 3541799 : if (GET_MODE (i2dest) != split_mode && split_mode != VOIDmode)
3768 : : {
3769 : 574078 : if (REGNO (i2dest) < FIRST_PSEUDO_REGISTER)
3770 : 0 : newdest = gen_rtx_REG (split_mode, REGNO (i2dest));
3771 : : else
3772 : : {
3773 : 574078 : subst_mode (REGNO (i2dest), split_mode);
3774 : 574078 : newdest = regno_reg_rtx[REGNO (i2dest)];
3775 : : }
3776 : : }
3777 : :
3778 : : /* If *SPLIT is a (mult FOO (const_int pow2)), convert it to
3779 : : an ASHIFT. This can occur if it was inside a PLUS and hence
3780 : : appeared to be a memory address. This is a kludge. */
3781 : 3541799 : if (split_code == MULT
3782 : 204040 : && CONST_INT_P (XEXP (*split, 1))
3783 : 105406 : && INTVAL (XEXP (*split, 1)) > 0
3784 : 3644142 : && (i = exact_log2 (UINTVAL (XEXP (*split, 1)))) >= 0)
3785 : : {
3786 : 72246 : rtx i_rtx = gen_int_shift_amount (split_mode, i);
3787 : 72246 : SUBST (*split, gen_rtx_ASHIFT (split_mode,
3788 : : XEXP (*split, 0), i_rtx));
3789 : : /* Update split_code because we may not have a multiply
3790 : : anymore. */
3791 : 72246 : split_code = GET_CODE (*split);
3792 : : }
3793 : :
3794 : : /* Similarly for (plus (mult FOO (const_int pow2))). */
3795 : 3541799 : if (split_code == PLUS
3796 : 673148 : && GET_CODE (XEXP (*split, 0)) == MULT
3797 : 108524 : && CONST_INT_P (XEXP (XEXP (*split, 0), 1))
3798 : 39231 : && INTVAL (XEXP (XEXP (*split, 0), 1)) > 0
3799 : 3577076 : && (i = exact_log2 (UINTVAL (XEXP (XEXP (*split, 0), 1)))) >= 0)
3800 : : {
3801 : 7078 : rtx nsplit = XEXP (*split, 0);
3802 : 7078 : rtx i_rtx = gen_int_shift_amount (GET_MODE (nsplit), i);
3803 : 7078 : SUBST (XEXP (*split, 0), gen_rtx_ASHIFT (GET_MODE (nsplit),
3804 : : XEXP (nsplit, 0),
3805 : : i_rtx));
3806 : : /* Update split_code because we may not have a multiply
3807 : : anymore. */
3808 : 7078 : split_code = GET_CODE (*split);
3809 : : }
3810 : :
3811 : : #ifdef INSN_SCHEDULING
3812 : : /* If *SPLIT is a paradoxical SUBREG, when we split it, it should
3813 : : be written as a ZERO_EXTEND. */
3814 : 3541799 : if (split_code == SUBREG && MEM_P (SUBREG_REG (*split)))
3815 : : {
3816 : : /* Or as a SIGN_EXTEND if LOAD_EXTEND_OP says that that's
3817 : : what it really is. */
3818 : 12744 : if (load_extend_op (GET_MODE (SUBREG_REG (*split)))
3819 : : == SIGN_EXTEND)
3820 : : SUBST (*split, gen_rtx_SIGN_EXTEND (split_mode,
3821 : : SUBREG_REG (*split)));
3822 : : else
3823 : 12744 : SUBST (*split, gen_rtx_ZERO_EXTEND (split_mode,
3824 : : SUBREG_REG (*split)));
3825 : : }
3826 : : #endif
3827 : :
3828 : : /* Attempt to split binary operators using arithmetic identities. */
3829 : 3541799 : if (BINARY_P (SET_SRC (newpat))
3830 : 2973867 : && split_mode == GET_MODE (SET_SRC (newpat))
3831 : 5570069 : && ! side_effects_p (SET_SRC (newpat)))
3832 : : {
3833 : 2013988 : rtx setsrc = SET_SRC (newpat);
3834 : 2013988 : machine_mode mode = GET_MODE (setsrc);
3835 : 2013988 : enum rtx_code code = GET_CODE (setsrc);
3836 : 2013988 : rtx src_op0 = XEXP (setsrc, 0);
3837 : 2013988 : rtx src_op1 = XEXP (setsrc, 1);
3838 : :
3839 : : /* Split "X = Y op Y" as "Z = Y; X = Z op Z". */
3840 : 2013988 : if (rtx_equal_p (src_op0, src_op1))
3841 : : {
3842 : 1413 : newi2pat = gen_rtx_SET (newdest, src_op0);
3843 : 1413 : SUBST (XEXP (setsrc, 0), newdest);
3844 : 1413 : SUBST (XEXP (setsrc, 1), newdest);
3845 : 1413 : subst_done = true;
3846 : : }
3847 : : /* Split "((P op Q) op R) op S" where op is PLUS or MULT. */
3848 : 2012575 : else if ((code == PLUS || code == MULT)
3849 : 975408 : && GET_CODE (src_op0) == code
3850 : 415365 : && GET_CODE (XEXP (src_op0, 0)) == code
3851 : 173465 : && (INTEGRAL_MODE_P (mode)
3852 : : || (FLOAT_MODE_P (mode)
3853 : 99998 : && flag_unsafe_math_optimizations)))
3854 : : {
3855 : 77420 : rtx p = XEXP (XEXP (src_op0, 0), 0);
3856 : 77420 : rtx q = XEXP (XEXP (src_op0, 0), 1);
3857 : 77420 : rtx r = XEXP (src_op0, 1);
3858 : 77420 : rtx s = src_op1;
3859 : :
3860 : : /* Split both "((X op Y) op X) op Y" and
3861 : : "((X op Y) op Y) op X" as "T op T" where T is
3862 : : "X op Y". */
3863 : 77673 : if ((rtx_equal_p (p,r) && rtx_equal_p (q,s))
3864 : 77574 : || (rtx_equal_p (p,s) && rtx_equal_p (q,r)))
3865 : : {
3866 : 99 : newi2pat = gen_rtx_SET (newdest, XEXP (src_op0, 0));
3867 : 99 : SUBST (XEXP (setsrc, 0), newdest);
3868 : 99 : SUBST (XEXP (setsrc, 1), newdest);
3869 : 99 : subst_done = true;
3870 : : }
3871 : :                   /* Split "((X op X) op Y) op Y" as "T op T" where
3872 : : T is "X op Y". */
3873 : 77321 : else if (rtx_equal_p (p,q) && rtx_equal_p (r,s))
3874 : : {
3875 : 60 : rtx tmp = simplify_gen_binary (code, mode, p, r);
3876 : 60 : newi2pat = gen_rtx_SET (newdest, tmp);
3877 : 60 : SUBST (XEXP (setsrc, 0), newdest);
3878 : 60 : SUBST (XEXP (setsrc, 1), newdest);
3879 : 60 : subst_done = true;
3880 : : }
3881 : : }
3882 : : }
3883 : :
3884 : 1572 : if (!subst_done)
3885 : : {
3886 : 3540227 : newi2pat = gen_rtx_SET (newdest, *split);
3887 : 3540227 : SUBST (*split, newdest);
3888 : : }
3889 : :
3890 : 3541799 : i2_code_number = recog_for_combine (&newi2pat, i2, &new_i2_notes);
3891 : :
3892 : : /* recog_for_combine might have added CLOBBERs to newi2pat.
3893 : : Make sure NEWPAT does not depend on the clobbered regs. */
3894 : 3541799 : if (GET_CODE (newi2pat) == PARALLEL)
3895 : 2441535 : for (i = XVECLEN (newi2pat, 0) - 1; i >= 0; i--)
3896 : 1638478 : if (GET_CODE (XVECEXP (newi2pat, 0, i)) == CLOBBER)
3897 : : {
3898 : 835421 : rtx reg = XEXP (XVECEXP (newi2pat, 0, i), 0);
3899 : 835421 : if (reg_overlap_mentioned_p (reg, newpat))
3900 : : {
3901 : 18918 : undo_all ();
3902 : 18918 : return 0;
3903 : : }
3904 : : }
3905 : :
3906 : : /* If the split point was a MULT and we didn't have one before,
3907 : : don't use one now. */
3908 : 3522881 : if (i2_code_number >= 0 && ! (split_code == MULT && ! have_mult))
3909 : 2115824 : insn_code_number = recog_for_combine (&newpat, i3, &new_i3_notes);
3910 : : }
3911 : : }
3912 : :
3913 : : /* Check for a case where we loaded from memory in a narrow mode and
3914 : : then sign extended it, but we need both registers. In that case,
3915 : : we have a PARALLEL with both loads from the same memory location.
3916 : : We can split this into a load from memory followed by a register-register
3917 : : copy. This saves at least one insn, more if register allocation can
3918 : : eliminate the copy.
3919 : :
3920 : :    We cannot do this if the involved modes have more than one element,
3921 : : like for vector or complex modes.
3922 : :
3923 : : We cannot do this if the destination of the first assignment is a
3924 : : condition code register. We eliminate this case by making sure
3925 : : the SET_DEST and SET_SRC have the same mode.
3926 : :
3927 : : We cannot do this if the destination of the second assignment is
3928 : : a register that we have already assumed is zero-extended. Similarly
3929 : : for a SUBREG of such a register. */
3930 : :
3931 : 5621314 : else if (i1 && insn_code_number < 0 && asm_noperands (newpat) < 0
3932 : 5564801 : && GET_CODE (newpat) == PARALLEL
3933 : 5563009 : && XVECLEN (newpat, 0) == 2
3934 : 4624379 : && GET_CODE (XVECEXP (newpat, 0, 0)) == SET
3935 : 4624181 : && GET_CODE (SET_SRC (XVECEXP (newpat, 0, 0))) == SIGN_EXTEND
3936 : 21708 : && (GET_MODE (SET_DEST (XVECEXP (newpat, 0, 0)))
3937 : 21708 : == GET_MODE (SET_SRC (XVECEXP (newpat, 0, 0))))
3938 : 21708 : && ! VECTOR_MODE_P (GET_MODE (SET_DEST (XVECEXP (newpat, 0, 0))))
3939 : : && ! COMPLEX_MODE_P (GET_MODE (SET_DEST (XVECEXP (newpat, 0, 0))))
3940 : 20265 : && GET_CODE (XVECEXP (newpat, 0, 1)) == SET
3941 : 20265 : && rtx_equal_p (SET_SRC (XVECEXP (newpat, 0, 1)),
3942 : 20265 : XEXP (SET_SRC (XVECEXP (newpat, 0, 0)), 0))
3943 : 5060 : && !modified_between_p (SET_SRC (XVECEXP (newpat, 0, 1)), i2, i3)
3944 : 5060 : && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) != ZERO_EXTRACT
3945 : 5060 : && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) != STRICT_LOW_PART
3946 : 5060 : && ! (temp_expr = SET_DEST (XVECEXP (newpat, 0, 1)),
3947 : : (REG_P (temp_expr)
3948 : 5060 : && reg_stat[REGNO (temp_expr)].nonzero_bits != 0
3949 : 5196 : && known_lt (GET_MODE_PRECISION (GET_MODE (temp_expr)),
3950 : : BITS_PER_WORD)
3951 : 4931 : && known_lt (GET_MODE_PRECISION (GET_MODE (temp_expr)),
3952 : : HOST_BITS_PER_INT)
3953 : 1182 : && (reg_stat[REGNO (temp_expr)].nonzero_bits
3954 : 1182 : != GET_MODE_MASK (word_mode))))
3955 : 4991 : && ! (GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) == SUBREG
3956 : 0 : && (temp_expr = SUBREG_REG (SET_DEST (XVECEXP (newpat, 0, 1))),
3957 : 0 : (REG_P (temp_expr)
3958 : 0 : && reg_stat[REGNO (temp_expr)].nonzero_bits != 0
3959 : 0 : && known_lt (GET_MODE_PRECISION (GET_MODE (temp_expr)),
3960 : : BITS_PER_WORD)
3961 : 0 : && known_lt (GET_MODE_PRECISION (GET_MODE (temp_expr)),
3962 : : HOST_BITS_PER_INT)
3963 : 0 : && (reg_stat[REGNO (temp_expr)].nonzero_bits
3964 : 0 : != GET_MODE_MASK (word_mode)))))
3965 : 4991 : && ! reg_overlap_mentioned_p (SET_DEST (XVECEXP (newpat, 0, 1)),
3966 : 4991 : SET_SRC (XVECEXP (newpat, 0, 1)))
3967 : 28144826 : && ! find_reg_note (i3, REG_UNUSED,
3968 : 4944 : SET_DEST (XVECEXP (newpat, 0, 0))))
3969 : : {
3970 : 4944 : rtx ni2dest;
3971 : :
3972 : 4944 : newi2pat = XVECEXP (newpat, 0, 0);
3973 : 4944 : ni2dest = SET_DEST (XVECEXP (newpat, 0, 0));
3974 : 4944 : newpat = XVECEXP (newpat, 0, 1);
3975 : 4944 : SUBST (SET_SRC (newpat),
3976 : : gen_lowpart (GET_MODE (SET_SRC (newpat)), ni2dest));
3977 : 4944 : i2_code_number = recog_for_combine (&newi2pat, i2, &new_i2_notes);
3978 : :
3979 : 4944 : if (i2_code_number >= 0)
3980 : 0 : insn_code_number = recog_for_combine (&newpat, i3, &new_i3_notes);
3981 : :
3982 : 4944 : if (insn_code_number >= 0)
3983 : : swap_i2i3 = 1;
3984 : : }
3985 : :
3986 : : /* Similarly, check for a case where we have a PARALLEL of two independent
3987 : : SETs but we started with three insns. In this case, we can do the sets
3988 : : as two separate insns. This case occurs when some SET allows two
3989 : : other insns to combine, but the destination of that SET is still live.
3990 : :
3991 : : Also do this if we started with two insns and (at least) one of the
3992 : : resulting sets is a noop; this noop will be deleted later.
3993 : :
3994 : : Also do this if we started with two insns neither of which was a simple
3995 : : move. */
3996 : :
3997 : 24097224 : else if (insn_code_number < 0 && asm_noperands (newpat) < 0
3998 : 24079905 : && GET_CODE (newpat) == PARALLEL
3999 : 10983574 : && XVECLEN (newpat, 0) == 2
4000 : 9955223 : && GET_CODE (XVECEXP (newpat, 0, 0)) == SET
4001 : 9850509 : && GET_CODE (XVECEXP (newpat, 0, 1)) == SET
4002 : 9792510 : && (i1
4003 : 5192645 : || set_noop_p (XVECEXP (newpat, 0, 0))
4004 : 5192195 : || set_noop_p (XVECEXP (newpat, 0, 1))
4005 : 5192193 : || (!i2_was_move && !i3_was_move))
4006 : 6479623 : && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 0))) != ZERO_EXTRACT
4007 : 6478887 : && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 0))) != STRICT_LOW_PART
4008 : 6478707 : && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) != ZERO_EXTRACT
4009 : 6478130 : && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) != STRICT_LOW_PART
4010 : 6478116 : && ! reg_referenced_p (SET_DEST (XVECEXP (newpat, 0, 1)),
4011 : : XVECEXP (newpat, 0, 0))
4012 : 5381428 : && ! reg_referenced_p (SET_DEST (XVECEXP (newpat, 0, 0)),
4013 : 5381428 : XVECEXP (newpat, 0, 1))
4014 : 33787225 : && ! (contains_muldiv (SET_SRC (XVECEXP (newpat, 0, 0)))
4015 : 403146 : && contains_muldiv (SET_SRC (XVECEXP (newpat, 0, 1)))))
4016 : : {
4017 : 5022812 : rtx set0 = XVECEXP (newpat, 0, 0);
4018 : 5022812 : rtx set1 = XVECEXP (newpat, 0, 1);
4019 : :
4020 : : /* Normally, it doesn't matter which of the two is done first, but
4021 : : one which uses any regs/memory set or used in between i2 and i3
4022 : : can't be first. The PARALLEL might also have been pre-existing
4023 : : in i3, so we need to make sure that we won't wrongly hoist a SET
4024 : : to i2 that would conflict with a death note present in there, or
4025 : : would have its dest modified or used between i2 and i3. */
4026 : 5022812 : if ((set_noop_p (set1)
4027 : 5022812 : || (!modified_between_p (SET_SRC (set1), i2, i3)
4028 : 10002723 : && !(REG_P (SET_DEST (set1))
4029 : 4988374 : && find_reg_note (i2, REG_DEAD, SET_DEST (set1)))
4030 : 5040032 : && !(GET_CODE (SET_DEST (set1)) == SUBREG
4031 : 25975 : && find_reg_note (i2, REG_DEAD,
4032 : 25975 : SUBREG_REG (SET_DEST (set1))))
4033 : 5014057 : && !modified_between_p (SET_DEST (set1), i2, i3)
4034 : 5014057 : && !reg_used_between_p (SET_DEST (set1), i2, i3)))
4035 : : /* If I3 is a jump, ensure that set0 is a jump so that
4036 : : we do not create invalid RTL. */
4037 : 10036863 : && (!JUMP_P (i3) || SET_DEST (set0) == pc_rtx))
4038 : : {
4039 : 5014051 : newi2pat = set1;
4040 : 5014051 : newpat = set0;
4041 : : }
4042 : 8761 : else if ((set_noop_p (set0)
4043 : 8755 : || (!modified_between_p (SET_SRC (set0), i2, i3)
4044 : 588 : && !(REG_P (SET_DEST (set0))
4045 : 294 : && find_reg_note (i2, REG_DEAD, SET_DEST (set0)))
4046 : 294 : && !(GET_CODE (SET_DEST (set0)) == SUBREG
4047 : 0 : && find_reg_note (i2, REG_DEAD,
4048 : 0 : SUBREG_REG (SET_DEST (set0))))
4049 : 294 : && !modified_between_p (SET_DEST (set0), i2, i3)
4050 : 293 : && !reg_used_between_p (SET_DEST (set0), i2, i3)))
4051 : : /* If I3 is a jump, ensure that set1 is a jump so that
4052 : : we do not create invalid RTL. */
4053 : 9054 : && (!JUMP_P (i3) || SET_DEST (set1) == pc_rtx))
4054 : : {
4055 : 299 : newi2pat = set0;
4056 : 299 : newpat = set1;
4057 : : }
4058 : : else
4059 : : {
4060 : 8462 : undo_all ();
4061 : 8462 : return 0;
4062 : : }
4063 : :
4064 : 5014350 : i2_code_number = recog_for_combine (&newi2pat, i2, &new_i2_notes);
4065 : :
4066 : 5014350 : if (i2_code_number >= 0)
4067 : : {
4068 : : /* recog_for_combine might have added CLOBBERs to newi2pat.
4069 : : Make sure NEWPAT does not depend on the clobbered regs. */
4070 : 3700694 : if (GET_CODE (newi2pat) == PARALLEL)
4071 : : {
4072 : 1292180 : for (i = XVECLEN (newi2pat, 0) - 1; i >= 0; i--)
4073 : 864703 : if (GET_CODE (XVECEXP (newi2pat, 0, i)) == CLOBBER)
4074 : : {
4075 : 437226 : rtx reg = XEXP (XVECEXP (newi2pat, 0, i), 0);
4076 : 437226 : if (reg_overlap_mentioned_p (reg, newpat))
4077 : : {
4078 : 3427 : undo_all ();
4079 : 3427 : return 0;
4080 : : }
4081 : : }
4082 : : }
4083 : :
4084 : 3697267 : insn_code_number = recog_for_combine (&newpat, i3, &new_i3_notes);
4085 : :
4086 : : /* Likewise, recog_for_combine might have added clobbers to NEWPAT.
4087 : : Checking that the SET0's SET_DEST and SET1's SET_DEST aren't
4088 : : mentioned/clobbered, ensures NEWI2PAT's SET_DEST is live. */
4089 : 3697267 : if (insn_code_number >= 0 && GET_CODE (newpat) == PARALLEL)
4090 : : {
4091 : 60473 : for (i = XVECLEN (newpat, 0) - 1; i >= 0; i--)
4092 : 40326 : if (GET_CODE (XVECEXP (newpat, 0, i)) == CLOBBER)
4093 : : {
4094 : 20179 : rtx reg = XEXP (XVECEXP (newpat, 0, i), 0);
4095 : 20179 : if (reg_overlap_mentioned_p (reg, SET_DEST (set0))
4096 : 20179 : || reg_overlap_mentioned_p (reg, SET_DEST (set1)))
4097 : : {
4098 : 0 : undo_all ();
4099 : 0 : return 0;
4100 : : }
4101 : : }
4102 : : }
4103 : :
4104 : : if (insn_code_number >= 0)
4105 : : split_i2i3 = true;
4106 : : }
4107 : : }
4108 : :
4109 : : /* If it still isn't recognized, fail and change things back the way they
4110 : : were. */
4111 : 29033069 : if ((insn_code_number < 0
4112 : : /* Is the result a reasonable ASM_OPERANDS? */
4113 : 32575979 : && (! check_asm_operands (newpat) || added_sets_1 || added_sets_2)))
4114 : : {
4115 : 28484601 : undo_all ();
4116 : 28484601 : return 0;
4117 : : }
4118 : :
4119 : : /* If we had to change another insn, make sure it is valid also. */
4120 : 4250679 : if (undobuf.other_insn)
4121 : : {
4122 : 211421 : CLEAR_HARD_REG_SET (newpat_used_regs);
4123 : :
4124 : 211421 : other_pat = PATTERN (undobuf.other_insn);
4125 : 211421 : other_code_number = recog_for_combine (&other_pat, undobuf.other_insn,
4126 : : &new_other_notes);
4127 : :
4128 : 211421 : if (other_code_number < 0 && ! check_asm_operands (other_pat))
4129 : : {
4130 : 8143 : undo_all ();
4131 : 8143 : return 0;
4132 : : }
4133 : : }
4134 : :
4135 : : /* Reject this combination if insn_cost reports that the replacement
4136 : : instructions are more expensive than the originals. */
4137 : 4242536 : if (!combine_validate_cost (i0, i1, i2, i3, newpat, newi2pat, other_pat))
4138 : : {
4139 : 207806 : undo_all ();
4140 : 207806 : return 0;
4141 : : }
4142 : :
4143 : 4034730 : if (MAY_HAVE_DEBUG_BIND_INSNS)
4144 : : {
4145 : 2206146 : struct undo *undo;
4146 : :
4147 : 6621254 : for (undo = undobuf.undos; undo; undo = undo->next)
4148 : 4415108 : if (undo->kind == UNDO_MODE)
4149 : : {
4150 : 1909 : rtx reg = regno_reg_rtx[undo->where.regno];
4151 : 1909 : machine_mode new_mode = GET_MODE (reg);
4152 : 1909 : machine_mode old_mode = undo->old_contents.m;
4153 : :
4154 : : /* Temporarily revert mode back. */
4155 : 1909 : adjust_reg_mode (reg, old_mode);
4156 : :
4157 : 1909 : if (reg == i2dest && i2scratch)
4158 : : {
4159 : : /* If we used i2dest as a scratch register with a
4160 : : different mode, substitute it for the original
4161 : : i2src while its original mode is temporarily
4162 : : restored, and then clear i2scratch so that we don't
4163 : : do it again later. */
4164 : 1909 : propagate_for_debug (i2, last_combined_insn, reg, i2src,
4165 : : this_basic_block);
4166 : 1909 : i2scratch = false;
4167 : : /* Put back the new mode. */
4168 : 1909 : adjust_reg_mode (reg, new_mode);
4169 : : }
4170 : : else
4171 : : {
4172 : 0 : rtx tempreg = gen_raw_REG (old_mode, REGNO (reg));
4173 : 0 : rtx_insn *first, *last;
4174 : :
4175 : 0 : if (reg == i2dest)
4176 : : {
4177 : : first = i2;
4178 : : last = last_combined_insn;
4179 : : }
4180 : : else
4181 : : {
4182 : 0 : first = i3;
4183 : 0 : last = undobuf.other_insn;
4184 : 0 : gcc_assert (last);
4185 : 0 : if (DF_INSN_LUID (last)
4186 : 0 : < DF_INSN_LUID (last_combined_insn))
4187 : 0 : last = last_combined_insn;
4188 : : }
4189 : :
4190 : : /* We're dealing with a reg that changed mode but not
4191 : : meaning, so we want to turn it into a subreg for
4192 : : the new mode. However, because of REG sharing and
4193 : : because its mode had already changed, we have to do
4194 : : it in two steps. First, replace any debug uses of
4195 : : reg, with its original mode temporarily restored,
4196 : : with this copy we have created; then, replace the
4197 : : copy with the SUBREG of the original shared reg,
4198 : : once again changed to the new mode. */
4199 : 0 : propagate_for_debug (first, last, reg, tempreg,
4200 : : this_basic_block);
4201 : 0 : adjust_reg_mode (reg, new_mode);
4202 : 0 : propagate_for_debug (first, last, tempreg,
4203 : : lowpart_subreg (old_mode, reg, new_mode),
4204 : : this_basic_block);
4205 : : }
4206 : : }
4207 : : }
4208 : :
4209 : : /* If we will be able to accept this, we have made a
4210 : : change to the destination of I3. This requires us to
4211 : : do a few adjustments. */
4212 : :
4213 : 4034730 : if (changed_i3_dest)
4214 : : {
4215 : 16520 : PATTERN (i3) = newpat;
4216 : 16520 : adjust_for_new_dest (i3);
4217 : : }
4218 : :
4219 : 4034730 : bool only_i3_changed = !i0 && !i1 && rtx_equal_p (newi2pat, PATTERN (i2));
4220 : :
4221 : : /* If only i3 has changed, any split of the combined instruction just
4222 : : restored i2 to its original state. No destinations moved from i3
4223 : : to i2. */
4224 : : if (only_i3_changed)
4225 : : split_i2i3 = false;
4226 : :
4227 : : /* We now know that we can do this combination. Merge the insns and
4228 : : update the status of registers and LOG_LINKS. */
4229 : :
4230 : 4034730 : if (undobuf.other_insn)
4231 : : {
4232 : 203142 : rtx note, next;
4233 : :
4234 : 203142 : PATTERN (undobuf.other_insn) = other_pat;
4235 : :
4236 : : /* If any of the notes in OTHER_INSN were REG_DEAD or REG_UNUSED,
4237 : : ensure that they are still valid. Then add any non-duplicate
4238 : : notes added by recog_for_combine. */
4239 : 606484 : for (note = REG_NOTES (undobuf.other_insn); note; note = next)
4240 : : {
4241 : 403342 : next = XEXP (note, 1);
4242 : :
4243 : 403342 : if ((REG_NOTE_KIND (note) == REG_DEAD
4244 : 206091 : && !reg_referenced_p (XEXP (note, 0),
4245 : 206091 : PATTERN (undobuf.other_insn)))
4246 : 398787 : ||(REG_NOTE_KIND (note) == REG_UNUSED
4247 : 28 : && !reg_set_p (XEXP (note, 0),
4248 : 28 : PATTERN (undobuf.other_insn)))
4249 : : /* Simply drop equal note since it may be no longer valid
4250 : : for other_insn. It may be possible to record that CC
4251 : : register is changed and only discard those notes, but
4252 : : in practice it's unnecessary complication and doesn't
4253 : : give any meaningful improvement.
4254 : :
4255 : : See PR78559. */
4256 : 398787 : || REG_NOTE_KIND (note) == REG_EQUAL
4257 : 801995 : || REG_NOTE_KIND (note) == REG_EQUIV)
4258 : 4689 : remove_note (undobuf.other_insn, note);
4259 : : }
4260 : :
4261 : 203142 : distribute_notes (new_other_notes, undobuf.other_insn,
4262 : : undobuf.other_insn, NULL, NULL_RTX, NULL_RTX,
4263 : : NULL_RTX);
4264 : : }
4265 : :
4266 : 4034730 : if (swap_i2i3)
4267 : : {
4268 : : /* I3 now uses what used to be its destination and which is now
4269 : : I2's destination. This requires us to do a few adjustments. */
4270 : 0 : PATTERN (i3) = newpat;
4271 : 0 : adjust_for_new_dest (i3);
4272 : : }
4273 : :
4274 : 4034730 : if (swap_i2i3 || split_i2i3)
4275 : : {
4276 : : /* We might need a LOG_LINK from I3 to I2. But then we used to
4277 : : have one, so we still will.
4278 : :
4279 : : However, some later insn might be using I2's dest and have
4280 : : a LOG_LINK pointing at I3. We should change it to point at
4281 : : I2 instead. */
4282 : :
4283 : : /* newi2pat is usually a SET here; however, recog_for_combine might
4284 : : have added some clobbers. */
4285 : 30017 : rtx x = newi2pat;
4286 : 30017 : if (GET_CODE (x) == PARALLEL)
4287 : 482 : x = XVECEXP (newi2pat, 0, 0);
4288 : :
4289 : 30017 : if (REG_P (SET_DEST (x))
4290 : 6 : || (GET_CODE (SET_DEST (x)) == SUBREG
4291 : 0 : && REG_P (SUBREG_REG (SET_DEST (x)))))
4292 : : {
4293 : 30011 : unsigned int regno = reg_or_subregno (SET_DEST (x));
4294 : :
4295 : 30011 : bool done = false;
4296 : 516585 : for (rtx_insn *insn = NEXT_INSN (i3);
4297 : 516585 : !done
4298 : 516585 : && insn
4299 : 515238 : && INSN_P (insn)
4300 : 1003159 : && BLOCK_FOR_INSN (insn) == this_basic_block;
4301 : 486574 : insn = NEXT_INSN (insn))
4302 : : {
4303 : 486574 : if (DEBUG_INSN_P (insn))
4304 : 172400 : continue;
4305 : 314174 : struct insn_link *link;
4306 : 589182 : FOR_EACH_LOG_LINK (link, insn)
4307 : 275018 : if (link->insn == i3 && link->regno == regno)
4308 : : {
4309 : 10 : link->insn = i2;
4310 : 10 : done = true;
4311 : 10 : break;
4312 : : }
4313 : : }
4314 : : }
4315 : : }
4316 : :
4317 : 4034730 : {
4318 : 4034730 : rtx i3notes, i2notes, i1notes = 0, i0notes = 0;
4319 : 4034730 : struct insn_link *i3links, *i2links, *i1links = 0, *i0links = 0;
4320 : 4034730 : rtx midnotes = 0;
4321 : 4034730 : int from_luid;
4322 : : /* Compute which registers we expect to eliminate. newi2pat may be setting
4323 : : either i3dest or i2dest, so we must check it. */
4324 : 103181 : rtx elim_i2 = ((newi2pat && reg_set_p (i2dest, newi2pat))
4325 : 3943047 : || i2dest_in_i2src || i2dest_in_i1src || i2dest_in_i0src
4326 : 3868016 : || !i2dest_killed
4327 : 7901666 : ? 0 : i2dest);
4328 : : /* For i1, we need to compute both local elimination and global
4329 : : elimination information with respect to newi2pat because i1dest
4330 : : may be the same as i3dest, in which case newi2pat may be setting
4331 : : i1dest. Global information is used when distributing REG_DEAD
4332 : : note for i2 and i3, in which case it does matter if newi2pat sets
4333 : : i1dest or not.
4334 : :
4335 : : Local information is used when distributing REG_DEAD note for i1,
4336 : : in which case it doesn't matter if newi2pat sets i1dest or not.
4337 : : See PR62151, if we have four insns combination:
4338 : : i0: r0 <- i0src
4339 : : i1: r1 <- i1src (using r0)
4340 : : REG_DEAD (r0)
4341 : : i2: r0 <- i2src (using r1)
4342 : : i3: r3 <- i3src (using r0)
4343 : : ix: using r0
4344 : : From i1's point of view, r0 is eliminated, no matter if it is set
4345 : : by newi2pat or not. In other words, REG_DEAD info for r0 in i1
4346 : : should be discarded.
4347 : :
4348 : : Note local information only affects cases in forms like "I1->I2->I3",
4349 : : "I0->I1->I2->I3" or "I0&I1->I2, I2->I3". For other cases like
4350 : : "I0->I1, I1&I2->I3" or "I1&I2->I3", newi2pat won't set i1dest or
4351 : : i0dest anyway. */
4352 : 99548 : rtx local_elim_i1 = (i1 == 0 || i1dest_in_i1src || i1dest_in_i0src
4353 : 99481 : || !i1dest_killed
4354 : 4034730 : ? 0 : i1dest);
4355 : 99480 : rtx elim_i1 = (local_elim_i1 == 0
4356 : 99480 : || (newi2pat && reg_set_p (i1dest, newi2pat))
4357 : 99480 : ? 0 : i1dest);
4358 : : /* Same case as i1. */
4359 : 5014 : rtx local_elim_i0 = (i0 == 0 || i0dest_in_i0src || !i0dest_killed
4360 : 4034730 : ? 0 : i0dest);
4361 : 4996 : rtx elim_i0 = (local_elim_i0 == 0
4362 : 4996 : || (newi2pat && reg_set_p (i0dest, newi2pat))
4363 : 4996 : ? 0 : i0dest);
4364 : :
4365 : : /* Get the old REG_NOTES and LOG_LINKS from all our insns and
4366 : : clear them. */
4367 : 4034730 : i3notes = REG_NOTES (i3), i3links = LOG_LINKS (i3);
4368 : 4034730 : i2notes = REG_NOTES (i2), i2links = LOG_LINKS (i2);
4369 : 4034730 : if (i1)
4370 : 99548 : i1notes = REG_NOTES (i1), i1links = LOG_LINKS (i1);
4371 : 4034730 : if (i0)
4372 : 5014 : i0notes = REG_NOTES (i0), i0links = LOG_LINKS (i0);
4373 : :
4374 : : /* Ensure that we do not have something that should not be shared but
4375 : : occurs multiple times in the new insns. Check this by first
4376 : : resetting all the `used' flags and then copying anything is shared. */
4377 : :
4378 : 4034730 : reset_used_flags (i3notes);
4379 : 4034730 : reset_used_flags (i2notes);
4380 : 4034730 : reset_used_flags (i1notes);
4381 : 4034730 : reset_used_flags (i0notes);
4382 : 4034730 : reset_used_flags (newpat);
4383 : 4034730 : reset_used_flags (newi2pat);
4384 : 4034730 : if (undobuf.other_insn)
4385 : 203142 : reset_used_flags (PATTERN (undobuf.other_insn));
4386 : :
4387 : 4034730 : i3notes = copy_rtx_if_shared (i3notes);
4388 : 4034730 : i2notes = copy_rtx_if_shared (i2notes);
4389 : 4034730 : i1notes = copy_rtx_if_shared (i1notes);
4390 : 4034730 : i0notes = copy_rtx_if_shared (i0notes);
4391 : 4034730 : newpat = copy_rtx_if_shared (newpat);
4392 : 4034730 : newi2pat = copy_rtx_if_shared (newi2pat);
4393 : 4034730 : if (undobuf.other_insn)
4394 : 203142 : reset_used_flags (PATTERN (undobuf.other_insn));
4395 : :
4396 : 4034730 : INSN_CODE (i3) = insn_code_number;
4397 : 4034730 : PATTERN (i3) = newpat;
4398 : :
4399 : 4034730 : if (CALL_P (i3) && CALL_INSN_FUNCTION_USAGE (i3))
4400 : : {
4401 : 353797 : for (rtx link = CALL_INSN_FUNCTION_USAGE (i3); link;
4402 : 272351 : link = XEXP (link, 1))
4403 : : {
4404 : 272351 : if (substed_i2)
4405 : : {
4406 : : /* I2SRC must still be meaningful at this point. Some
4407 : : splitting operations can invalidate I2SRC, but those
4408 : : operations do not apply to calls. */
4409 : 272351 : gcc_assert (i2src);
4410 : 272351 : XEXP (link, 0) = simplify_replace_rtx (XEXP (link, 0),
4411 : : i2dest, i2src);
4412 : : }
4413 : 272351 : if (substed_i1)
4414 : 0 : XEXP (link, 0) = simplify_replace_rtx (XEXP (link, 0),
4415 : : i1dest, i1src);
4416 : 272351 : if (substed_i0)
4417 : 0 : XEXP (link, 0) = simplify_replace_rtx (XEXP (link, 0),
4418 : : i0dest, i0src);
4419 : : }
4420 : : }
4421 : :
4422 : 4034730 : if (undobuf.other_insn)
4423 : 203142 : INSN_CODE (undobuf.other_insn) = other_code_number;
4424 : :
4425 : : /* We had one special case above where I2 had more than one set and
4426 : : we replaced a destination of one of those sets with the destination
4427 : : of I3. In that case, we have to update LOG_LINKS of insns later
4428 : : in this basic block. Note that this (expensive) case is rare.
4429 : :
4430 : : Also, in this case, we must pretend that all REG_NOTEs for I2
4431 : : actually came from I3, so that REG_UNUSED notes from I2 will be
4432 : : properly handled. */
4433 : :
4434 : 4034730 : if (i3_subst_into_i2)
4435 : : {
4436 : 205696 : for (i = 0; i < XVECLEN (PATTERN (i2), 0); i++)
4437 : 141183 : if ((GET_CODE (XVECEXP (PATTERN (i2), 0, i)) == SET
4438 : 65716 : || GET_CODE (XVECEXP (PATTERN (i2), 0, i)) == CLOBBER)
4439 : 140359 : && REG_P (SET_DEST (XVECEXP (PATTERN (i2), 0, i)))
4440 : 126000 : && SET_DEST (XVECEXP (PATTERN (i2), 0, i)) != i2dest
4441 : 267183 : && ! find_reg_note (i2, REG_UNUSED,
4442 : 126000 : SET_DEST (XVECEXP (PATTERN (i2), 0, i))))
4443 : 29535917 : for (temp_insn = NEXT_INSN (i2);
4444 : : temp_insn
4445 : 29535917 : && (this_basic_block->next_bb == EXIT_BLOCK_PTR_FOR_FN (cfun)
4446 : 29439336 : || BB_HEAD (this_basic_block) != temp_insn);
4447 : 29478886 : temp_insn = NEXT_INSN (temp_insn))
4448 : 29478886 : if (temp_insn != i3 && NONDEBUG_INSN_P (temp_insn))
4449 : 19094374 : FOR_EACH_LOG_LINK (link, temp_insn)
4450 : 6933504 : if (link->insn == i2)
4451 : 438 : link->insn = i3;
4452 : :
4453 : 64513 : if (i3notes)
4454 : : {
4455 : : rtx link = i3notes;
4456 : 71454 : while (XEXP (link, 1))
4457 : : link = XEXP (link, 1);
4458 : 64513 : XEXP (link, 1) = i2notes;
4459 : : }
4460 : : else
4461 : : i3notes = i2notes;
4462 : : i2notes = 0;
4463 : : }
4464 : :
4465 : 4034730 : LOG_LINKS (i3) = NULL;
4466 : 4034730 : REG_NOTES (i3) = 0;
4467 : 4034730 : LOG_LINKS (i2) = NULL;
4468 : 4034730 : REG_NOTES (i2) = 0;
4469 : :
4470 : 4034730 : if (newi2pat)
4471 : : {
4472 : 103181 : if (MAY_HAVE_DEBUG_BIND_INSNS && i2scratch)
4473 : 11065 : propagate_for_debug (i2, last_combined_insn, i2dest, i2src,
4474 : : this_basic_block);
4475 : 103181 : INSN_CODE (i2) = i2_code_number;
4476 : 103181 : PATTERN (i2) = newi2pat;
4477 : : }
4478 : : else
4479 : : {
4480 : 3931549 : if (MAY_HAVE_DEBUG_BIND_INSNS && i2src)
4481 : 2140408 : propagate_for_debug (i2, last_combined_insn, i2dest, i2src,
4482 : : this_basic_block);
4483 : 3931549 : SET_INSN_DELETED (i2);
4484 : : }
4485 : :
4486 : 4034730 : if (i1)
4487 : : {
4488 : 99548 : LOG_LINKS (i1) = NULL;
4489 : 99548 : REG_NOTES (i1) = 0;
4490 : 99548 : if (MAY_HAVE_DEBUG_BIND_INSNS)
4491 : 51723 : propagate_for_debug (i1, last_combined_insn, i1dest, i1src,
4492 : : this_basic_block);
4493 : 99548 : SET_INSN_DELETED (i1);
4494 : : }
4495 : :
4496 : 4034730 : if (i0)
4497 : : {
4498 : 5014 : LOG_LINKS (i0) = NULL;
4499 : 5014 : REG_NOTES (i0) = 0;
4500 : 5014 : if (MAY_HAVE_DEBUG_BIND_INSNS)
4501 : 3749 : propagate_for_debug (i0, last_combined_insn, i0dest, i0src,
4502 : : this_basic_block);
4503 : 5014 : SET_INSN_DELETED (i0);
4504 : : }
4505 : :
4506 : : /* Get death notes for everything that is now used in either I3 or
4507 : : I2 and used to die in a previous insn. If we built two new
4508 : : patterns, move from I1 to I2 then I2 to I3 so that we get the
4509 : : proper movement on registers that I2 modifies. */
4510 : :
4511 : 4034730 : if (i0)
4512 : 5014 : from_luid = DF_INSN_LUID (i0);
4513 : 4029716 : else if (i1)
4514 : 94534 : from_luid = DF_INSN_LUID (i1);
4515 : : else
4516 : 3935182 : from_luid = DF_INSN_LUID (i2);
4517 : 4034730 : if (newi2pat)
4518 : 103181 : move_deaths (newi2pat, NULL_RTX, from_luid, i2, &midnotes);
4519 : 4034730 : move_deaths (newpat, newi2pat, from_luid, i3, &midnotes);
4520 : :
4521 : : /* Distribute all the LOG_LINKS and REG_NOTES from I1, I2, and I3. */
4522 : 4034730 : if (i3notes)
4523 : 7338224 : distribute_notes (i3notes, i3, i3, newi2pat ? i2 : NULL,
4524 : : elim_i2, elim_i1, elim_i0);
4525 : 4034730 : if (i2notes)
4526 : 5698726 : distribute_notes (i2notes, i2, i3, newi2pat ? i2 : NULL,
4527 : : elim_i2, elim_i1, elim_i0);
4528 : 4034730 : if (i1notes)
4529 : 59893 : distribute_notes (i1notes, i1, i3, newi2pat ? i2 : NULL,
4530 : : elim_i2, local_elim_i1, local_elim_i0);
4531 : 4034730 : if (i0notes)
4532 : 4370 : distribute_notes (i0notes, i0, i3, newi2pat ? i2 : NULL,
4533 : : elim_i2, elim_i1, local_elim_i0);
4534 : 4034730 : if (midnotes)
4535 : 4854482 : distribute_notes (midnotes, NULL, i3, newi2pat ? i2 : NULL,
4536 : : elim_i2, elim_i1, elim_i0);
4537 : :
4538 : : /* Distribute any notes added to I2 or I3 by recog_for_combine. We
4539 : : know these are REG_UNUSED and want them to go to the desired insn,
4540 : : so we always pass it as i3. */
4541 : :
4542 : 4034730 : if (newi2pat && new_i2_notes)
4543 : 39648 : distribute_notes (new_i2_notes, i2, i2, NULL, NULL_RTX, NULL_RTX,
4544 : : NULL_RTX);
4545 : :
4546 : 4034730 : if (new_i3_notes)
4547 : 144269 : distribute_notes (new_i3_notes, i3, i3, NULL, NULL_RTX, NULL_RTX,
4548 : : NULL_RTX);
4549 : :
4550 : : /* If I3DEST was used in I3SRC, it really died in I3. We may need to
4551 : : put a REG_DEAD note for it somewhere. If NEWI2PAT exists and sets
4552 : : I3DEST, the death must be somewhere before I2, not I3. If we passed I3
4553 : : in that case, it might delete I2. Similarly for I2 and I1.
4554 : : Show an additional death due to the REG_DEAD note we make here. If
4555 : : we discard it in distribute_notes, we will decrement it again. */
4556 : :
4557 : 4034730 : if (i3dest_killed)
4558 : : {
4559 : 306220 : rtx new_note = alloc_reg_note (REG_DEAD, i3dest_killed, NULL_RTX);
4560 : 306220 : if (newi2pat && reg_set_p (i3dest_killed, newi2pat))
4561 : 1023 : distribute_notes (new_note, NULL, i2, NULL, elim_i2,
4562 : : elim_i1, elim_i0);
4563 : : else
4564 : 609004 : distribute_notes (new_note, NULL, i3, newi2pat ? i2 : NULL,
4565 : : elim_i2, elim_i1, elim_i0);
4566 : : }
4567 : :
4568 : 4034730 : if (i2dest_in_i2src)
4569 : : {
4570 : 73970 : rtx new_note = alloc_reg_note (REG_DEAD, i2dest, NULL_RTX);
4571 : 73970 : if (newi2pat && reg_set_p (i2dest, newi2pat))
4572 : 449 : distribute_notes (new_note, NULL, i2, NULL, NULL_RTX,
4573 : : NULL_RTX, NULL_RTX);
4574 : : else
4575 : 147004 : distribute_notes (new_note, NULL, i3, newi2pat ? i2 : NULL,
4576 : : NULL_RTX, NULL_RTX, NULL_RTX);
4577 : : }
4578 : :
4579 : 4034730 : if (i1dest_in_i1src)
4580 : : {
4581 : 65 : rtx new_note = alloc_reg_note (REG_DEAD, i1dest, NULL_RTX);
4582 : 65 : if (newi2pat && reg_set_p (i1dest, newi2pat))
4583 : 4 : distribute_notes (new_note, NULL, i2, NULL, NULL_RTX,
4584 : : NULL_RTX, NULL_RTX);
4585 : : else
4586 : 107 : distribute_notes (new_note, NULL, i3, newi2pat ? i2 : NULL,
4587 : : NULL_RTX, NULL_RTX, NULL_RTX);
4588 : : }
4589 : :
4590 : 4034730 : if (i0dest_in_i0src)
4591 : : {
4592 : 18 : rtx new_note = alloc_reg_note (REG_DEAD, i0dest, NULL_RTX);
4593 : 18 : if (newi2pat && reg_set_p (i0dest, newi2pat))
4594 : 0 : distribute_notes (new_note, NULL, i2, NULL, NULL_RTX,
4595 : : NULL_RTX, NULL_RTX);
4596 : : else
4597 : 36 : distribute_notes (new_note, NULL, i3, newi2pat ? i2 : NULL,
4598 : : NULL_RTX, NULL_RTX, NULL_RTX);
4599 : : }
4600 : :
4601 : 4034730 : if (only_i3_changed)
4602 : 30774 : distribute_links (i3links, i3, param_max_combine_search_insns);
4603 : : else
4604 : : {
4605 : 4003956 : distribute_links (i3links);
4606 : 4003956 : distribute_links (i2links, i2);
4607 : 4003956 : distribute_links (i1links);
4608 : 4003956 : distribute_links (i0links);
4609 : : }
4610 : :
4611 : 4034730 : if (REG_P (i2dest))
4612 : : {
4613 : 4034730 : struct insn_link *link;
4614 : 4034730 : rtx_insn *i2_insn = 0;
4615 : 4034730 : rtx i2_val = 0, set;
4616 : :
4617 : : /* The insn that used to set this register doesn't exist, and
4618 : : this life of the register may not exist either. See if one of
4619 : : I3's links points to an insn that sets I2DEST. If it does,
4620 : : that is now the last known value for I2DEST. If we don't update
4621 : : this and I2 set the register to a value that depended on its old
4622 : : contents, we will get confused. If this insn is used, things
4623 : : will be set correctly in combine_instructions. */
4624 : 7385014 : FOR_EACH_LOG_LINK (link, i3)
4625 : 3350284 : if ((set = single_set (link->insn)) != 0
4626 : 3350284 : && rtx_equal_p (i2dest, SET_DEST (set)))
4627 : 44671 : i2_insn = link->insn, i2_val = SET_SRC (set);
4628 : :
4629 : 4034730 : record_value_for_reg (i2dest, i2_insn, i2_val);
4630 : :
4631 : : /* If the reg formerly set in I2 died only once and that was in I3,
4632 : : zero its use count so it won't make `reload' do any work. */
4633 : 4034730 : if (! added_sets_2
4634 : 3904584 : && (newi2pat == 0 || ! reg_mentioned_p (i2dest, newi2pat))
4635 : 3866700 : && ! i2dest_in_i2src
4636 : 7846829 : && REGNO (i2dest) < reg_n_sets_max)
4637 : 3812097 : INC_REG_N_SETS (REGNO (i2dest), -1);
4638 : : }
4639 : :
4640 : 4034730 : if (i1 && REG_P (i1dest))
4641 : : {
4642 : 99548 : struct insn_link *link;
4643 : 99548 : rtx_insn *i1_insn = 0;
4644 : 99548 : rtx i1_val = 0, set;
4645 : :
4646 : 173840 : FOR_EACH_LOG_LINK (link, i3)
4647 : 74292 : if ((set = single_set (link->insn)) != 0
4648 : 74292 : && rtx_equal_p (i1dest, SET_DEST (set)))
4649 : 171 : i1_insn = link->insn, i1_val = SET_SRC (set);
4650 : :
4651 : 99548 : record_value_for_reg (i1dest, i1_insn, i1_val);
4652 : :
4653 : 99548 : if (! added_sets_1
4654 : : && ! i1dest_in_i1src
4655 : 99548 : && REGNO (i1dest) < reg_n_sets_max)
4656 : 93769 : INC_REG_N_SETS (REGNO (i1dest), -1);
4657 : : }
4658 : :
4659 : 4034730 : if (i0 && REG_P (i0dest))
4660 : : {
4661 : 5014 : struct insn_link *link;
4662 : 5014 : rtx_insn *i0_insn = 0;
4663 : 5014 : rtx i0_val = 0, set;
4664 : :
4665 : 7077 : FOR_EACH_LOG_LINK (link, i3)
4666 : 2063 : if ((set = single_set (link->insn)) != 0
4667 : 2063 : && rtx_equal_p (i0dest, SET_DEST (set)))
4668 : 0 : i0_insn = link->insn, i0_val = SET_SRC (set);
4669 : :
4670 : 5014 : record_value_for_reg (i0dest, i0_insn, i0_val);
4671 : :
4672 : 5014 : if (! added_sets_0
4673 : : && ! i0dest_in_i0src
4674 : 5014 : && REGNO (i0dest) < reg_n_sets_max)
4675 : 4955 : INC_REG_N_SETS (REGNO (i0dest), -1);
4676 : : }
4677 : :
4678 : : /* Update reg_stat[].nonzero_bits et al for any changes that may have
4679 : : been made to this insn. The order is important, because newi2pat
4680 : : can affect nonzero_bits of newpat. */
4681 : 4034730 : if (newi2pat)
4682 : 103181 : note_pattern_stores (newi2pat, set_nonzero_bits_and_sign_copies, NULL);
4683 : 4034730 : note_pattern_stores (newpat, set_nonzero_bits_and_sign_copies, NULL);
4684 : : }
4685 : :
4686 : 4034730 : if (undobuf.other_insn != NULL_RTX)
4687 : : {
4688 : 203142 : if (dump_file)
4689 : : {
4690 : 12 : fprintf (dump_file, "modifying other_insn ");
4691 : 12 : dump_insn_slim (dump_file, undobuf.other_insn);
4692 : : }
4693 : 203142 : df_insn_rescan (undobuf.other_insn);
4694 : : }
4695 : :
4696 : 4034730 : if (i0 && !(NOTE_P (i0) && (NOTE_KIND (i0) == NOTE_INSN_DELETED)))
4697 : : {
4698 : 0 : if (dump_file)
4699 : : {
4700 : 0 : fprintf (dump_file, "modifying insn i0 ");
4701 : 0 : dump_insn_slim (dump_file, i0);
4702 : : }
4703 : 0 : df_insn_rescan (i0);
4704 : : }
4705 : :
4706 : 4034730 : if (i1 && !(NOTE_P (i1) && (NOTE_KIND (i1) == NOTE_INSN_DELETED)))
4707 : : {
4708 : 0 : if (dump_file)
4709 : : {
4710 : 0 : fprintf (dump_file, "modifying insn i1 ");
4711 : 0 : dump_insn_slim (dump_file, i1);
4712 : : }
4713 : 0 : df_insn_rescan (i1);
4714 : : }
4715 : :
4716 : 4034730 : if (i2 && !(NOTE_P (i2) && (NOTE_KIND (i2) == NOTE_INSN_DELETED)))
4717 : : {
4718 : 103181 : if (dump_file)
4719 : : {
4720 : 15 : fprintf (dump_file, "modifying insn i2 ");
4721 : 15 : dump_insn_slim (dump_file, i2);
4722 : : }
4723 : 103181 : df_insn_rescan (i2);
4724 : : }
4725 : :
4726 : 4034730 : if (i3 && !(NOTE_P (i3) && (NOTE_KIND (i3) == NOTE_INSN_DELETED)))
4727 : : {
4728 : 4034730 : if (dump_file)
4729 : : {
4730 : 240 : fprintf (dump_file, "modifying insn i3 ");
4731 : 240 : dump_insn_slim (dump_file, i3);
4732 : : }
4733 : 4034730 : df_insn_rescan (i3);
4734 : : }
4735 : :
4736 : : /* Set new_direct_jump_p if a new return or simple jump instruction
4737 : : has been created. Adjust the CFG accordingly. */
4738 : 4034730 : if (returnjump_p (i3) || any_uncondjump_p (i3))
4739 : : {
4740 : 162 : *new_direct_jump_p = 1;
4741 : 162 : mark_jump_label (PATTERN (i3), i3, 0);
4742 : 162 : update_cfg_for_uncondjump (i3);
4743 : : }
4744 : :
4745 : 4034730 : if (undobuf.other_insn != NULL_RTX
4746 : 4034730 : && (returnjump_p (undobuf.other_insn)
4747 : 203142 : || any_uncondjump_p (undobuf.other_insn)))
4748 : : {
4749 : 1905 : *new_direct_jump_p = 1;
4750 : 1905 : update_cfg_for_uncondjump (undobuf.other_insn);
4751 : : }
4752 : :
4753 : 4034730 : if (GET_CODE (PATTERN (i3)) == TRAP_IF
4754 : 4034730 : && XEXP (PATTERN (i3), 0) == const1_rtx)
4755 : : {
4756 : 0 : basic_block bb = BLOCK_FOR_INSN (i3);
4757 : 0 : gcc_assert (bb);
4758 : 0 : remove_edge (split_block (bb, i3));
4759 : 0 : emit_barrier_after_bb (bb);
4760 : 0 : *new_direct_jump_p = 1;
4761 : : }
4762 : :
4763 : 4034730 : if (undobuf.other_insn
4764 : 203142 : && GET_CODE (PATTERN (undobuf.other_insn)) == TRAP_IF
4765 : 4034730 : && XEXP (PATTERN (undobuf.other_insn), 0) == const1_rtx)
4766 : : {
4767 : 0 : basic_block bb = BLOCK_FOR_INSN (undobuf.other_insn);
4768 : 0 : gcc_assert (bb);
4769 : 0 : remove_edge (split_block (bb, undobuf.other_insn));
4770 : 0 : emit_barrier_after_bb (bb);
4771 : 0 : *new_direct_jump_p = 1;
4772 : : }
4773 : :
4774 : : /* A noop might also need cleaning up of CFG, if it comes from the
4775 : : simplification of a jump. */
4776 : 4034730 : if (JUMP_P (i3)
4777 : 46726 : && GET_CODE (newpat) == SET
4778 : 34114 : && SET_SRC (newpat) == pc_rtx
4779 : 401 : && SET_DEST (newpat) == pc_rtx)
4780 : : {
4781 : 401 : *new_direct_jump_p = 1;
4782 : 401 : update_cfg_for_uncondjump (i3);
4783 : : }
4784 : :
4785 : 4034730 : if (undobuf.other_insn != NULL_RTX
4786 : 203142 : && JUMP_P (undobuf.other_insn)
4787 : 197161 : && GET_CODE (PATTERN (undobuf.other_insn)) == SET
4788 : 197161 : && SET_SRC (PATTERN (undobuf.other_insn)) == pc_rtx
4789 : 4036762 : && SET_DEST (PATTERN (undobuf.other_insn)) == pc_rtx)
4790 : : {
4791 : 2032 : *new_direct_jump_p = 1;
4792 : 2032 : update_cfg_for_uncondjump (undobuf.other_insn);
4793 : : }
4794 : :
4795 : 4034730 : combine_successes++;
4796 : 4034730 : undo_commit ();
4797 : :
4798 : 4034730 : if (only_i3_changed)
4799 : : return i3;
4800 : :
4801 : 4003956 : rtx_insn *ret = newi2pat ? i2 : i3;
4802 : 4003956 : if (added_links_insn && DF_INSN_LUID (added_links_insn) < DF_INSN_LUID (ret))
4803 : : ret = added_links_insn;
4804 : 4003956 : if (added_notes_insn && DF_INSN_LUID (added_notes_insn) < DF_INSN_LUID (ret))
4805 : : ret = added_notes_insn;
4806 : :
4807 : : return ret;
4808 : : }
4809 : :
4810 : : /* Get a marker for undoing to the current state. */
4811 : :
4812 : : static void *
4813 : 37273431 : get_undo_marker (void)
4814 : : {
4815 : 37273431 : return undobuf.undos;
4816 : : }
4817 : :
4818 : : /* Undo the modifications up to the marker. */
4819 : :
4820 : : static void
4821 : 43834388 : undo_to_marker (void *marker)
4822 : : {
4823 : 43834388 : struct undo *undo, *next;
4824 : :
4825 : 138435045 : for (undo = undobuf.undos; undo != marker; undo = next)
4826 : : {
4827 : 94600657 : gcc_assert (undo);
4828 : :
4829 : 94600657 : next = undo->next;
4830 : 94600657 : switch (undo->kind)
4831 : : {
4832 : 87634334 : case UNDO_RTX:
4833 : 87634334 : *undo->where.r = undo->old_contents.r;
4834 : 87634334 : break;
4835 : 6323181 : case UNDO_INT:
4836 : 6323181 : *undo->where.i = undo->old_contents.i;
4837 : 6323181 : break;
4838 : 570214 : case UNDO_MODE:
4839 : 570214 : adjust_reg_mode (regno_reg_rtx[undo->where.regno],
4840 : : undo->old_contents.m);
4841 : 570214 : break;
4842 : 72928 : case UNDO_LINKS:
4843 : 72928 : *undo->where.l = undo->old_contents.l;
4844 : 72928 : break;
4845 : 0 : default:
4846 : 0 : gcc_unreachable ();
4847 : : }
4848 : :
4849 : 94600657 : undo->next = undobuf.frees;
4850 : 94600657 : undobuf.frees = undo;
4851 : : }
4852 : :
4853 : 43834388 : undobuf.undos = (struct undo *) marker;
4854 : 43834388 : }
4855 : :
4856 : : /* Undo all the modifications recorded in undobuf. */
4857 : :
4858 : : static void
4859 : 42738849 : undo_all (void)
4860 : : {
4861 : 42738849 : undo_to_marker (0);
4862 : 0 : }
4863 : :
4864 : : /* We've committed to accepting the changes we made. Move all
4865 : : of the undos to the free list. */
4866 : :
4867 : : static void
4868 : 4034730 : undo_commit (void)
4869 : : {
4870 : 4034730 : struct undo *undo, *next;
4871 : :
4872 : 11922888 : for (undo = undobuf.undos; undo; undo = next)
4873 : : {
4874 : 7888158 : next = undo->next;
4875 : 7888158 : undo->next = undobuf.frees;
4876 : 7888158 : undobuf.frees = undo;
4877 : : }
4878 : 4034730 : undobuf.undos = 0;
4879 : 4034730 : }
4880 : :
4881 : : /* Find the innermost point within the rtx at LOC, possibly LOC itself,
4882 : : where we have an arithmetic expression and return that point. LOC will
4883 : : be inside INSN.
4884 : :
4885 : : try_combine will call this function to see if an insn can be split into
4886 : : two insns. */
4887 : :
static rtx *
find_split_point (rtx *loc, rtx_insn *insn, bool set_src)
{
  rtx x = *loc;
  enum rtx_code code = GET_CODE (x);
  rtx *split;
  unsigned HOST_WIDE_INT len = 0;
  HOST_WIDE_INT pos = 0;
  bool unsignedp = false;
  rtx inner = NULL_RTX;
  scalar_int_mode mode, inner_mode;

  /* First special-case some codes.  */
  switch (code)
    {
    case SUBREG:
#ifdef INSN_SCHEDULING
      /* If we are making a paradoxical SUBREG invalid, it becomes a split
	 point.  */
      if (MEM_P (SUBREG_REG (x)))
	return loc;
#endif
      return find_split_point (&SUBREG_REG (x), insn, false);

    case MEM:
      /* If we have (mem (const ..)) or (mem (symbol_ref ...)), split it
	 using LO_SUM and HIGH.  */
      if (HAVE_lo_sum && (GET_CODE (XEXP (x, 0)) == CONST
			  || GET_CODE (XEXP (x, 0)) == SYMBOL_REF))
	{
	  machine_mode address_mode = get_address_mode (x);

	  SUBST (XEXP (x, 0),
		 gen_rtx_LO_SUM (address_mode,
				 gen_rtx_HIGH (address_mode, XEXP (x, 0)),
				 XEXP (x, 0)));
	  return &XEXP (XEXP (x, 0), 0);
	}

      /* If we have a PLUS whose second operand is a constant and the
	 address is not valid, perhaps we can split it up using
	 the machine-specific way to split large constants.  We use
	 the first pseudo-reg (one of the virtual regs) as a placeholder;
	 it will not remain in the result.  */
      if (GET_CODE (XEXP (x, 0)) == PLUS
	  && CONST_INT_P (XEXP (XEXP (x, 0), 1))
	  && ! memory_address_addr_space_p (GET_MODE (x), XEXP (x, 0),
					    MEM_ADDR_SPACE (x)))
	{
	  rtx reg = regno_reg_rtx[FIRST_PSEUDO_REGISTER];
	  unsigned int old_nregs, new_nregs;
	  rtx_insn *seq = combine_split_insns (gen_rtx_SET (reg, XEXP (x, 0)),
					       subst_insn, &old_nregs,
					       &new_nregs);

	  /* This should have produced two insns, each of which sets our
	     placeholder.  If the source of the second is a valid address,
	     we can put both sources together and make a split point
	     in the middle.  */

	  if (seq
	      && NEXT_INSN (seq) != NULL_RTX
	      && NEXT_INSN (NEXT_INSN (seq)) == NULL_RTX
	      && NONJUMP_INSN_P (seq)
	      && GET_CODE (PATTERN (seq)) == SET
	      && SET_DEST (PATTERN (seq)) == reg
	      && ! reg_mentioned_p (reg,
				    SET_SRC (PATTERN (seq)))
	      && NONJUMP_INSN_P (NEXT_INSN (seq))
	      && GET_CODE (PATTERN (NEXT_INSN (seq))) == SET
	      && SET_DEST (PATTERN (NEXT_INSN (seq))) == reg
	      && memory_address_addr_space_p
		   (GET_MODE (x), SET_SRC (PATTERN (NEXT_INSN (seq))),
		    MEM_ADDR_SPACE (x)))
	    {
	      rtx src1 = SET_SRC (PATTERN (seq));
	      rtx src2 = SET_SRC (PATTERN (NEXT_INSN (seq)));

	      /* Replace the placeholder in SRC2 with SRC1.  If we can
		 find where in SRC2 it was placed, that can become our
		 split point and we can replace this address with SRC2.
		 Just try two obvious places.  */

	      src2 = replace_rtx (src2, reg, src1);
	      split = 0;
	      if (XEXP (src2, 0) == src1)
		split = &XEXP (src2, 0);
	      else if (GET_RTX_FORMAT (GET_CODE (XEXP (src2, 0)))[0] == 'e'
		       && XEXP (XEXP (src2, 0), 0) == src1)
		split = &XEXP (XEXP (src2, 0), 0);

	      if (split)
		{
		  SUBST (XEXP (x, 0), src2);
		  return split;
		}
	    }

	  /* If that didn't work and we have a nested plus, like:
	     ((REG1 * CONST1) + REG2) + CONST2 and (REG1 + REG2) + CONST2
	     is valid address, try to split (REG1 * CONST1).
	     The operand is temporarily replaced by the placeholder REG to
	     test address validity, then restored either way.  */
	  if (GET_CODE (XEXP (XEXP (x, 0), 0)) == PLUS
	      && !OBJECT_P (XEXP (XEXP (XEXP (x, 0), 0), 0))
	      && OBJECT_P (XEXP (XEXP (XEXP (x, 0), 0), 1))
	      && ! (GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 0)) == SUBREG
		    && OBJECT_P (SUBREG_REG (XEXP (XEXP (XEXP (x, 0),
							 0), 0)))))
	    {
	      rtx tem = XEXP (XEXP (XEXP (x, 0), 0), 0);
	      XEXP (XEXP (XEXP (x, 0), 0), 0) = reg;
	      if (memory_address_addr_space_p (GET_MODE (x), XEXP (x, 0),
					       MEM_ADDR_SPACE (x)))
		{
		  XEXP (XEXP (XEXP (x, 0), 0), 0) = tem;
		  return &XEXP (XEXP (XEXP (x, 0), 0), 0);
		}
	      XEXP (XEXP (XEXP (x, 0), 0), 0) = tem;
	    }
	  else if (GET_CODE (XEXP (XEXP (x, 0), 0)) == PLUS
		   && OBJECT_P (XEXP (XEXP (XEXP (x, 0), 0), 0))
		   && !OBJECT_P (XEXP (XEXP (XEXP (x, 0), 0), 1))
		   && ! (GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 1)) == SUBREG
			 && OBJECT_P (SUBREG_REG (XEXP (XEXP (XEXP (x, 0),
							      0), 1)))))
	    {
	      rtx tem = XEXP (XEXP (XEXP (x, 0), 0), 1);
	      XEXP (XEXP (XEXP (x, 0), 0), 1) = reg;
	      if (memory_address_addr_space_p (GET_MODE (x), XEXP (x, 0),
					       MEM_ADDR_SPACE (x)))
		{
		  XEXP (XEXP (XEXP (x, 0), 0), 1) = tem;
		  return &XEXP (XEXP (XEXP (x, 0), 0), 1);
		}
	      XEXP (XEXP (XEXP (x, 0), 0), 1) = tem;
	    }

	  /* If that didn't work, perhaps the first operand is complex and
	     needs to be computed separately, so make a split point there.
	     This will occur on machines that just support REG + CONST
	     and have a constant moved through some previous computation.  */
	  if (!OBJECT_P (XEXP (XEXP (x, 0), 0))
	      && ! (GET_CODE (XEXP (XEXP (x, 0), 0)) == SUBREG
		    && OBJECT_P (SUBREG_REG (XEXP (XEXP (x, 0), 0)))))
	    return &XEXP (XEXP (x, 0), 0);
	}

      /* If we have a PLUS whose first operand is complex, try computing it
	 separately by making a split there.  */
      if (GET_CODE (XEXP (x, 0)) == PLUS
	  && ! memory_address_addr_space_p (GET_MODE (x), XEXP (x, 0),
					    MEM_ADDR_SPACE (x))
	  && ! OBJECT_P (XEXP (XEXP (x, 0), 0))
	  && ! (GET_CODE (XEXP (XEXP (x, 0), 0)) == SUBREG
		&& OBJECT_P (SUBREG_REG (XEXP (XEXP (x, 0), 0)))))
	return &XEXP (XEXP (x, 0), 0);
      break;

    case SET:
      /* See if we can split SET_SRC as it stands.  */
      split = find_split_point (&SET_SRC (x), insn, true);
      if (split && split != &SET_SRC (x))
	return split;

      /* See if we can split SET_DEST as it stands.  */
      split = find_split_point (&SET_DEST (x), insn, false);
      if (split && split != &SET_DEST (x))
	return split;

      /* See if this is a bitfield assignment with everything constant.  If
	 so, this is an IOR of an AND, so split it into that.  */
      if (GET_CODE (SET_DEST (x)) == ZERO_EXTRACT
	  && is_a <scalar_int_mode> (GET_MODE (XEXP (SET_DEST (x), 0)),
				     &inner_mode)
	  && HWI_COMPUTABLE_MODE_P (inner_mode)
	  && CONST_INT_P (XEXP (SET_DEST (x), 1))
	  && CONST_INT_P (XEXP (SET_DEST (x), 2))
	  && CONST_INT_P (SET_SRC (x))
	  && ((INTVAL (XEXP (SET_DEST (x), 1))
	       + INTVAL (XEXP (SET_DEST (x), 2)))
	      <= GET_MODE_PRECISION (inner_mode))
	  && ! side_effects_p (XEXP (SET_DEST (x), 0)))
	{
	  /* These POS/LEN deliberately shadow the function-scope ones:
	     they describe the destination bitfield, not an extraction
	     from SET_SRC.  */
	  HOST_WIDE_INT pos = INTVAL (XEXP (SET_DEST (x), 2));
	  unsigned HOST_WIDE_INT len = INTVAL (XEXP (SET_DEST (x), 1));
	  rtx dest = XEXP (SET_DEST (x), 0);
	  unsigned HOST_WIDE_INT mask = (HOST_WIDE_INT_1U << len) - 1;
	  unsigned HOST_WIDE_INT src = INTVAL (SET_SRC (x)) & mask;
	  rtx or_mask;

	  if (BITS_BIG_ENDIAN)
	    pos = GET_MODE_PRECISION (inner_mode) - len - pos;

	  or_mask = gen_int_mode (src << pos, inner_mode);
	  if (src == mask)
	    SUBST (SET_SRC (x),
		   simplify_gen_binary (IOR, inner_mode, dest, or_mask));
	  else
	    {
	      rtx negmask = gen_int_mode (~(mask << pos), inner_mode);
	      SUBST (SET_SRC (x),
		     simplify_gen_binary (IOR, inner_mode,
					  simplify_gen_binary (AND, inner_mode,
							       dest, negmask),
					  or_mask));
	    }

	  SUBST (SET_DEST (x), dest);

	  split = find_split_point (&SET_SRC (x), insn, true);
	  if (split && split != &SET_SRC (x))
	    return split;
	}

      /* Otherwise, see if this is an operation that we can split into two.
	 If so, try to split that.  */
      code = GET_CODE (SET_SRC (x));

      switch (code)
	{
	case AND:
	  /* If we are AND'ing with a large constant that is only a single
	     bit and the result is only being used in a context where we
	     need to know if it is zero or nonzero, replace it with a bit
	     extraction.  This will avoid the large constant, which might
	     have taken more than one insn to make.  If the constant were
	     not a valid argument to the AND but took only one insn to make,
	     this is no worse, but if it took more than one insn, it will
	     be better.  */

	  if (CONST_INT_P (XEXP (SET_SRC (x), 1))
	      && REG_P (XEXP (SET_SRC (x), 0))
	      && (pos = exact_log2 (UINTVAL (XEXP (SET_SRC (x), 1)))) >= 7
	      && REG_P (SET_DEST (x))
	      && (split = find_single_use (SET_DEST (x), insn, NULL)) != 0
	      && (GET_CODE (*split) == EQ || GET_CODE (*split) == NE)
	      && XEXP (*split, 0) == SET_DEST (x)
	      && XEXP (*split, 1) == const0_rtx)
	    {
	      rtx extraction = make_extraction (GET_MODE (SET_DEST (x)),
						XEXP (SET_SRC (x), 0),
						pos, NULL_RTX, 1,
						true, false, false);
	      if (extraction != 0)
		{
		  SUBST (SET_SRC (x), extraction);
		  return find_split_point (loc, insn, false);
		}
	    }
	  break;

	case NE:
	  /* If STORE_FLAG_VALUE is -1, this is (NE X 0) and only one bit of X
	     is known to be on, this can be converted into a NEG of a shift.  */
	  if (STORE_FLAG_VALUE == -1 && XEXP (SET_SRC (x), 1) == const0_rtx
	      && GET_MODE (SET_SRC (x)) == GET_MODE (XEXP (SET_SRC (x), 0))
	      && ((pos = exact_log2 (nonzero_bits (XEXP (SET_SRC (x), 0),
						   GET_MODE (XEXP (SET_SRC (x),
								   0))))) >= 1))
	    {
	      machine_mode mode = GET_MODE (XEXP (SET_SRC (x), 0));
	      rtx pos_rtx = gen_int_shift_amount (mode, pos);
	      SUBST (SET_SRC (x),
		     gen_rtx_NEG (mode,
				  gen_rtx_LSHIFTRT (mode,
						    XEXP (SET_SRC (x), 0),
						    pos_rtx)));

	      split = find_split_point (&SET_SRC (x), insn, true);
	      if (split && split != &SET_SRC (x))
		return split;
	    }
	  break;

	case SIGN_EXTEND:
	  inner = XEXP (SET_SRC (x), 0);

	  /* We can't optimize if either mode is a partial integer
	     mode as we don't know how many bits are significant
	     in those modes.  */
	  if (!is_int_mode (GET_MODE (inner), &inner_mode)
	      || GET_MODE_CLASS (GET_MODE (SET_SRC (x))) == MODE_PARTIAL_INT)
	    break;

	  pos = 0;
	  len = GET_MODE_PRECISION (inner_mode);
	  unsignedp = false;
	  break;

	case SIGN_EXTRACT:
	case ZERO_EXTRACT:
	  if (is_a <scalar_int_mode> (GET_MODE (XEXP (SET_SRC (x), 0)),
				      &inner_mode)
	      && CONST_INT_P (XEXP (SET_SRC (x), 1))
	      && CONST_INT_P (XEXP (SET_SRC (x), 2)))
	    {
	      inner = XEXP (SET_SRC (x), 0);
	      len = INTVAL (XEXP (SET_SRC (x), 1));
	      pos = INTVAL (XEXP (SET_SRC (x), 2));

	      if (BITS_BIG_ENDIAN)
		pos = GET_MODE_PRECISION (inner_mode) - len - pos;
	      unsignedp = (code == ZERO_EXTRACT);
	    }
	  break;

	default:
	  break;
	}

      /* A nonzero LEN means one of the cases above found an extension or
	 extraction of LEN bits at position POS within INNER; rewrite it
	 as explicit shifts (and possibly an AND) so it can be split.  */
      if (len
	  && known_subrange_p (pos, len,
			       0, GET_MODE_PRECISION (GET_MODE (inner)))
	  && is_a <scalar_int_mode> (GET_MODE (SET_SRC (x)), &mode))
	{
	  /* For unsigned, we have a choice of a shift followed by an
	     AND or two shifts.  Use two shifts for field sizes where the
	     constant might be too large.  We assume here that we can
	     always at least get 8-bit constants in an AND insn, which is
	     true for every current RISC.  */

	  if (unsignedp && len <= 8)
	    {
	      unsigned HOST_WIDE_INT mask
		= (HOST_WIDE_INT_1U << len) - 1;
	      rtx pos_rtx = gen_int_shift_amount (mode, pos);
	      SUBST (SET_SRC (x),
		     gen_rtx_AND (mode,
				  gen_rtx_LSHIFTRT
				  (mode, gen_lowpart (mode, inner), pos_rtx),
				  gen_int_mode (mask, mode)));

	      split = find_split_point (&SET_SRC (x), insn, true);
	      if (split && split != &SET_SRC (x))
		return split;
	    }
	  else
	    {
	      int left_bits = GET_MODE_PRECISION (mode) - len - pos;
	      int right_bits = GET_MODE_PRECISION (mode) - len;
	      SUBST (SET_SRC (x),
		     gen_rtx_fmt_ee
		     (unsignedp ? LSHIFTRT : ASHIFTRT, mode,
		      gen_rtx_ASHIFT (mode,
				      gen_lowpart (mode, inner),
				      gen_int_shift_amount (mode, left_bits)),
		      gen_int_shift_amount (mode, right_bits)));

	      split = find_split_point (&SET_SRC (x), insn, true);
	      if (split && split != &SET_SRC (x))
		return split;
	    }
	}

      /* See if this is a simple operation with a constant as the second
	 operand.  It might be that this constant is out of range and hence
	 could be used as a split point.  */
      if (BINARY_P (SET_SRC (x))
	  && CONSTANT_P (XEXP (SET_SRC (x), 1))
	  && (OBJECT_P (XEXP (SET_SRC (x), 0))
	      || (GET_CODE (XEXP (SET_SRC (x), 0)) == SUBREG
		  && OBJECT_P (SUBREG_REG (XEXP (SET_SRC (x), 0))))))
	return &XEXP (SET_SRC (x), 1);

      /* Finally, see if this is a simple operation with its first operand
	 not in a register.  The operation might require this operand in a
	 register, so return it as a split point.  We can always do this
	 because if the first operand were another operation, we would have
	 already found it as a split point.  */
      if ((BINARY_P (SET_SRC (x)) || UNARY_P (SET_SRC (x)))
	  && ! register_operand (XEXP (SET_SRC (x), 0), VOIDmode))
	return &XEXP (SET_SRC (x), 0);

      return 0;

    case AND:
    case IOR:
      /* We write NOR as (and (not A) (not B)), but if we don't have a NOR,
	 it is better to write this as (not (ior A B)) so we can split it.
	 Similarly for IOR.  */
      if (GET_CODE (XEXP (x, 0)) == NOT && GET_CODE (XEXP (x, 1)) == NOT)
	{
	  SUBST (*loc,
		 gen_rtx_NOT (GET_MODE (x),
			      gen_rtx_fmt_ee (code == IOR ? AND : IOR,
					      GET_MODE (x),
					      XEXP (XEXP (x, 0), 0),
					      XEXP (XEXP (x, 1), 0))));
	  return find_split_point (loc, insn, set_src);
	}

      /* Many RISC machines have a large set of logical insns.  If the
	 second operand is a NOT, put it first so we will try to split the
	 other operand first.  */
      if (GET_CODE (XEXP (x, 1)) == NOT)
	{
	  rtx tem = XEXP (x, 0);
	  SUBST (XEXP (x, 0), XEXP (x, 1));
	  SUBST (XEXP (x, 1), tem);
	}
      /* Many targets have `(and (not X) Y)` and/or `(ior (not X) Y)`
	 instructions.  Split at such insns.  However if this is
	 the SET_SRC, we likely do not have such an instruction and it's
	 worthless to try this split.  */
      if (!set_src && GET_CODE (XEXP (x, 0)) == NOT)
	return loc;
      break;

    case PLUS:
    case MINUS:
      /* Canonicalization can produce (minus A (mult B C)), where C is a
	 constant.  It may be better to try splitting (plus (mult B -C) A)
	 instead if this isn't a multiply by a power of two.  */
      if (set_src && code == MINUS && GET_CODE (XEXP (x, 1)) == MULT
	  && GET_CODE (XEXP (XEXP (x, 1), 1)) == CONST_INT
	  && !pow2p_hwi (INTVAL (XEXP (XEXP (x, 1), 1))))
	{
	  machine_mode mode = GET_MODE (x);
	  unsigned HOST_WIDE_INT this_int = INTVAL (XEXP (XEXP (x, 1), 1));
	  HOST_WIDE_INT other_int = trunc_int_for_mode (-this_int, mode);
	  SUBST (*loc, gen_rtx_PLUS (mode,
				     gen_rtx_MULT (mode,
						   XEXP (XEXP (x, 1), 0),
						   gen_int_mode (other_int,
								 mode)),
				     XEXP (x, 0)));
	  return find_split_point (loc, insn, set_src);
	}

      /* Split at a multiply-accumulate instruction.  However if this is
	 the SET_SRC, we likely do not have such an instruction and it's
	 worthless to try this split.  */
      if (!set_src
	  && (GET_CODE (XEXP (x, 0)) == MULT
	      || (GET_CODE (XEXP (x, 0)) == ASHIFT
		  && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT)))
	return loc;
      /* FALLTHRU */

    default:
      break;
    }

  /* Otherwise, select our actions depending on our rtx class.  */
  switch (GET_RTX_CLASS (code))
    {
    case RTX_BITFIELD_OPS:		/* This is ZERO_EXTRACT and SIGN_EXTRACT.  */
    case RTX_TERNARY:
      split = find_split_point (&XEXP (x, 2), insn, false);
      if (split)
	return split;
      /* fall through */
    case RTX_BIN_ARITH:
    case RTX_COMM_ARITH:
    case RTX_COMPARE:
    case RTX_COMM_COMPARE:
      split = find_split_point (&XEXP (x, 1), insn, false);
      if (split)
	return split;
      /* fall through */
    case RTX_UNARY:
      /* Some machines have (and (shift ...) ...) insns.  If X is not
	 an AND, but XEXP (X, 0) is, use it as our split point.  */
      if (GET_CODE (x) != AND && GET_CODE (XEXP (x, 0)) == AND)
	return &XEXP (x, 0);

      split = find_split_point (&XEXP (x, 0), insn, false);
      if (split)
	return split;
      return loc;

    default:
      /* Otherwise, we don't have a split point.  */
      return 0;
    }
}
5361 : :
5362 : : /* Throughout X, replace FROM with TO, and return the result.
5363 : : The result is TO if X is FROM;
5364 : : otherwise the result is X, but its contents may have been modified.
5365 : : If they were modified, a record was made in undobuf so that
5366 : : undo_all will (among other things) return X to its original state.
5367 : :
5368 : : If the number of changes necessary is too much to record to undo,
5369 : : the excess changes are not made, so the result is invalid.
5370 : : The changes already made can still be undone.
5371 : : undobuf.num_undo is incremented for such changes, so by testing that
5372 : : the caller can tell whether the result is valid.
5373 : :
5374 : : `n_occurrences' is incremented each time FROM is replaced.
5375 : :
5376 : : IN_DEST is true if we are processing the SET_DEST of a SET.
5377 : :
5378 : : IN_COND is true if we are at the top level of a condition.
5379 : :
5380 : : UNIQUE_COPY is true if each substitution must be unique. We do this
5381 : : by copying if `n_occurrences' is nonzero. */
5382 : :
5383 : : static rtx
5384 : 407312400 : subst (rtx x, rtx from, rtx to, bool in_dest, bool in_cond, bool unique_copy)
5385 : : {
5386 : 407312400 : enum rtx_code code = GET_CODE (x);
5387 : 407312400 : machine_mode op0_mode = VOIDmode;
5388 : 407312400 : const char *fmt;
5389 : 407312400 : int len, i;
5390 : 407312400 : rtx new_rtx;
5391 : :
5392 : : /* Two expressions are equal if they are identical copies of a shared
5393 : : RTX or if they are both registers with the same register number
5394 : : and mode. */
5395 : :
5396 : : #define COMBINE_RTX_EQUAL_P(X,Y) \
5397 : : ((X) == (Y) \
5398 : : || (REG_P (X) && REG_P (Y) \
5399 : : && REGNO (X) == REGNO (Y) && GET_MODE (X) == GET_MODE (Y)))
5400 : :
5401 : : /* Do not substitute into clobbers of regs -- this will never result in
5402 : : valid RTL. */
5403 : 407312400 : if (GET_CODE (x) == CLOBBER && REG_P (XEXP (x, 0)))
5404 : : return x;
5405 : :
5406 : 397225835 : if (! in_dest && COMBINE_RTX_EQUAL_P (x, from))
5407 : : {
5408 : 0 : n_occurrences++;
5409 : 0 : return (unique_copy && n_occurrences > 1 ? copy_rtx (to) : to);
5410 : : }
5411 : :
5412 : : /* If X and FROM are the same register but different modes, they
5413 : : will not have been seen as equal above. However, the log links code
5414 : : will make a LOG_LINKS entry for that case. If we do nothing, we
5415 : : will try to rerecognize our original insn and, when it succeeds,
5416 : : we will delete the feeding insn, which is incorrect.
5417 : :
5418 : : So force this insn not to match in this (rare) case. */
5419 : 88132275 : if (! in_dest && code == REG && REG_P (from)
5420 : 429109296 : && reg_overlap_mentioned_p (x, from))
5421 : 4211 : return gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);
5422 : :
5423 : : /* If this is an object, we are done unless it is a MEM or LO_SUM, both
5424 : : of which may contain things that can be combined. */
5425 : 397221624 : if (code != MEM && code != LO_SUM && OBJECT_P (x))
5426 : : return x;
5427 : :
5428 : : /* It is possible to have a subexpression appear twice in the insn.
5429 : : Suppose that FROM is a register that appears within TO.
5430 : : Then, after that subexpression has been scanned once by `subst',
5431 : : the second time it is scanned, TO may be found. If we were
5432 : : to scan TO here, we would find FROM within it and create a
5433 : : self-referent rtl structure which is completely wrong. */
5434 : 212418220 : if (COMBINE_RTX_EQUAL_P (x, to))
5435 : : return to;
5436 : :
5437 : : /* Parallel asm_operands need special attention because all of the
5438 : : inputs are shared across the arms. Furthermore, unsharing the
5439 : : rtl results in recognition failures. Failure to handle this case
5440 : : specially can result in circular rtl.
5441 : :
5442 : : Solve this by doing a normal pass across the first entry of the
5443 : : parallel, and only processing the SET_DESTs of the subsequent
5444 : : entries. Ug. */
5445 : :
5446 : 212276923 : if (code == PARALLEL
5447 : 12470685 : && GET_CODE (XVECEXP (x, 0, 0)) == SET
5448 : 10674267 : && GET_CODE (SET_SRC (XVECEXP (x, 0, 0))) == ASM_OPERANDS)
5449 : : {
5450 : 19776 : new_rtx = subst (XVECEXP (x, 0, 0), from, to, false, false, unique_copy);
5451 : :
5452 : : /* If this substitution failed, this whole thing fails. */
5453 : 19776 : if (GET_CODE (new_rtx) == CLOBBER
5454 : 0 : && XEXP (new_rtx, 0) == const0_rtx)
5455 : : return new_rtx;
5456 : :
5457 : 19776 : SUBST (XVECEXP (x, 0, 0), new_rtx);
5458 : :
5459 : 98596 : for (i = XVECLEN (x, 0) - 1; i >= 1; i--)
5460 : : {
5461 : 78820 : rtx dest = SET_DEST (XVECEXP (x, 0, i));
5462 : :
5463 : 78820 : if (!REG_P (dest) && GET_CODE (dest) != PC)
5464 : : {
5465 : 2248 : new_rtx = subst (dest, from, to, false, false, unique_copy);
5466 : :
5467 : : /* If this substitution failed, this whole thing fails. */
5468 : 2248 : if (GET_CODE (new_rtx) == CLOBBER
5469 : 0 : && XEXP (new_rtx, 0) == const0_rtx)
5470 : : return new_rtx;
5471 : :
5472 : 2248 : SUBST (SET_DEST (XVECEXP (x, 0, i)), new_rtx);
5473 : : }
5474 : : }
5475 : : }
5476 : : else
5477 : : {
5478 : 212257147 : len = GET_RTX_LENGTH (code);
5479 : 212257147 : fmt = GET_RTX_FORMAT (code);
5480 : :
5481 : : /* We don't need to process a SET_DEST that is a register or PC, so
5482 : : set up to skip this common case. All other cases where we want
5483 : : to suppress replacing something inside a SET_SRC are handled via
5484 : : the IN_DEST operand. */
5485 : 212257147 : if (code == SET
5486 : 46980599 : && (REG_P (SET_DEST (x))
5487 : 46980599 : || GET_CODE (SET_DEST (x)) == PC))
5488 : 212257147 : fmt = "ie";
5489 : :
5490 : : /* Trying to simplify the operands of a widening MULT is not likely
5491 : : to create RTL matching a machine insn. */
5492 : 212257147 : if (code == MULT
5493 : 4705489 : && (GET_CODE (XEXP (x, 0)) == ZERO_EXTEND
5494 : 4705489 : || GET_CODE (XEXP (x, 0)) == SIGN_EXTEND)
5495 : 278567 : && (GET_CODE (XEXP (x, 1)) == ZERO_EXTEND
5496 : 278567 : || GET_CODE (XEXP (x, 1)) == SIGN_EXTEND)
5497 : 208935 : && REG_P (XEXP (XEXP (x, 0), 0))
5498 : 92950 : && REG_P (XEXP (XEXP (x, 1), 0))
5499 : 74569 : && from == to)
5500 : : return x;
5501 : :
5502 : :
5503 : : /* Get the mode of operand 0 in case X is now a SIGN_EXTEND of a
5504 : : constant. */
5505 : 212214995 : if (fmt[0] == 'e')
5506 : 156345664 : op0_mode = GET_MODE (XEXP (x, 0));
5507 : :
5508 : 629479876 : for (i = 0; i < len; i++)
5509 : : {
5510 : 418095312 : if (fmt[i] == 'E')
5511 : : {
5512 : 14812887 : int j;
5513 : 46808584 : for (j = XVECLEN (x, i) - 1; j >= 0; j--)
5514 : : {
5515 : 32095967 : if (COMBINE_RTX_EQUAL_P (XVECEXP (x, i, j), from))
5516 : : {
5517 : 1592 : new_rtx = (unique_copy && n_occurrences
5518 : 294138 : ? copy_rtx (to) : to);
5519 : 294116 : n_occurrences++;
5520 : : }
5521 : : else
5522 : : {
5523 : 31801851 : new_rtx = subst (XVECEXP (x, i, j), from, to,
5524 : : false, false, unique_copy);
5525 : :
5526 : : /* If this substitution failed, this whole thing
5527 : : fails. */
5528 : 31801851 : if (GET_CODE (new_rtx) == CLOBBER
5529 : 10501461 : && XEXP (new_rtx, 0) == const0_rtx)
5530 : : return new_rtx;
5531 : : }
5532 : :
5533 : 31995697 : SUBST (XVECEXP (x, i, j), new_rtx);
5534 : : }
5535 : : }
5536 : 403282425 : else if (fmt[i] == 'e')
5537 : : {
5538 : : /* If this is a register being set, ignore it. */
5539 : 329463904 : new_rtx = XEXP (x, i);
5540 : 329463904 : if (in_dest
5541 : 329463904 : && i == 0
5542 : 5959213 : && (((code == SUBREG || code == ZERO_EXTRACT)
5543 : 354012 : && REG_P (new_rtx))
5544 : 5607547 : || code == STRICT_LOW_PART))
5545 : : ;
5546 : :
5547 : 329102162 : else if (COMBINE_RTX_EQUAL_P (XEXP (x, i), from))
5548 : : {
5549 : : /* In general, don't install a subreg involving two
5550 : : modes not tieable. It can worsen register
5551 : : allocation, and can even make invalid reload
5552 : : insns, since the reg inside may need to be copied
5553 : : from in the outside mode, and that may be invalid
5554 : : if it is an fp reg copied in integer mode.
5555 : :
5556 : : We allow an exception to this: It is valid if
5557 : : it is inside another SUBREG and the mode of that
5558 : : SUBREG and the mode of the inside of TO is
5559 : : tieable. */
5560 : :
5561 : 47258891 : if (GET_CODE (to) == SUBREG
5562 : 533573 : && !targetm.modes_tieable_p (GET_MODE (to),
5563 : 533573 : GET_MODE (SUBREG_REG (to)))
5564 : 47542038 : && ! (code == SUBREG
5565 : 22771 : && (targetm.modes_tieable_p
5566 : 22771 : (GET_MODE (x), GET_MODE (SUBREG_REG (to))))))
5567 : 258096 : return gen_rtx_CLOBBER (VOIDmode, const0_rtx);
5568 : :
5569 : 47000795 : if (code == SUBREG
5570 : 2421181 : && REG_P (to)
5571 : 98602 : && REGNO (to) < FIRST_PSEUDO_REGISTER
5572 : 47000800 : && simplify_subreg_regno (REGNO (to), GET_MODE (to),
5573 : 5 : SUBREG_BYTE (x),
5574 : 5 : GET_MODE (x)) < 0)
5575 : 0 : return gen_rtx_CLOBBER (VOIDmode, const0_rtx);
5576 : :
5577 : 47000795 : new_rtx = (unique_copy && n_occurrences ? copy_rtx (to) : to);
5578 : 47000795 : n_occurrences++;
5579 : : }
5580 : : else
5581 : : /* If we are in a SET_DEST, suppress most cases unless we
5582 : : have gone inside a MEM, in which case we want to
5583 : : simplify the address. We assume here that things that
5584 : : are actually part of the destination have their inner
5585 : : parts in the first expression. This is true for SUBREG,
5586 : : STRICT_LOW_PART, and ZERO_EXTRACT, which are the only
5587 : : things aside from REG and MEM that should appear in a
5588 : : SET_DEST. */
5589 : 281843271 : new_rtx = subst (XEXP (x, i), from, to,
5590 : : (((in_dest
5591 : 5321450 : && (code == SUBREG || code == STRICT_LOW_PART
5592 : 5321450 : || code == ZERO_EXTRACT))
5593 : 281835718 : || code == SET)
5594 : 48523977 : && i == 0),
5595 : 281843271 : code == IF_THEN_ELSE && i == 0,
5596 : : unique_copy);
5597 : :
5598 : : /* If we found that we will have to reject this combination,
5599 : : indicate that by returning the CLOBBER ourselves, rather than
5600 : : an expression containing it. This will speed things up as
5601 : : well as prevent accidents where two CLOBBERs are considered
5602 : : to be equal, thus producing an incorrect simplification. */
5603 : :
5604 : 329205808 : if (GET_CODE (new_rtx) == CLOBBER && XEXP (new_rtx, 0) == const0_rtx)
5605 : : return new_rtx;
5606 : :
5607 : 328733993 : if (GET_CODE (x) == SUBREG && CONST_SCALAR_INT_P (new_rtx))
5608 : : {
5609 : 31197 : machine_mode mode = GET_MODE (x);
5610 : :
5611 : 62394 : x = simplify_subreg (GET_MODE (x), new_rtx,
5612 : 31197 : GET_MODE (SUBREG_REG (x)),
5613 : 31197 : SUBREG_BYTE (x));
5614 : 31197 : if (! x)
5615 : 2 : x = gen_rtx_CLOBBER (mode, const0_rtx);
5616 : : }
5617 : 328702796 : else if (CONST_SCALAR_INT_P (new_rtx)
5618 : : && (GET_CODE (x) == ZERO_EXTEND
5619 : 59684519 : || GET_CODE (x) == SIGN_EXTEND
5620 : : || GET_CODE (x) == FLOAT
5621 : : || GET_CODE (x) == UNSIGNED_FLOAT))
5622 : : {
5623 : 143100 : x = simplify_unary_operation (GET_CODE (x), GET_MODE (x),
5624 : : new_rtx,
5625 : 71550 : GET_MODE (XEXP (x, 0)));
5626 : 71550 : if (!x)
5627 : 250 : return gen_rtx_CLOBBER (VOIDmode, const0_rtx);
5628 : : }
5629 : : /* CONST_INTs shouldn't be substituted into PRE_DEC, PRE_MODIFY
5630 : : etc. arguments, otherwise we can ICE before trying to recog
5631 : : it. See PR104446. */
5632 : 328631246 : else if (CONST_SCALAR_INT_P (new_rtx)
5633 : 59612969 : && GET_RTX_CLASS (GET_CODE (x)) == RTX_AUTOINC)
5634 : 0 : return gen_rtx_CLOBBER (VOIDmode, const0_rtx);
5635 : : else
5636 : 328631246 : SUBST (XEXP (x, i), new_rtx);
5637 : : }
5638 : : }
5639 : : }
5640 : :
5641 : : /* Check if we are loading something from the constant pool via float
5642 : : extension; in this case we would undo compress_float_constant
5643 : : optimization and degenerate constant load to an immediate value. */
5644 : 211404340 : if (GET_CODE (x) == FLOAT_EXTEND
5645 : 316190 : && MEM_P (XEXP (x, 0))
5646 : 211467555 : && MEM_READONLY_P (XEXP (x, 0)))
5647 : : {
5648 : 35770 : rtx tmp = avoid_constant_pool_reference (x);
5649 : 35770 : if (x != tmp)
5650 : : return x;
5651 : : }
5652 : :
5653 : : /* Try to simplify X. If the simplification changed the code, it is likely
5654 : : that further simplification will help, so loop, but limit the number
5655 : : of repetitions that will be performed. */
5656 : :
5657 : 219220169 : for (i = 0; i < 4; i++)
5658 : : {
5659 : : /* If X is sufficiently simple, don't bother trying to do anything
5660 : : with it. */
5661 : 219210558 : if (code != CONST_INT && code != REG && code != CLOBBER)
5662 : 218554039 : x = combine_simplify_rtx (x, op0_mode, in_dest, in_cond);
5663 : :
5664 : 219210558 : if (GET_CODE (x) == code)
5665 : : break;
5666 : :
5667 : 7851469 : code = GET_CODE (x);
5668 : :
5669 : : /* We no longer know the original mode of operand 0 since we
5670 : : have changed the form of X) */
5671 : 7851469 : op0_mode = VOIDmode;
5672 : : }
5673 : :
5674 : : return x;
5675 : : }
5676 : :
5677 : : /* If X is a commutative operation whose operands are not in the canonical
5678 : : order, use substitutions to swap them. */
5679 : :
5680 : : static void
5681 : 690968164 : maybe_swap_commutative_operands (rtx x)
5682 : : {
5683 : 690968164 : if (COMMUTATIVE_ARITH_P (x)
5684 : 690968164 : && swap_commutative_operands_p (XEXP (x, 0), XEXP (x, 1)))
5685 : : {
5686 : 3606518 : rtx temp = XEXP (x, 0);
5687 : 3606518 : SUBST (XEXP (x, 0), XEXP (x, 1));
5688 : 3606518 : SUBST (XEXP (x, 1), temp);
5689 : : }
5690 : :
5691 : : /* Canonicalize (vec_merge (fma op2 op1 op3) op1 mask) to
5692 : : (vec_merge (fma op1 op2 op3) op1 mask). */
5693 : 690968164 : if (GET_CODE (x) == VEC_MERGE
5694 : 803559 : && GET_CODE (XEXP (x, 0)) == FMA)
5695 : : {
5696 : 25208 : rtx fma_op1 = XEXP (XEXP (x, 0), 0);
5697 : 25208 : rtx fma_op2 = XEXP (XEXP (x, 0), 1);
5698 : 25208 : rtx masked_op = XEXP (x, 1);
5699 : 25208 : if (rtx_equal_p (masked_op, fma_op2))
5700 : : {
5701 : 218 : if (GET_CODE (fma_op1) == NEG)
5702 : : {
5703 : : /* Keep the negate canonicalized to the first operand. */
5704 : 150 : fma_op1 = XEXP (fma_op1, 0);
5705 : 150 : SUBST (XEXP (XEXP (XEXP (x, 0), 0), 0), fma_op2);
5706 : 150 : SUBST (XEXP (XEXP (x, 0), 1), fma_op1);
5707 : : }
5708 : : else
5709 : : {
5710 : 68 : SUBST (XEXP (XEXP (x, 0), 0), fma_op2);
5711 : 68 : SUBST (XEXP (XEXP (x, 0), 1), fma_op1);
5712 : : }
5713 : : }
5714 : : }
5715 : :
5716 : 690968164 : unsigned n_elts = 0;
5717 : 690968164 : if (GET_CODE (x) == VEC_MERGE
5718 : 803559 : && CONST_INT_P (XEXP (x, 2))
5719 : 855550 : && GET_MODE_NUNITS (GET_MODE (x)).is_constant (&n_elts)
5720 : 691395939 : && (swap_commutative_operands_p (XEXP (x, 0), XEXP (x, 1))
5721 : : /* Two operands have same precedence, then
5722 : : first bit of mask select first operand. */
5723 : 393210 : || (!swap_commutative_operands_p (XEXP (x, 1), XEXP (x, 0))
5724 : 101061 : && !(UINTVAL (XEXP (x, 2)) & 1))))
5725 : : {
5726 : 49788 : rtx temp = XEXP (x, 0);
5727 : 49788 : unsigned HOST_WIDE_INT sel = UINTVAL (XEXP (x, 2));
5728 : 49788 : unsigned HOST_WIDE_INT mask = HOST_WIDE_INT_1U;
5729 : 49788 : if (n_elts == HOST_BITS_PER_WIDE_INT)
5730 : : mask = -1;
5731 : : else
5732 : 49649 : mask = (HOST_WIDE_INT_1U << n_elts) - 1;
5733 : 49788 : SUBST (XEXP (x, 0), XEXP (x, 1));
5734 : 49788 : SUBST (XEXP (x, 1), temp);
5735 : 49788 : SUBST (XEXP (x, 2), GEN_INT (~sel & mask));
5736 : : }
5737 : 690968164 : }
5738 : :
5739 : : /* Simplify X, a piece of RTL. We just operate on the expression at the
5740 : : outer level; call `subst' to simplify recursively. Return the new
5741 : : expression.
5742 : :
5743 : : OP0_MODE is the original mode of XEXP (x, 0). IN_DEST is true
5744 : : if we are inside a SET_DEST. IN_COND is true if we are at the top level
5745 : : of a condition. */
5746 : :
5747 : : static rtx
5748 : 218852710 : combine_simplify_rtx (rtx x, machine_mode op0_mode, bool in_dest, bool in_cond)
5749 : : {
5750 : 218852710 : enum rtx_code code = GET_CODE (x);
5751 : 218852710 : machine_mode mode = GET_MODE (x);
5752 : 218852710 : scalar_int_mode int_mode;
5753 : 218852710 : rtx temp;
5754 : 218852710 : int i;
5755 : :
5756 : : /* If this is a commutative operation, put a constant last and a complex
5757 : : expression first. We don't need to do this for comparisons here. */
5758 : 218852710 : maybe_swap_commutative_operands (x);
5759 : :
5760 : : /* Try to fold this expression in case we have constants that weren't
5761 : : present before. */
5762 : 218852710 : temp = 0;
5763 : 218852710 : switch (GET_RTX_CLASS (code))
5764 : : {
5765 : 6881530 : case RTX_UNARY:
5766 : 6881530 : if (op0_mode == VOIDmode)
5767 : 147962 : op0_mode = GET_MODE (XEXP (x, 0));
5768 : 6881530 : temp = simplify_unary_operation (code, mode, XEXP (x, 0), op0_mode);
5769 : 6881530 : break;
5770 : 18016002 : case RTX_COMPARE:
5771 : 18016002 : case RTX_COMM_COMPARE:
5772 : 18016002 : {
5773 : 18016002 : machine_mode cmp_mode = GET_MODE (XEXP (x, 0));
5774 : 18016002 : if (cmp_mode == VOIDmode)
5775 : : {
5776 : 53659 : cmp_mode = GET_MODE (XEXP (x, 1));
5777 : 53659 : if (cmp_mode == VOIDmode)
5778 : 7933 : cmp_mode = op0_mode;
5779 : : }
5780 : 18016002 : temp = simplify_relational_operation (code, mode, cmp_mode,
5781 : : XEXP (x, 0), XEXP (x, 1));
5782 : : }
5783 : 18016002 : break;
5784 : 86432712 : case RTX_COMM_ARITH:
5785 : 86432712 : case RTX_BIN_ARITH:
5786 : 86432712 : temp = simplify_binary_operation (code, mode, XEXP (x, 0), XEXP (x, 1));
5787 : 86432712 : break;
5788 : 14297240 : case RTX_BITFIELD_OPS:
5789 : 14297240 : case RTX_TERNARY:
5790 : 14297240 : temp = simplify_ternary_operation (code, mode, op0_mode, XEXP (x, 0),
5791 : : XEXP (x, 1), XEXP (x, 2));
5792 : 14297240 : break;
5793 : : default:
5794 : : break;
5795 : : }
5796 : :
5797 : 125627484 : if (temp)
5798 : : {
5799 : 16737910 : x = temp;
5800 : 16737910 : code = GET_CODE (temp);
5801 : 16737910 : op0_mode = VOIDmode;
5802 : 16737910 : mode = GET_MODE (temp);
5803 : : }
5804 : :
5805 : : /* If this is a simple operation applied to an IF_THEN_ELSE, try
5806 : : applying it to the arms of the IF_THEN_ELSE. This often simplifies
5807 : : things. Check for cases where both arms are testing the same
5808 : : condition.
5809 : :
5810 : : Don't do anything if all operands are very simple. */
5811 : :
5812 : 218852710 : if ((BINARY_P (x)
5813 : 104225732 : && ((!OBJECT_P (XEXP (x, 0))
5814 : 40418046 : && ! (GET_CODE (XEXP (x, 0)) == SUBREG
5815 : 4613323 : && OBJECT_P (SUBREG_REG (XEXP (x, 0)))))
5816 : 66419718 : || (!OBJECT_P (XEXP (x, 1))
5817 : 4522543 : && ! (GET_CODE (XEXP (x, 1)) == SUBREG
5818 : 1541772 : && OBJECT_P (SUBREG_REG (XEXP (x, 1)))))))
5819 : 177804559 : || (UNARY_P (x)
5820 : 6737375 : && (!OBJECT_P (XEXP (x, 0))
5821 : 2859953 : && ! (GET_CODE (XEXP (x, 0)) == SUBREG
5822 : 604511 : && OBJECT_P (SUBREG_REG (XEXP (x, 0)))))))
5823 : : {
5824 : 43372770 : rtx cond, true_rtx, false_rtx;
5825 : :
5826 : 43372770 : cond = if_then_else_cond (x, &true_rtx, &false_rtx);
5827 : 43372770 : if (cond != 0
5828 : : /* If everything is a comparison, what we have is highly unlikely
5829 : : to be simpler, so don't use it. */
5830 : 4182338 : && ! (COMPARISON_P (x)
5831 : 1279776 : && (COMPARISON_P (true_rtx) || COMPARISON_P (false_rtx)))
5832 : : /* Similarly, if we end up with one of the expressions the same
5833 : : as the original, it is certainly not simpler. */
5834 : 3982360 : && ! rtx_equal_p (x, true_rtx)
5835 : 47355130 : && ! rtx_equal_p (x, false_rtx))
5836 : : {
5837 : 3982360 : rtx cop1 = const0_rtx;
5838 : 3982360 : enum rtx_code cond_code = simplify_comparison (NE, &cond, &cop1);
5839 : :
5840 : 3982360 : if (cond_code == NE && COMPARISON_P (cond))
5841 : 638729 : return x;
5842 : :
5843 : : /* Simplify the alternative arms; this may collapse the true and
5844 : : false arms to store-flag values. Be careful to use copy_rtx
5845 : : here since true_rtx or false_rtx might share RTL with x as a
5846 : : result of the if_then_else_cond call above. */
5847 : 3343631 : true_rtx = subst (copy_rtx (true_rtx), pc_rtx, pc_rtx,
5848 : : false, false, false);
5849 : 3343631 : false_rtx = subst (copy_rtx (false_rtx), pc_rtx, pc_rtx,
5850 : : false, false, false);
5851 : :
5852 : : /* If true_rtx and false_rtx are not general_operands, an if_then_else
5853 : : is unlikely to be simpler. */
5854 : 3343631 : if (general_operand (true_rtx, VOIDmode)
5855 : 3343631 : && general_operand (false_rtx, VOIDmode))
5856 : : {
5857 : 1400815 : enum rtx_code reversed;
5858 : :
5859 : : /* Restarting if we generate a store-flag expression will cause
5860 : : us to loop. Just drop through in this case. */
5861 : :
5862 : : /* If the result values are STORE_FLAG_VALUE and zero, we can
5863 : : just make the comparison operation. */
5864 : 1400815 : if (true_rtx == const_true_rtx && false_rtx == const0_rtx)
5865 : 511369 : x = simplify_gen_relational (cond_code, mode, VOIDmode,
5866 : : cond, cop1);
5867 : 656640 : else if (true_rtx == const0_rtx && false_rtx == const_true_rtx
5868 : 889446 : && ((reversed = reversed_comparison_code_parts
5869 : 576792 : (cond_code, cond, cop1, NULL))
5870 : : != UNKNOWN))
5871 : 576792 : x = simplify_gen_relational (reversed, mode, VOIDmode,
5872 : : cond, cop1);
5873 : :
5874 : : /* Likewise, we can make the negate of a comparison operation
5875 : : if the result values are - STORE_FLAG_VALUE and zero. */
5876 : 312654 : else if (CONST_INT_P (true_rtx)
5877 : 224076 : && INTVAL (true_rtx) == - STORE_FLAG_VALUE
5878 : 40917 : && false_rtx == const0_rtx)
5879 : 38885 : x = simplify_gen_unary (NEG, mode,
5880 : : simplify_gen_relational (cond_code,
5881 : : mode, VOIDmode,
5882 : : cond, cop1),
5883 : : mode);
5884 : 273769 : else if (CONST_INT_P (false_rtx)
5885 : 204324 : && INTVAL (false_rtx) == - STORE_FLAG_VALUE
5886 : 22661 : && true_rtx == const0_rtx
5887 : 273769 : && ((reversed = reversed_comparison_code_parts
5888 : 19903 : (cond_code, cond, cop1, NULL))
5889 : : != UNKNOWN))
5890 : 19900 : x = simplify_gen_unary (NEG, mode,
5891 : : simplify_gen_relational (reversed,
5892 : : mode, VOIDmode,
5893 : : cond, cop1),
5894 : : mode);
5895 : :
5896 : 1400815 : code = GET_CODE (x);
5897 : 1400815 : op0_mode = VOIDmode;
5898 : : }
5899 : : }
5900 : : }
5901 : :
5902 : : /* First see if we can apply the inverse distributive law. */
5903 : 218213981 : if (code == PLUS || code == MINUS
5904 : 218213981 : || code == AND || code == IOR || code == XOR)
5905 : : {
5906 : 49036194 : x = apply_distributive_law (x);
5907 : 49036194 : code = GET_CODE (x);
5908 : 49036194 : op0_mode = VOIDmode;
5909 : : }
5910 : :
5911 : : /* If CODE is an associative operation not otherwise handled, see if we
5912 : : can associate some operands. This can win if they are constants or
5913 : : if they are logically related (i.e. (a & b) & a). */
5914 : 218213981 : if ((code == PLUS || code == MINUS || code == MULT || code == DIV
5915 : : || code == AND || code == IOR || code == XOR
5916 : : || code == SMAX || code == SMIN || code == UMAX || code == UMIN)
5917 : 52977125 : && ((INTEGRAL_MODE_P (mode) && code != DIV)
5918 : 4664829 : || (flag_associative_math && FLOAT_MODE_P (mode))))
5919 : : {
5920 : 48949487 : if (GET_CODE (XEXP (x, 0)) == code)
5921 : : {
5922 : 4276934 : rtx other = XEXP (XEXP (x, 0), 0);
5923 : 4276934 : rtx inner_op0 = XEXP (XEXP (x, 0), 1);
5924 : 4276934 : rtx inner_op1 = XEXP (x, 1);
5925 : 4276934 : rtx inner;
5926 : :
5927 : : /* Make sure we pass the constant operand if any as the second
5928 : : one if this is a commutative operation. */
5929 : 4276934 : if (CONSTANT_P (inner_op0) && COMMUTATIVE_ARITH_P (x))
5930 : : std::swap (inner_op0, inner_op1);
5931 : 4276934 : inner = simplify_binary_operation (code == MINUS ? PLUS
5932 : 4183993 : : code == DIV ? MULT
5933 : : : code,
5934 : : mode, inner_op0, inner_op1);
5935 : :
5936 : : /* For commutative operations, try the other pair if that one
5937 : : didn't simplify. */
5938 : 4276934 : if (inner == 0 && COMMUTATIVE_ARITH_P (x))
5939 : : {
5940 : 4151931 : other = XEXP (XEXP (x, 0), 1);
5941 : 4151931 : inner = simplify_binary_operation (code, mode,
5942 : : XEXP (XEXP (x, 0), 0),
5943 : : XEXP (x, 1));
5944 : : }
5945 : :
5946 : 4241747 : if (inner)
5947 : 235705 : return simplify_gen_binary (code, mode, other, inner);
5948 : : }
5949 : : }
5950 : :
5951 : : /* A little bit of algebraic simplification here. */
5952 : 217978276 : switch (code)
5953 : : {
5954 : 22499312 : case MEM:
5955 : : /* Ensure that our address has any ASHIFTs converted to MULT in case
5956 : : address-recognizing predicates are called later. */
5957 : 22499312 : temp = make_compound_operation (XEXP (x, 0), MEM);
5958 : 22499312 : SUBST (XEXP (x, 0), temp);
5959 : 22499312 : break;
5960 : :
5961 : 8045909 : case SUBREG:
5962 : 8045909 : if (op0_mode == VOIDmode)
5963 : 141441 : op0_mode = GET_MODE (SUBREG_REG (x));
5964 : :
5965 : : /* See if this can be moved to simplify_subreg. */
5966 : 8045909 : if (CONSTANT_P (SUBREG_REG (x))
5967 : 19423 : && known_eq (subreg_lowpart_offset (mode, op0_mode), SUBREG_BYTE (x))
5968 : : /* Don't call gen_lowpart if the inner mode
5969 : : is VOIDmode and we cannot simplify it, as SUBREG without
5970 : : inner mode is invalid. */
5971 : 8065332 : && (GET_MODE (SUBREG_REG (x)) != VOIDmode
5972 : 0 : || gen_lowpart_common (mode, SUBREG_REG (x))))
5973 : 19423 : return gen_lowpart (mode, SUBREG_REG (x));
5974 : :
5975 : 8026486 : if (GET_MODE_CLASS (GET_MODE (SUBREG_REG (x))) == MODE_CC)
5976 : : break;
5977 : 8026486 : {
5978 : 8026486 : rtx temp;
5979 : 16052972 : temp = simplify_subreg (mode, SUBREG_REG (x), op0_mode,
5980 : 8026486 : SUBREG_BYTE (x));
5981 : 8026486 : if (temp)
5982 : 218852710 : return temp;
5983 : :
5984 : : /* If op is known to have all lower bits zero, the result is zero. */
5985 : 7434351 : scalar_int_mode int_mode, int_op0_mode;
5986 : 7434351 : if (!in_dest
5987 : 3999028 : && is_a <scalar_int_mode> (mode, &int_mode)
5988 : 3919472 : && is_a <scalar_int_mode> (op0_mode, &int_op0_mode)
5989 : 3919472 : && (GET_MODE_PRECISION (int_mode)
5990 : 3919472 : < GET_MODE_PRECISION (int_op0_mode))
5991 : 3378076 : && known_eq (subreg_lowpart_offset (int_mode, int_op0_mode),
5992 : : SUBREG_BYTE (x))
5993 : 2905823 : && HWI_COMPUTABLE_MODE_P (int_op0_mode)
5994 : 2690848 : && ((nonzero_bits (SUBREG_REG (x), int_op0_mode)
5995 : 2690848 : & GET_MODE_MASK (int_mode)) == 0)
5996 : 7435151 : && !side_effects_p (SUBREG_REG (x)))
5997 : 800 : return CONST0_RTX (int_mode);
5998 : : }
5999 : :
6000 : : /* Don't change the mode of the MEM if that would change the meaning
6001 : : of the address. */
6002 : 7433551 : if (MEM_P (SUBREG_REG (x))
6003 : 7433551 : && (MEM_VOLATILE_P (SUBREG_REG (x))
6004 : 90072 : || mode_dependent_address_p (XEXP (SUBREG_REG (x), 0),
6005 : 90139 : MEM_ADDR_SPACE (SUBREG_REG (x)))))
6006 : 44056 : return gen_rtx_CLOBBER (mode, const0_rtx);
6007 : :
6008 : : /* Note that we cannot do any narrowing for non-constants since
6009 : : we might have been counting on using the fact that some bits were
6010 : : zero. We now do this in the SET. */
6011 : :
6012 : : break;
6013 : :
6014 : 368609 : case NEG:
6015 : 368609 : temp = expand_compound_operation (XEXP (x, 0));
6016 : :
6017 : : /* For C equal to the width of MODE minus 1, (neg (ashiftrt X C)) can be
6018 : : replaced by (lshiftrt X C). This will convert
6019 : : (neg (sign_extract X 1 Y)) to (zero_extract X 1 Y). */
6020 : :
6021 : 368609 : if (GET_CODE (temp) == ASHIFTRT
6022 : 14154 : && CONST_INT_P (XEXP (temp, 1))
6023 : 396855 : && INTVAL (XEXP (temp, 1)) == GET_MODE_UNIT_PRECISION (mode) - 1)
6024 : 0 : return simplify_shift_const (NULL_RTX, LSHIFTRT, mode, XEXP (temp, 0),
6025 : 0 : INTVAL (XEXP (temp, 1)));
6026 : :
6027 : : /* If X has only a single bit that might be nonzero, say, bit I, convert
6028 : : (neg X) to (ashiftrt (ashift X C-I) C-I) where C is the bitsize of
6029 : : MODE minus 1. This will convert (neg (zero_extract X 1 Y)) to
6030 : : (sign_extract X 1 Y). But only do this if TEMP isn't a register
6031 : : or a SUBREG of one since we'd be making the expression more
6032 : : complex if it was just a register. */
6033 : :
6034 : 368609 : if (!REG_P (temp)
6035 : 178770 : && ! (GET_CODE (temp) == SUBREG
6036 : 20040 : && REG_P (SUBREG_REG (temp)))
6037 : 218989511 : && is_a <scalar_int_mode> (mode, &int_mode)
6038 : 505410 : && (i = exact_log2 (nonzero_bits (temp, int_mode))) >= 0)
6039 : : {
6040 : 59667 : rtx temp1 = simplify_shift_const
6041 : 59667 : (NULL_RTX, ASHIFTRT, int_mode,
6042 : : simplify_shift_const (NULL_RTX, ASHIFT, int_mode, temp,
6043 : 59667 : GET_MODE_PRECISION (int_mode) - 1 - i),
6044 : 59667 : GET_MODE_PRECISION (int_mode) - 1 - i);
6045 : :
6046 : : /* If all we did was surround TEMP with the two shifts, we
6047 : : haven't improved anything, so don't use it. Otherwise,
6048 : : we are better off with TEMP1. */
6049 : 59667 : if (GET_CODE (temp1) != ASHIFTRT
6050 : 59468 : || GET_CODE (XEXP (temp1, 0)) != ASHIFT
6051 : 59430 : || XEXP (XEXP (temp1, 0), 0) != temp)
6052 : : return temp1;
6053 : : }
6054 : : break;
6055 : :
6056 : 9511 : case TRUNCATE:
6057 : : /* We can't handle truncation to a partial integer mode here
6058 : : because we don't know the real bitsize of the partial
6059 : : integer mode. */
6060 : 9511 : if (GET_MODE_CLASS (mode) == MODE_PARTIAL_INT)
6061 : : break;
6062 : :
6063 : 9511 : if (HWI_COMPUTABLE_MODE_P (mode))
6064 : 0 : SUBST (XEXP (x, 0),
6065 : : force_to_mode (XEXP (x, 0), GET_MODE (XEXP (x, 0)),
6066 : : GET_MODE_MASK (mode), false));
6067 : :
6068 : : /* We can truncate a constant value and return it. */
6069 : 9511 : {
6070 : 9511 : poly_int64 c;
6071 : 9511 : if (poly_int_rtx_p (XEXP (x, 0), &c))
6072 : 0 : return gen_int_mode (c, mode);
6073 : : }
6074 : :
6075 : : /* Similarly to what we do in simplify-rtx.cc, a truncate of a register
6076 : : whose value is a comparison can be replaced with a subreg if
6077 : : STORE_FLAG_VALUE permits. */
6078 : 9511 : if (HWI_COMPUTABLE_MODE_P (mode)
6079 : 0 : && (STORE_FLAG_VALUE & ~GET_MODE_MASK (mode)) == 0
6080 : 0 : && (temp = get_last_value (XEXP (x, 0)))
6081 : 0 : && COMPARISON_P (temp)
6082 : 9511 : && TRULY_NOOP_TRUNCATION_MODES_P (mode, GET_MODE (XEXP (x, 0))))
6083 : 0 : return gen_lowpart (mode, XEXP (x, 0));
6084 : : break;
6085 : :
6086 : 5404 : case CONST:
6087 : : /* (const (const X)) can become (const X). Do it this way rather than
6088 : : returning the inner CONST since CONST can be shared with a
6089 : : REG_EQUAL note. */
6090 : 5404 : if (GET_CODE (XEXP (x, 0)) == CONST)
6091 : 0 : SUBST (XEXP (x, 0), XEXP (XEXP (x, 0), 0));
6092 : : break;
6093 : :
6094 : : case LO_SUM:
6095 : : /* Convert (lo_sum (high FOO) FOO) to FOO. This is necessary so we
6096 : : can add in an offset. find_split_point will split this address up
6097 : : again if it doesn't match. */
6098 : : if (HAVE_lo_sum && GET_CODE (XEXP (x, 0)) == HIGH
6099 : : && rtx_equal_p (XEXP (XEXP (x, 0), 0), XEXP (x, 1)))
6100 : : return XEXP (x, 1);
6101 : : break;
6102 : :
6103 : 33370425 : case PLUS:
6104 : : /* (plus (xor (and <foo> (const_int pow2 - 1)) <c>) <-c>)
6105 : : when c is (const_int (pow2 + 1) / 2) is a sign extension of a
6106 : : bit-field and can be replaced by either a sign_extend or a
6107 : : sign_extract. The `and' may be a zero_extend and the two
6108 : : <c>, -<c> constants may be reversed. */
6109 : 33370425 : if (GET_CODE (XEXP (x, 0)) == XOR
6110 : 33370425 : && is_a <scalar_int_mode> (mode, &int_mode)
6111 : 13714 : && CONST_INT_P (XEXP (x, 1))
6112 : 4821 : && CONST_INT_P (XEXP (XEXP (x, 0), 1))
6113 : 4291 : && INTVAL (XEXP (x, 1)) == -INTVAL (XEXP (XEXP (x, 0), 1))
6114 : 77 : && ((i = exact_log2 (UINTVAL (XEXP (XEXP (x, 0), 1)))) >= 0
6115 : 2 : || (i = exact_log2 (UINTVAL (XEXP (x, 1)))) >= 0)
6116 : 39 : && HWI_COMPUTABLE_MODE_P (int_mode)
6117 : 33370464 : && ((GET_CODE (XEXP (XEXP (x, 0), 0)) == AND
6118 : 0 : && CONST_INT_P (XEXP (XEXP (XEXP (x, 0), 0), 1))
6119 : 0 : && (UINTVAL (XEXP (XEXP (XEXP (x, 0), 0), 1))
6120 : 0 : == (HOST_WIDE_INT_1U << (i + 1)) - 1))
6121 : 39 : || (GET_CODE (XEXP (XEXP (x, 0), 0)) == ZERO_EXTEND
6122 : 0 : && known_eq ((GET_MODE_PRECISION
6123 : : (GET_MODE (XEXP (XEXP (XEXP (x, 0), 0), 0)))),
6124 : : (unsigned int) i + 1))))
6125 : 0 : return simplify_shift_const
6126 : 0 : (NULL_RTX, ASHIFTRT, int_mode,
6127 : : simplify_shift_const (NULL_RTX, ASHIFT, int_mode,
6128 : : XEXP (XEXP (XEXP (x, 0), 0), 0),
6129 : 0 : GET_MODE_PRECISION (int_mode) - (i + 1)),
6130 : 0 : GET_MODE_PRECISION (int_mode) - (i + 1));
6131 : :
6132 : : /* If only the low-order bit of X is possibly nonzero, (plus x -1)
6133 : : can become (ashiftrt (ashift (xor x 1) C) C) where C is
6134 : : the bitsize of the mode - 1. This allows simplification of
6135 : : "a = (b & 8) == 0;" */
6136 : 33370425 : if (XEXP (x, 1) == constm1_rtx
6137 : 720389 : && !REG_P (XEXP (x, 0))
6138 : 305252 : && ! (GET_CODE (XEXP (x, 0)) == SUBREG
6139 : 37669 : && REG_P (SUBREG_REG (XEXP (x, 0))))
6140 : 33629655 : && is_a <scalar_int_mode> (mode, &int_mode)
6141 : 33639443 : && nonzero_bits (XEXP (x, 0), int_mode) == 1)
6142 : 9788 : return simplify_shift_const
6143 : 9788 : (NULL_RTX, ASHIFTRT, int_mode,
6144 : : simplify_shift_const (NULL_RTX, ASHIFT, int_mode,
6145 : : gen_rtx_XOR (int_mode, XEXP (x, 0),
6146 : : const1_rtx),
6147 : 9788 : GET_MODE_PRECISION (int_mode) - 1),
6148 : 19576 : GET_MODE_PRECISION (int_mode) - 1);
6149 : :
6150 : : /* If we are adding two things that have no bits in common, convert
6151 : : the addition into an IOR. This will often be further simplified,
6152 : : for example in cases like ((a & 1) + (a & 2)), which can
6153 : : become a & 3. */
6154 : :
6155 : 33360637 : if (HWI_COMPUTABLE_MODE_P (mode)
6156 : 29477628 : && (nonzero_bits (XEXP (x, 0), mode)
6157 : 29477628 : & nonzero_bits (XEXP (x, 1), mode)) == 0)
6158 : : {
6159 : : /* Try to simplify the expression further. */
6160 : 298671 : rtx tor = simplify_gen_binary (IOR, mode, XEXP (x, 0), XEXP (x, 1));
6161 : 298671 : temp = combine_simplify_rtx (tor, VOIDmode, in_dest, false);
6162 : :
6163 : : /* If we could, great. If not, do not go ahead with the IOR
6164 : : replacement, since PLUS appears in many special purpose
6165 : : address arithmetic instructions. */
6166 : 298671 : if (GET_CODE (temp) != CLOBBER
6167 : 298671 : && (GET_CODE (temp) != IOR
6168 : 294388 : || ((XEXP (temp, 0) != XEXP (x, 0)
6169 : 293296 : || XEXP (temp, 1) != XEXP (x, 1))
6170 : 1092 : && (XEXP (temp, 0) != XEXP (x, 1)
6171 : 0 : || XEXP (temp, 1) != XEXP (x, 0)))))
6172 : : return temp;
6173 : : }
6174 : :
6175 : : /* Canonicalize x + x into x << 1. */
6176 : 33355262 : if (GET_MODE_CLASS (mode) == MODE_INT
6177 : 29792260 : && rtx_equal_p (XEXP (x, 0), XEXP (x, 1))
6178 : 33358372 : && !side_effects_p (XEXP (x, 0)))
6179 : 3105 : return simplify_gen_binary (ASHIFT, mode, XEXP (x, 0), const1_rtx);
6180 : :
6181 : : break;
6182 : :
6183 : 3657634 : case MINUS:
6184 : : /* (minus <foo> (and <foo> (const_int -pow2))) becomes
6185 : : (and <foo> (const_int pow2-1)) */
6186 : 3657634 : if (is_a <scalar_int_mode> (mode, &int_mode)
6187 : 3075868 : && GET_CODE (XEXP (x, 1)) == AND
6188 : 107289 : && CONST_INT_P (XEXP (XEXP (x, 1), 1))
6189 : 104589 : && pow2p_hwi (-UINTVAL (XEXP (XEXP (x, 1), 1)))
6190 : 48369 : && rtx_equal_p (XEXP (XEXP (x, 1), 0), XEXP (x, 0)))
6191 : 0 : return simplify_and_const_int (NULL_RTX, int_mode, XEXP (x, 0),
6192 : 0 : -INTVAL (XEXP (XEXP (x, 1), 1)) - 1);
6193 : : break;
6194 : :
6195 : 2975568 : case MULT:
6196 : : /* If we have (mult (plus A B) C), apply the distributive law and then
6197 : : the inverse distributive law to see if things simplify. This
6198 : : occurs mostly in addresses, often when unrolling loops. */
6199 : :
6200 : 2975568 : if (GET_CODE (XEXP (x, 0)) == PLUS)
6201 : : {
6202 : 259127 : rtx result = distribute_and_simplify_rtx (x, 0);
6203 : 259127 : if (result)
6204 : : return result;
6205 : : }
6206 : :
6207 : : /* Try simplify a*(b/c) as (a*b)/c. */
6208 : 2975003 : if (FLOAT_MODE_P (mode) && flag_associative_math
6209 : 201375 : && GET_CODE (XEXP (x, 0)) == DIV)
6210 : : {
6211 : 259 : rtx tem = simplify_binary_operation (MULT, mode,
6212 : : XEXP (XEXP (x, 0), 0),
6213 : : XEXP (x, 1));
6214 : 259 : if (tem)
6215 : 33 : return simplify_gen_binary (DIV, mode, tem, XEXP (XEXP (x, 0), 1));
6216 : : }
6217 : : break;
6218 : :
6219 : 123175 : case UDIV:
6220 : : /* If this is a divide by a power of two, treat it as a shift if
6221 : : its first operand is a shift. */
6222 : 123175 : if (is_a <scalar_int_mode> (mode, &int_mode)
6223 : 123175 : && CONST_INT_P (XEXP (x, 1))
6224 : 2024 : && (i = exact_log2 (UINTVAL (XEXP (x, 1)))) >= 0
6225 : 0 : && (GET_CODE (XEXP (x, 0)) == ASHIFT
6226 : 0 : || GET_CODE (XEXP (x, 0)) == LSHIFTRT
6227 : 0 : || GET_CODE (XEXP (x, 0)) == ASHIFTRT
6228 : 0 : || GET_CODE (XEXP (x, 0)) == ROTATE
6229 : 0 : || GET_CODE (XEXP (x, 0)) == ROTATERT))
6230 : 0 : return simplify_shift_const (NULL_RTX, LSHIFTRT, int_mode,
6231 : 0 : XEXP (x, 0), i);
6232 : : break;
6233 : :
6234 : 17989405 : case EQ: case NE:
6235 : 17989405 : case GT: case GTU: case GE: case GEU:
6236 : 17989405 : case LT: case LTU: case LE: case LEU:
6237 : 17989405 : case UNEQ: case LTGT:
6238 : 17989405 : case UNGT: case UNGE:
6239 : 17989405 : case UNLT: case UNLE:
6240 : 17989405 : case UNORDERED: case ORDERED:
6241 : : /* If the first operand is a condition code, we can't do anything
6242 : : with it. */
6243 : 17989405 : if (GET_CODE (XEXP (x, 0)) == COMPARE
6244 : 17989405 : || GET_MODE_CLASS (GET_MODE (XEXP (x, 0))) != MODE_CC)
6245 : : {
6246 : 13613793 : rtx op0 = XEXP (x, 0);
6247 : 13613793 : rtx op1 = XEXP (x, 1);
6248 : 13613793 : enum rtx_code new_code;
6249 : :
6250 : 13613793 : if (GET_CODE (op0) == COMPARE)
6251 : 0 : op1 = XEXP (op0, 1), op0 = XEXP (op0, 0);
6252 : :
6253 : : /* Simplify our comparison, if possible. */
6254 : 13613793 : new_code = simplify_comparison (code, &op0, &op1);
6255 : :
6256 : : /* If STORE_FLAG_VALUE is 1, we can convert (ne x 0) to simply X
6257 : : if only the low-order bit is possibly nonzero in X (such as when
6258 : : X is a ZERO_EXTRACT of one bit). Similarly, we can convert EQ to
6259 : : (xor X 1) or (minus 1 X); we use the former. Finally, if X is
6260 : : known to be either 0 or -1, NE becomes a NEG and EQ becomes
6261 : : (plus X 1).
6262 : :
6263 : : Remove any ZERO_EXTRACT we made when thinking this was a
6264 : : comparison. It may now be simpler to use, e.g., an AND. If a
6265 : : ZERO_EXTRACT is indeed appropriate, it will be placed back by
6266 : : the call to make_compound_operation in the SET case.
6267 : :
6268 : : Don't apply these optimizations if the caller would
6269 : : prefer a comparison rather than a value.
6270 : : E.g., for the condition in an IF_THEN_ELSE most targets need
6271 : : an explicit comparison. */
6272 : :
6273 : 13613793 : if (in_cond)
6274 : : ;
6275 : :
6276 : 2102489 : else if (STORE_FLAG_VALUE == 1
6277 : : && new_code == NE
6278 : 2520267 : && is_int_mode (mode, &int_mode)
6279 : 418000 : && op1 == const0_rtx
6280 : 218973 : && int_mode == GET_MODE (op0)
6281 : 2188571 : && nonzero_bits (op0, int_mode) == 1)
6282 : 222 : return gen_lowpart (int_mode,
6283 : 460303 : expand_compound_operation (op0));
6284 : :
6285 : 2102267 : else if (STORE_FLAG_VALUE == 1
6286 : : && new_code == NE
6287 : 2519380 : && is_int_mode (mode, &int_mode)
6288 : 417778 : && op1 == const0_rtx
6289 : 218751 : && int_mode == GET_MODE (op0)
6290 : 2188127 : && (num_sign_bit_copies (op0, int_mode)
6291 : 85860 : == GET_MODE_PRECISION (int_mode)))
6292 : : {
6293 : 665 : op0 = expand_compound_operation (op0);
6294 : 665 : return simplify_gen_unary (NEG, int_mode,
6295 : 665 : gen_lowpart (int_mode, op0),
6296 : 665 : int_mode);
6297 : : }
6298 : :
6299 : 2101602 : else if (STORE_FLAG_VALUE == 1
6300 : : && new_code == EQ
6301 : 2406983 : && is_int_mode (mode, &int_mode)
6302 : 307150 : && op1 == const0_rtx
6303 : 137350 : && int_mode == GET_MODE (op0)
6304 : 2148735 : && nonzero_bits (op0, int_mode) == 1)
6305 : : {
6306 : 1769 : op0 = expand_compound_operation (op0);
6307 : 1769 : return simplify_gen_binary (XOR, int_mode,
6308 : 1769 : gen_lowpart (int_mode, op0),
6309 : 1769 : const1_rtx);
6310 : : }
6311 : :
6312 : 2099833 : else if (STORE_FLAG_VALUE == 1
6313 : : && new_code == EQ
6314 : 13915959 : && is_int_mode (mode, &int_mode)
6315 : 305381 : && op1 == const0_rtx
6316 : 135581 : && int_mode == GET_MODE (op0)
6317 : 2145197 : && (num_sign_bit_copies (op0, int_mode)
6318 : 45364 : == GET_MODE_PRECISION (int_mode)))
6319 : : {
6320 : 559 : op0 = expand_compound_operation (op0);
6321 : 559 : return plus_constant (int_mode, gen_lowpart (int_mode, op0), 1);
6322 : : }
6323 : :
6324 : : /* If STORE_FLAG_VALUE is -1, we have cases similar to
6325 : : those above. */
6326 : 13610578 : if (in_cond)
6327 : : ;
6328 : :
6329 : 13610578 : else if (STORE_FLAG_VALUE == -1
6330 : : && new_code == NE
6331 : : && is_int_mode (mode, &int_mode)
6332 : : && op1 == const0_rtx
6333 : : && int_mode == GET_MODE (op0)
6334 : : && (num_sign_bit_copies (op0, int_mode)
6335 : : == GET_MODE_PRECISION (int_mode)))
6336 : : return gen_lowpart (int_mode, expand_compound_operation (op0));
6337 : :
6338 : 13610578 : else if (STORE_FLAG_VALUE == -1
6339 : : && new_code == NE
6340 : : && is_int_mode (mode, &int_mode)
6341 : : && op1 == const0_rtx
6342 : : && int_mode == GET_MODE (op0)
6343 : : && nonzero_bits (op0, int_mode) == 1)
6344 : : {
6345 : : op0 = expand_compound_operation (op0);
6346 : : return simplify_gen_unary (NEG, int_mode,
6347 : : gen_lowpart (int_mode, op0),
6348 : : int_mode);
6349 : : }
6350 : :
6351 : 13610578 : else if (STORE_FLAG_VALUE == -1
6352 : : && new_code == EQ
6353 : : && is_int_mode (mode, &int_mode)
6354 : : && op1 == const0_rtx
6355 : : && int_mode == GET_MODE (op0)
6356 : : && (num_sign_bit_copies (op0, int_mode)
6357 : : == GET_MODE_PRECISION (int_mode)))
6358 : : {
6359 : : op0 = expand_compound_operation (op0);
6360 : : return simplify_gen_unary (NOT, int_mode,
6361 : : gen_lowpart (int_mode, op0),
6362 : : int_mode);
6363 : : }
6364 : :
6365 : : /* If X is 0/1, (eq X 0) is X-1. */
6366 : 13610578 : else if (STORE_FLAG_VALUE == -1
6367 : : && new_code == EQ
6368 : : && is_int_mode (mode, &int_mode)
6369 : : && op1 == const0_rtx
6370 : : && int_mode == GET_MODE (op0)
6371 : : && nonzero_bits (op0, int_mode) == 1)
6372 : : {
6373 : : op0 = expand_compound_operation (op0);
6374 : : return plus_constant (int_mode, gen_lowpart (int_mode, op0), -1);
6375 : : }
6376 : :
6377 : : /* If STORE_FLAG_VALUE says to just test the sign bit and X has just
6378 : : one bit that might be nonzero, we can convert (ne x 0) to
6379 : : (ashift x c) where C puts the bit in the sign bit. Remove any
6380 : : AND with STORE_FLAG_VALUE when we are done, since we are only
6381 : : going to test the sign bit. */
6382 : 13610578 : if (new_code == NE
6383 : 14023788 : && is_int_mode (mode, &int_mode)
6384 : 417193 : && HWI_COMPUTABLE_MODE_P (int_mode)
6385 : 413210 : && val_signbit_p (int_mode, STORE_FLAG_VALUE)
6386 : 0 : && op1 == const0_rtx
6387 : 0 : && int_mode == GET_MODE (op0)
6388 : 13610578 : && (i = exact_log2 (nonzero_bits (op0, int_mode))) >= 0)
6389 : : {
6390 : 0 : x = simplify_shift_const (NULL_RTX, ASHIFT, int_mode,
6391 : : expand_compound_operation (op0),
6392 : 0 : GET_MODE_PRECISION (int_mode) - 1 - i);
6393 : 0 : if (GET_CODE (x) == AND && XEXP (x, 1) == const_true_rtx)
6394 : 0 : return XEXP (x, 0);
6395 : : else
6396 : : return x;
6397 : : }
6398 : :
6399 : : /* If the code changed, return a whole new comparison.
6400 : : We also need to avoid using SUBST in cases where
6401 : : simplify_comparison has widened a comparison with a CONST_INT,
6402 : : since in that case the wider CONST_INT may fail the sanity
6403 : : checks in do_SUBST. */
6404 : 13610578 : if (new_code != code
6405 : 13163249 : || (CONST_INT_P (op1)
6406 : 7339393 : && GET_MODE (op0) != GET_MODE (XEXP (x, 0))
6407 : 11075 : && GET_MODE (op0) != GET_MODE (XEXP (x, 1))))
6408 : 457088 : return gen_rtx_fmt_ee (new_code, mode, op0, op1);
6409 : :
6410 : : /* Otherwise, keep this operation, but maybe change its operands.
6411 : : This also converts (ne (compare FOO BAR) 0) to (ne FOO BAR). */
6412 : 13153490 : SUBST (XEXP (x, 0), op0);
6413 : 13153490 : SUBST (XEXP (x, 1), op1);
6414 : : }
6415 : : break;
6416 : :
6417 : 13145168 : case IF_THEN_ELSE:
6418 : 13145168 : return simplify_if_then_else (x);
6419 : :
6420 : 4672578 : case ZERO_EXTRACT:
6421 : 4672578 : case SIGN_EXTRACT:
6422 : 4672578 : case ZERO_EXTEND:
6423 : 4672578 : case SIGN_EXTEND:
6424 : : /* If we are processing SET_DEST, we are done. */
6425 : 4672578 : if (in_dest)
6426 : : return x;
6427 : :
6428 : 4669901 : return expand_compound_operation (x);
6429 : :
6430 : 46667382 : case SET:
6431 : 46667382 : return simplify_set (x);
6432 : :
6433 : 11181809 : case AND:
6434 : 11181809 : case IOR:
6435 : 11181809 : return simplify_logical (x);
6436 : :
6437 : 13326704 : case ASHIFT:
6438 : 13326704 : case LSHIFTRT:
6439 : 13326704 : case ASHIFTRT:
6440 : 13326704 : case ROTATE:
6441 : 13326704 : case ROTATERT:
6442 : : /* If this is a shift by a constant amount, simplify it. */
6443 : 13326704 : if (CONST_INT_P (XEXP (x, 1)))
6444 : 12858813 : return simplify_shift_const (x, code, mode, XEXP (x, 0),
6445 : 12858813 : INTVAL (XEXP (x, 1)));
6446 : :
6447 : : else if (SHIFT_COUNT_TRUNCATED && !REG_P (XEXP (x, 1)))
6448 : : SUBST (XEXP (x, 1),
6449 : : force_to_mode (XEXP (x, 1), GET_MODE (XEXP (x, 1)),
6450 : : (HOST_WIDE_INT_1U
6451 : : << exact_log2 (GET_MODE_UNIT_BITSIZE
6452 : : (GET_MODE (x)))) - 1, false));
6453 : : break;
6454 : 1814734 : case VEC_SELECT:
6455 : 1814734 : {
6456 : 1814734 : rtx trueop0 = XEXP (x, 0);
6457 : 1814734 : mode = GET_MODE (trueop0);
6458 : 1814734 : rtx trueop1 = XEXP (x, 1);
6459 : : /* If we select a low-part subreg, return that. */
6460 : 1814734 : if (vec_series_lowpart_p (GET_MODE (x), mode, trueop1))
6461 : : {
6462 : 999 : rtx new_rtx = lowpart_subreg (GET_MODE (x), trueop0, mode);
6463 : 999 : if (new_rtx != NULL_RTX)
6464 : : return new_rtx;
6465 : : }
6466 : : }
6467 : :
6468 : : default:
6469 : : break;
6470 : : }
6471 : :
6472 : : return x;
6473 : : }
6474 : :
6475 : : /* Simplify X, an IF_THEN_ELSE expression. Return the new expression. */
6476 : :
6477 : : static rtx
6478 : 13145168 : simplify_if_then_else (rtx x)
6479 : : {
6480 : 13145168 : machine_mode mode = GET_MODE (x);
6481 : 13145168 : rtx cond = XEXP (x, 0);
6482 : 13145168 : rtx true_rtx = XEXP (x, 1);
6483 : 13145168 : rtx false_rtx = XEXP (x, 2);
6484 : 13145168 : enum rtx_code true_code = GET_CODE (cond);
6485 : 13145168 : bool comparison_p = COMPARISON_P (cond);
6486 : 13145168 : rtx temp;
6487 : 13145168 : int i;
6488 : 13145168 : enum rtx_code false_code;
6489 : 13145168 : rtx reversed;
6490 : 13145168 : scalar_int_mode int_mode, inner_mode;
6491 : :
6492 : : /* Simplify storing of the truth value. */
6493 : 13145168 : if (comparison_p && true_rtx == const_true_rtx && false_rtx == const0_rtx)
6494 : 0 : return simplify_gen_relational (true_code, mode, VOIDmode,
6495 : 0 : XEXP (cond, 0), XEXP (cond, 1));
6496 : :
6497 : : /* Also when the truth value has to be reversed. */
6498 : 13144623 : if (comparison_p
6499 : 13144623 : && true_rtx == const0_rtx && false_rtx == const_true_rtx
6500 : 0 : && (reversed = reversed_comparison (cond, mode)))
6501 : : return reversed;
6502 : :
6503 : : /* Sometimes we can simplify the arm of an IF_THEN_ELSE if a register used
6504 : : in it is being compared against certain values. Get the true and false
6505 : : comparisons and see if that says anything about the value of each arm. */
6506 : :
6507 : 13145168 : if (comparison_p
6508 : 13144623 : && ((false_code = reversed_comparison_code (cond, NULL))
6509 : : != UNKNOWN)
6510 : 26142618 : && REG_P (XEXP (cond, 0)))
6511 : : {
6512 : 8087737 : HOST_WIDE_INT nzb;
6513 : 8087737 : rtx from = XEXP (cond, 0);
6514 : 8087737 : rtx true_val = XEXP (cond, 1);
6515 : 8087737 : rtx false_val = true_val;
6516 : 8087737 : bool swapped = false;
6517 : :
6518 : : /* If FALSE_CODE is EQ, swap the codes and arms. */
6519 : :
6520 : 8087737 : if (false_code == EQ)
6521 : : {
6522 : 2939265 : swapped = true, true_code = EQ, false_code = NE;
6523 : 2939265 : std::swap (true_rtx, false_rtx);
6524 : : }
6525 : :
6526 : 8087737 : scalar_int_mode from_mode;
6527 : 8087737 : if (is_a <scalar_int_mode> (GET_MODE (from), &from_mode))
6528 : : {
6529 : : /* If we are comparing against zero and the expression being
6530 : : tested has only a single bit that might be nonzero, that is
6531 : : its value when it is not equal to zero. Similarly if it is
6532 : : known to be -1 or 0. */
6533 : 6812297 : if (true_code == EQ
6534 : 4964326 : && true_val == const0_rtx
6535 : 8835927 : && pow2p_hwi (nzb = nonzero_bits (from, from_mode)))
6536 : : {
6537 : 209477 : false_code = EQ;
6538 : 209477 : false_val = gen_int_mode (nzb, from_mode);
6539 : : }
6540 : 6602820 : else if (true_code == EQ
6541 : 4754849 : && true_val == const0_rtx
6542 : 8416973 : && (num_sign_bit_copies (from, from_mode)
6543 : 1814153 : == GET_MODE_PRECISION (from_mode)))
6544 : : {
6545 : 664 : false_code = EQ;
6546 : 664 : false_val = constm1_rtx;
6547 : : }
6548 : : }
6549 : :
6550 : : /* Now simplify an arm if we know the value of the register in the
6551 : : branch and it is used in the arm. Be careful due to the potential
6552 : : of locally-shared RTL. */
6553 : :
6554 : 8087737 : if (reg_mentioned_p (from, true_rtx))
6555 : 300707 : true_rtx = subst (known_cond (copy_rtx (true_rtx), true_code,
6556 : : from, true_val),
6557 : : pc_rtx, pc_rtx, false, false, false);
6558 : 8087737 : if (reg_mentioned_p (from, false_rtx))
6559 : 104434 : false_rtx = subst (known_cond (copy_rtx (false_rtx), false_code,
6560 : : from, false_val),
6561 : : pc_rtx, pc_rtx, false, false, false);
6562 : :
6563 : 13236209 : SUBST (XEXP (x, 1), swapped ? false_rtx : true_rtx);
6564 : 13236209 : SUBST (XEXP (x, 2), swapped ? true_rtx : false_rtx);
6565 : :
6566 : 8087737 : true_rtx = XEXP (x, 1);
6567 : 8087737 : false_rtx = XEXP (x, 2);
6568 : 8087737 : true_code = GET_CODE (cond);
6569 : : }
6570 : :
6571 : : /* If we have (if_then_else FOO (pc) (label_ref BAR)) and FOO can be
6572 : : reversed, do so to avoid needing two sets of patterns for
6573 : : subtract-and-branch insns. Similarly if we have a constant in the true
6574 : : arm, the false arm is the same as the first operand of the comparison, or
6575 : : the false arm is more complicated than the true arm. */
6576 : :
6577 : 13145168 : if (comparison_p
6578 : 13144623 : && reversed_comparison_code (cond, NULL) != UNKNOWN
6579 : 26142618 : && (true_rtx == pc_rtx
6580 : 12997450 : || (CONSTANT_P (true_rtx)
6581 : 10900163 : && !CONST_INT_P (false_rtx) && false_rtx != pc_rtx)
6582 : 12964754 : || true_rtx == const0_rtx
6583 : 12964532 : || (OBJECT_P (true_rtx) && !OBJECT_P (false_rtx))
6584 : 12925807 : || (GET_CODE (true_rtx) == SUBREG && OBJECT_P (SUBREG_REG (true_rtx))
6585 : 13926 : && !OBJECT_P (false_rtx))
6586 : 12923521 : || reg_mentioned_p (true_rtx, false_rtx)
6587 : 12923391 : || rtx_equal_p (false_rtx, XEXP (cond, 0))))
6588 : : {
6589 : 101709 : SUBST (XEXP (x, 0), reversed_comparison (cond, GET_MODE (cond)));
6590 : 101709 : SUBST (XEXP (x, 1), false_rtx);
6591 : 101709 : SUBST (XEXP (x, 2), true_rtx);
6592 : :
6593 : 101709 : std::swap (true_rtx, false_rtx);
6594 : 101709 : cond = XEXP (x, 0);
6595 : :
6596 : : /* It is possible that the conditional has been simplified out. */
6597 : 101709 : true_code = GET_CODE (cond);
6598 : 101709 : comparison_p = COMPARISON_P (cond);
6599 : : }
6600 : :
6601 : : /* If the two arms are identical, we don't need the comparison. */
6602 : :
6603 : 13145168 : if (rtx_equal_p (true_rtx, false_rtx) && ! side_effects_p (cond))
6604 : : return true_rtx;
6605 : :
6606 : : /* Convert a == b ? b : a to "a". */
6607 : 4036291 : if (true_code == EQ && ! side_effects_p (cond)
6608 : 4010313 : && !HONOR_NANS (mode)
6609 : 3940091 : && rtx_equal_p (XEXP (cond, 0), false_rtx)
6610 : 13145531 : && rtx_equal_p (XEXP (cond, 1), true_rtx))
6611 : : return false_rtx;
6612 : 4690273 : else if (true_code == NE && ! side_effects_p (cond)
6613 : 4648858 : && !HONOR_NANS (mode)
6614 : 4643267 : && rtx_equal_p (XEXP (cond, 0), true_rtx)
6615 : 13207902 : && rtx_equal_p (XEXP (cond, 1), false_rtx))
6616 : : return true_rtx;
6617 : :
6618 : : /* Look for cases where we have (abs x) or (neg (abs X)). */
6619 : :
6620 : 13145162 : if (GET_MODE_CLASS (mode) == MODE_INT
6621 : 1968310 : && comparison_p
6622 : 1968290 : && XEXP (cond, 1) == const0_rtx
6623 : 1508719 : && GET_CODE (false_rtx) == NEG
6624 : 134 : && rtx_equal_p (true_rtx, XEXP (false_rtx, 0))
6625 : 15 : && rtx_equal_p (true_rtx, XEXP (cond, 0))
6626 : 13145177 : && ! side_effects_p (true_rtx))
6627 : 15 : switch (true_code)
6628 : : {
6629 : 15 : case GT:
6630 : 15 : case GE:
6631 : 15 : return simplify_gen_unary (ABS, mode, true_rtx, mode);
6632 : 0 : case LT:
6633 : 0 : case LE:
6634 : 0 : return
6635 : 0 : simplify_gen_unary (NEG, mode,
6636 : : simplify_gen_unary (ABS, mode, true_rtx, mode),
6637 : 0 : mode);
6638 : : default:
6639 : : break;
6640 : : }
6641 : :
6642 : : /* Look for MIN or MAX. */
6643 : :
6644 : 13145147 : if ((! FLOAT_MODE_P (mode)
6645 : 84734 : || (flag_unsafe_math_optimizations
6646 : 370 : && !HONOR_NANS (mode)
6647 : 370 : && !HONOR_SIGNED_ZEROS (mode)))
6648 : 13060783 : && comparison_p
6649 : 13060406 : && rtx_equal_p (XEXP (cond, 0), true_rtx)
6650 : 105405 : && rtx_equal_p (XEXP (cond, 1), false_rtx)
6651 : 13132 : && ! side_effects_p (cond))
6652 : 13128 : switch (true_code)
6653 : : {
6654 : 4998 : case GE:
6655 : 4998 : case GT:
6656 : 4998 : return simplify_gen_binary (SMAX, mode, true_rtx, false_rtx);
6657 : 4524 : case LE:
6658 : 4524 : case LT:
6659 : 4524 : return simplify_gen_binary (SMIN, mode, true_rtx, false_rtx);
6660 : 2715 : case GEU:
6661 : 2715 : case GTU:
6662 : 2715 : return simplify_gen_binary (UMAX, mode, true_rtx, false_rtx);
6663 : 891 : case LEU:
6664 : 891 : case LTU:
6665 : 891 : return simplify_gen_binary (UMIN, mode, true_rtx, false_rtx);
6666 : : default:
6667 : : break;
6668 : : }
6669 : :
6670 : : /* If we have (if_then_else COND (OP Z C1) Z) and OP is an identity when its
6671 : : second operand is zero, this can be done as (OP Z (mult COND C2)) where
6672 : : C2 = C1 * STORE_FLAG_VALUE. Similarly if OP has an outer ZERO_EXTEND or
6673 : : SIGN_EXTEND as long as Z is already extended (so we don't destroy it).
6674 : : We can do this kind of thing in some cases when STORE_FLAG_VALUE is
6675 : : neither 1 or -1, but it isn't worth checking for. */
6676 : :
6677 : 13132019 : if ((STORE_FLAG_VALUE == 1 || STORE_FLAG_VALUE == -1)
6678 : : && comparison_p
6679 : 15006083 : && is_int_mode (mode, &int_mode)
6680 : 15087188 : && ! side_effects_p (x))
6681 : : {
6682 : 1951200 : rtx t = make_compound_operation (true_rtx, SET);
6683 : 1951200 : rtx f = make_compound_operation (false_rtx, SET);
6684 : 1951200 : rtx cond_op0 = XEXP (cond, 0);
6685 : 1951200 : rtx cond_op1 = XEXP (cond, 1);
6686 : 1951200 : enum rtx_code op = UNKNOWN, extend_op = UNKNOWN;
6687 : 1951200 : scalar_int_mode m = int_mode;
6688 : 1951200 : rtx z = 0, c1 = NULL_RTX;
6689 : :
6690 : 1951200 : if ((GET_CODE (t) == PLUS || GET_CODE (t) == MINUS
6691 : : || GET_CODE (t) == IOR || GET_CODE (t) == XOR
6692 : : || GET_CODE (t) == ASHIFT
6693 : : || GET_CODE (t) == LSHIFTRT || GET_CODE (t) == ASHIFTRT)
6694 : 199179 : && rtx_equal_p (XEXP (t, 0), f))
6695 : 72740 : c1 = XEXP (t, 1), op = GET_CODE (t), z = f;
6696 : :
6697 : : /* If an identity-zero op is commutative, check whether there
6698 : : would be a match if we swapped the operands. */
6699 : 1813938 : else if ((GET_CODE (t) == PLUS || GET_CODE (t) == IOR
6700 : 1800275 : || GET_CODE (t) == XOR)
6701 : 1892770 : && rtx_equal_p (XEXP (t, 1), f))
6702 : 8365 : c1 = XEXP (t, 0), op = GET_CODE (t), z = f;
6703 : 1870095 : else if (GET_CODE (t) == SIGN_EXTEND
6704 : 1848 : && is_a <scalar_int_mode> (GET_MODE (XEXP (t, 0)), &inner_mode)
6705 : 1848 : && (GET_CODE (XEXP (t, 0)) == PLUS
6706 : 1848 : || GET_CODE (XEXP (t, 0)) == MINUS
6707 : : || GET_CODE (XEXP (t, 0)) == IOR
6708 : : || GET_CODE (XEXP (t, 0)) == XOR
6709 : : || GET_CODE (XEXP (t, 0)) == ASHIFT
6710 : : || GET_CODE (XEXP (t, 0)) == LSHIFTRT
6711 : : || GET_CODE (XEXP (t, 0)) == ASHIFTRT)
6712 : 80 : && GET_CODE (XEXP (XEXP (t, 0), 0)) == SUBREG
6713 : 54 : && subreg_lowpart_p (XEXP (XEXP (t, 0), 0))
6714 : 54 : && rtx_equal_p (SUBREG_REG (XEXP (XEXP (t, 0), 0)), f)
6715 : 1870095 : && (num_sign_bit_copies (f, GET_MODE (f))
6716 : 0 : > (unsigned int)
6717 : 0 : (GET_MODE_PRECISION (int_mode)
6718 : 0 : - GET_MODE_PRECISION (inner_mode))))
6719 : : {
6720 : 0 : c1 = XEXP (XEXP (t, 0), 1); z = f; op = GET_CODE (XEXP (t, 0));
6721 : 0 : extend_op = SIGN_EXTEND;
6722 : 0 : m = inner_mode;
6723 : : }
6724 : 1870095 : else if (GET_CODE (t) == SIGN_EXTEND
6725 : 1848 : && is_a <scalar_int_mode> (GET_MODE (XEXP (t, 0)), &inner_mode)
6726 : 1848 : && (GET_CODE (XEXP (t, 0)) == PLUS
6727 : 1774 : || GET_CODE (XEXP (t, 0)) == IOR
6728 : 1770 : || GET_CODE (XEXP (t, 0)) == XOR)
6729 : 78 : && GET_CODE (XEXP (XEXP (t, 0), 1)) == SUBREG
6730 : 4 : && subreg_lowpart_p (XEXP (XEXP (t, 0), 1))
6731 : 4 : && rtx_equal_p (SUBREG_REG (XEXP (XEXP (t, 0), 1)), f)
6732 : 1870099 : && (num_sign_bit_copies (f, GET_MODE (f))
6733 : 4 : > (unsigned int)
6734 : 4 : (GET_MODE_PRECISION (int_mode)
6735 : 4 : - GET_MODE_PRECISION (inner_mode))))
6736 : : {
6737 : 0 : c1 = XEXP (XEXP (t, 0), 0); z = f; op = GET_CODE (XEXP (t, 0));
6738 : 0 : extend_op = SIGN_EXTEND;
6739 : 0 : m = inner_mode;
6740 : : }
6741 : 1870095 : else if (GET_CODE (t) == ZERO_EXTEND
6742 : 4371 : && is_a <scalar_int_mode> (GET_MODE (XEXP (t, 0)), &inner_mode)
6743 : 4371 : && (GET_CODE (XEXP (t, 0)) == PLUS
6744 : 4371 : || GET_CODE (XEXP (t, 0)) == MINUS
6745 : : || GET_CODE (XEXP (t, 0)) == IOR
6746 : : || GET_CODE (XEXP (t, 0)) == XOR
6747 : : || GET_CODE (XEXP (t, 0)) == ASHIFT
6748 : : || GET_CODE (XEXP (t, 0)) == LSHIFTRT
6749 : : || GET_CODE (XEXP (t, 0)) == ASHIFTRT)
6750 : 973 : && GET_CODE (XEXP (XEXP (t, 0), 0)) == SUBREG
6751 : 103 : && HWI_COMPUTABLE_MODE_P (int_mode)
6752 : 103 : && subreg_lowpart_p (XEXP (XEXP (t, 0), 0))
6753 : 103 : && rtx_equal_p (SUBREG_REG (XEXP (XEXP (t, 0), 0)), f)
6754 : 1870095 : && ((nonzero_bits (f, GET_MODE (f))
6755 : 0 : & ~GET_MODE_MASK (inner_mode))
6756 : : == 0))
6757 : : {
6758 : 0 : c1 = XEXP (XEXP (t, 0), 1); z = f; op = GET_CODE (XEXP (t, 0));
6759 : 0 : extend_op = ZERO_EXTEND;
6760 : 0 : m = inner_mode;
6761 : : }
6762 : 1870095 : else if (GET_CODE (t) == ZERO_EXTEND
6763 : 4371 : && is_a <scalar_int_mode> (GET_MODE (XEXP (t, 0)), &inner_mode)
6764 : 4371 : && (GET_CODE (XEXP (t, 0)) == PLUS
6765 : 3970 : || GET_CODE (XEXP (t, 0)) == IOR
6766 : 3970 : || GET_CODE (XEXP (t, 0)) == XOR)
6767 : 401 : && GET_CODE (XEXP (XEXP (t, 0), 1)) == SUBREG
6768 : 16 : && HWI_COMPUTABLE_MODE_P (int_mode)
6769 : 16 : && subreg_lowpart_p (XEXP (XEXP (t, 0), 1))
6770 : 16 : && rtx_equal_p (SUBREG_REG (XEXP (XEXP (t, 0), 1)), f)
6771 : 1870095 : && ((nonzero_bits (f, GET_MODE (f))
6772 : 0 : & ~GET_MODE_MASK (inner_mode))
6773 : : == 0))
6774 : : {
6775 : 0 : c1 = XEXP (XEXP (t, 0), 0); z = f; op = GET_CODE (XEXP (t, 0));
6776 : 0 : extend_op = ZERO_EXTEND;
6777 : 0 : m = inner_mode;
6778 : : }
6779 : :
6780 : 81105 : if (z)
6781 : : {
6782 : 81105 : machine_mode cm = m;
6783 : 81105 : if ((op == ASHIFT || op == LSHIFTRT || op == ASHIFTRT)
6784 : 2062 : && GET_MODE (c1) != VOIDmode)
6785 : 1562 : cm = GET_MODE (c1);
6786 : 81105 : temp = subst (simplify_gen_relational (true_code, cm, VOIDmode,
6787 : : cond_op0, cond_op1),
6788 : : pc_rtx, pc_rtx, false, false, false);
6789 : 81105 : temp = simplify_gen_binary (MULT, cm, temp,
6790 : : simplify_gen_binary (MULT, cm, c1,
6791 : : const_true_rtx));
6792 : 81105 : temp = subst (temp, pc_rtx, pc_rtx, false, false, false);
6793 : 81105 : temp = simplify_gen_binary (op, m, gen_lowpart (m, z), temp);
6794 : :
6795 : 81105 : if (extend_op != UNKNOWN)
6796 : 0 : temp = simplify_gen_unary (extend_op, int_mode, temp, m);
6797 : :
6798 : 81105 : return temp;
6799 : : }
6800 : : }
6801 : :
6802 : : /* If we have (if_then_else (ne A 0) C1 0) and either A is known to be 0 or
6803 : : 1 and C1 is a single bit or A is known to be 0 or -1 and C1 is the
6804 : : negation of a single bit, we can convert this operation to a shift. We
6805 : : can actually do this more generally, but it doesn't seem worth it. */
6806 : :
6807 : 13050914 : if (true_code == NE
6808 : 13050913 : && is_a <scalar_int_mode> (mode, &int_mode)
6809 : 426643 : && XEXP (cond, 1) == const0_rtx
6810 : 298667 : && false_rtx == const0_rtx
6811 : 46359 : && CONST_INT_P (true_rtx)
6812 : 13051344 : && ((nonzero_bits (XEXP (cond, 0), int_mode) == 1
6813 : 2 : && (i = exact_log2 (UINTVAL (true_rtx))) >= 0)
6814 : 429 : || ((num_sign_bit_copies (XEXP (cond, 0), int_mode)
6815 : 429 : == GET_MODE_PRECISION (int_mode))
6816 : 0 : && (i = exact_log2 (-UINTVAL (true_rtx))) >= 0)))
6817 : 1 : return
6818 : 1 : simplify_shift_const (NULL_RTX, ASHIFT, int_mode,
6819 : 2 : gen_lowpart (int_mode, XEXP (cond, 0)), i);
6820 : :
6821 : : /* (IF_THEN_ELSE (NE A 0) C1 0) is A or a zero-extend of A if the only
6822 : : non-zero bit in A is C1. */
6823 : 4672550 : if (true_code == NE && XEXP (cond, 1) == const0_rtx
6824 : 2075784 : && false_rtx == const0_rtx && CONST_INT_P (true_rtx)
6825 : 13145597 : && is_a <scalar_int_mode> (mode, &int_mode)
6826 : 429 : && is_a <scalar_int_mode> (GET_MODE (XEXP (cond, 0)), &inner_mode)
6827 : 41 : && (UINTVAL (true_rtx) & GET_MODE_MASK (int_mode))
6828 : 41 : == nonzero_bits (XEXP (cond, 0), inner_mode)
6829 : 13050913 : && (i = exact_log2 (UINTVAL (true_rtx) & GET_MODE_MASK (int_mode))) >= 0)
6830 : : {
6831 : 0 : rtx val = XEXP (cond, 0);
6832 : 0 : if (inner_mode == int_mode)
6833 : : return val;
6834 : 0 : else if (GET_MODE_PRECISION (inner_mode) < GET_MODE_PRECISION (int_mode))
6835 : 0 : return simplify_gen_unary (ZERO_EXTEND, int_mode, val, inner_mode);
6836 : : }
6837 : :
6838 : : return x;
6839 : : }
6840 : :
6841 : : /* Simplify X, a SET expression. Return the new expression. */
6842 : :
6843 : : static rtx
6844 : 46667382 : simplify_set (rtx x)
6845 : : {
6846 : 46667382 : rtx src = SET_SRC (x);
6847 : 46667382 : rtx dest = SET_DEST (x);
6848 : 104701320 : machine_mode mode
6849 : 46667382 : = GET_MODE (src) != VOIDmode ? GET_MODE (src) : GET_MODE (dest);
6850 : 46667382 : rtx_insn *other_insn;
6851 : 46667382 : rtx *cc_use;
6852 : 46667382 : scalar_int_mode int_mode;
6853 : :
6854 : : /* (set (pc) (return)) gets written as (return). */
6855 : 46667382 : if (GET_CODE (dest) == PC && ANY_RETURN_P (src))
6856 : : return src;
6857 : :
6858 : : /* Now that we know for sure which bits of SRC we are using, see if we can
6859 : : simplify the expression for the object knowing that we only need the
6860 : : low-order bits. */
6861 : :
6862 : 46667382 : if (GET_MODE_CLASS (mode) == MODE_INT && HWI_COMPUTABLE_MODE_P (mode))
6863 : : {
6864 : 20427814 : src = force_to_mode (src, mode, HOST_WIDE_INT_M1U, false);
6865 : 20427814 : SUBST (SET_SRC (x), src);
6866 : : }
6867 : :
6868 : : /* If the source is a COMPARE, look for the use of the comparison result
6869 : : and try to simplify it unless we already have used undobuf.other_insn. */
6870 : 40000257 : if ((GET_MODE_CLASS (mode) == MODE_CC || GET_CODE (src) == COMPARE)
6871 : 6667125 : && (cc_use = find_single_use (dest, subst_insn, &other_insn)) != 0
6872 : 6087236 : && (undobuf.other_insn == 0 || other_insn == undobuf.other_insn)
6873 : 6087236 : && COMPARISON_P (*cc_use)
6874 : 52754134 : && rtx_equal_p (XEXP (*cc_use, 0), dest))
6875 : : {
6876 : 6085216 : enum rtx_code old_code = GET_CODE (*cc_use);
6877 : 6085216 : enum rtx_code new_code;
6878 : 6085216 : rtx op0, op1, tmp;
6879 : 6085216 : bool other_changed = false;
6880 : 6085216 : rtx inner_compare = NULL_RTX;
6881 : 6085216 : machine_mode compare_mode = GET_MODE (dest);
6882 : :
6883 : 6085216 : if (GET_CODE (src) == COMPARE)
6884 : : {
6885 : 5654648 : op0 = XEXP (src, 0), op1 = XEXP (src, 1);
6886 : 5654648 : if (GET_CODE (op0) == COMPARE && op1 == const0_rtx)
6887 : : {
6888 : 0 : inner_compare = op0;
6889 : 0 : op0 = XEXP (inner_compare, 0), op1 = XEXP (inner_compare, 1);
6890 : : }
6891 : : }
6892 : : else
6893 : 430568 : op0 = src, op1 = CONST0_RTX (GET_MODE (src));
6894 : :
6895 : 6085216 : tmp = simplify_relational_operation (old_code, compare_mode, VOIDmode,
6896 : : op0, op1);
6897 : 6085216 : if (!tmp)
6898 : : new_code = old_code;
6899 : 470118 : else if (!CONSTANT_P (tmp))
6900 : : {
6901 : 464976 : new_code = GET_CODE (tmp);
6902 : 464976 : op0 = XEXP (tmp, 0);
6903 : 464976 : op1 = XEXP (tmp, 1);
6904 : : }
6905 : : else
6906 : : {
6907 : 5142 : rtx pat = PATTERN (other_insn);
6908 : 5142 : undobuf.other_insn = other_insn;
6909 : 5142 : SUBST (*cc_use, tmp);
6910 : :
6911 : : /* Attempt to simplify CC user. */
6912 : 5142 : if (GET_CODE (pat) == SET)
6913 : : {
6914 : 4644 : rtx new_rtx = simplify_rtx (SET_SRC (pat));
6915 : 4644 : if (new_rtx != NULL_RTX)
6916 : 4052 : SUBST (SET_SRC (pat), new_rtx);
6917 : : }
6918 : :
6919 : : /* Convert X into a no-op move. */
6920 : 5142 : SUBST (SET_DEST (x), pc_rtx);
6921 : 5142 : SUBST (SET_SRC (x), pc_rtx);
6922 : 5142 : return x;
6923 : : }
6924 : :
6925 : : /* Simplify our comparison, if possible. */
6926 : 6080074 : new_code = simplify_comparison (new_code, &op0, &op1);
6927 : :
6928 : : #ifdef SELECT_CC_MODE
6929 : : /* If this machine has CC modes other than CCmode, check to see if we
6930 : : need to use a different CC mode here. */
6931 : 6080074 : if (GET_MODE_CLASS (GET_MODE (op0)) == MODE_CC)
6932 : 645561 : compare_mode = GET_MODE (op0);
6933 : 5434513 : else if (inner_compare
6934 : 0 : && GET_MODE_CLASS (GET_MODE (inner_compare)) == MODE_CC
6935 : 0 : && new_code == old_code
6936 : 0 : && op0 == XEXP (inner_compare, 0)
6937 : 0 : && op1 == XEXP (inner_compare, 1))
6938 : 0 : compare_mode = GET_MODE (inner_compare);
6939 : : else
6940 : 5434513 : compare_mode = SELECT_CC_MODE (new_code, op0, op1);
6941 : :
6942 : : /* If the mode changed, we have to change SET_DEST, the mode in the
6943 : : compare, and the mode in the place SET_DEST is used. If SET_DEST is
6944 : : a hard register, just build new versions with the proper mode. If it
6945 : : is a pseudo, we lose unless it is only time we set the pseudo, in
6946 : : which case we can safely change its mode. */
6947 : 6080074 : if (compare_mode != GET_MODE (dest))
6948 : : {
6949 : 219843 : if (can_change_dest_mode (dest, 0, compare_mode))
6950 : : {
6951 : 219843 : unsigned int regno = REGNO (dest);
6952 : 219843 : rtx new_dest;
6953 : :
6954 : 219843 : if (regno < FIRST_PSEUDO_REGISTER)
6955 : 219843 : new_dest = gen_rtx_REG (compare_mode, regno);
6956 : : else
6957 : : {
6958 : 0 : subst_mode (regno, compare_mode);
6959 : 0 : new_dest = regno_reg_rtx[regno];
6960 : : }
6961 : :
6962 : 219843 : SUBST (SET_DEST (x), new_dest);
6963 : 219843 : SUBST (XEXP (*cc_use, 0), new_dest);
6964 : 219843 : other_changed = true;
6965 : :
6966 : 219843 : dest = new_dest;
6967 : : }
6968 : : }
6969 : : #endif /* SELECT_CC_MODE */
6970 : :
6971 : : /* If the code changed, we have to build a new comparison in
6972 : : undobuf.other_insn. */
6973 : 6080074 : if (new_code != old_code)
6974 : : {
6975 : 609914 : bool other_changed_previously = other_changed;
6976 : 609914 : unsigned HOST_WIDE_INT mask;
6977 : 609914 : rtx old_cc_use = *cc_use;
6978 : :
6979 : 609914 : SUBST (*cc_use, gen_rtx_fmt_ee (new_code, GET_MODE (*cc_use),
6980 : : dest, const0_rtx));
6981 : 609914 : other_changed = true;
6982 : :
6983 : : /* If the only change we made was to change an EQ into an NE or
6984 : : vice versa, OP0 has only one bit that might be nonzero, and OP1
6985 : : is zero, check if changing the user of the condition code will
6986 : : produce a valid insn. If it won't, we can keep the original code
6987 : : in that insn by surrounding our operation with an XOR. */
6988 : :
6989 : 609914 : if (((old_code == NE && new_code == EQ)
6990 : 580072 : || (old_code == EQ && new_code == NE))
6991 : 64432 : && ! other_changed_previously && op1 == const0_rtx
6992 : 61790 : && HWI_COMPUTABLE_MODE_P (GET_MODE (op0))
6993 : 617925 : && pow2p_hwi (mask = nonzero_bits (op0, GET_MODE (op0))))
6994 : : {
6995 : 8000 : rtx pat = PATTERN (other_insn), note = 0;
6996 : :
6997 : 8000 : if ((recog_for_combine (&pat, other_insn, &note) < 0
6998 : 8000 : && ! check_asm_operands (pat)))
6999 : : {
7000 : 4 : *cc_use = old_cc_use;
7001 : 4 : other_changed = false;
7002 : :
7003 : 4 : op0 = simplify_gen_binary (XOR, GET_MODE (op0), op0,
7004 : 4 : gen_int_mode (mask,
7005 : 4 : GET_MODE (op0)));
7006 : : }
7007 : : }
7008 : : }
7009 : :
7010 : 5478160 : if (other_changed)
7011 : 632202 : undobuf.other_insn = other_insn;
7012 : :
7013 : : /* Don't generate a compare of a CC with 0, just use that CC. */
7014 : 6080074 : if (GET_MODE (op0) == compare_mode && op1 == const0_rtx)
7015 : : {
7016 : 645561 : SUBST (SET_SRC (x), op0);
7017 : 645561 : src = SET_SRC (x);
7018 : : }
7019 : : /* Otherwise, if we didn't previously have the same COMPARE we
7020 : : want, create it from scratch. */
7021 : 5434513 : else if (GET_CODE (src) != COMPARE || GET_MODE (src) != compare_mode
7022 : 5308643 : || XEXP (src, 0) != op0 || XEXP (src, 1) != op1)
7023 : : {
7024 : 1431042 : SUBST (SET_SRC (x), gen_rtx_COMPARE (compare_mode, op0, op1));
7025 : 1431042 : src = SET_SRC (x);
7026 : : }
7027 : : }
7028 : : else
7029 : : {
7030 : : /* Get SET_SRC in a form where we have placed back any
7031 : : compound expressions. Then do the checks below. */
7032 : 40582166 : src = make_compound_operation (src, SET);
7033 : 40582166 : SUBST (SET_SRC (x), src);
7034 : : }
7035 : :
7036 : : /* If we have (set x (subreg:m1 (op:m2 ...) 0)) with OP being some operation,
7037 : : and X being a REG or (subreg (reg)), we may be able to convert this to
7038 : : (set (subreg:m2 x) (op)).
7039 : :
7040 : : We can always do this if M1 is narrower than M2 because that means that
7041 : : we only care about the low bits of the result.
7042 : :
7043 : : However, on machines without WORD_REGISTER_OPERATIONS defined, we cannot
7044 : : perform a narrower operation than requested since the high-order bits will
7045 : : be undefined. On machine where it is defined, this transformation is safe
7046 : : as long as M1 and M2 have the same number of words. */
7047 : :
7048 : 402918 : if (GET_CODE (src) == SUBREG && subreg_lowpart_p (src)
7049 : 387082 : && !OBJECT_P (SUBREG_REG (src))
7050 : : && (known_equal_after_align_up
7051 : 243822 : (GET_MODE_SIZE (GET_MODE (src)),
7052 : 487644 : GET_MODE_SIZE (GET_MODE (SUBREG_REG (src))),
7053 : 243822 : UNITS_PER_WORD))
7054 : 215547 : && (WORD_REGISTER_OPERATIONS || !paradoxical_subreg_p (src))
7055 : 211629 : && ! (REG_P (dest) && REGNO (dest) < FIRST_PSEUDO_REGISTER
7056 : 221 : && !REG_CAN_CHANGE_MODE_P (REGNO (dest),
7057 : : GET_MODE (SUBREG_REG (src)),
7058 : : GET_MODE (src)))
7059 : 46873648 : && (REG_P (dest)
7060 : 98306 : || (GET_CODE (dest) == SUBREG
7061 : 272 : && REG_P (SUBREG_REG (dest)))))
7062 : : {
7063 : 113374 : SUBST (SET_DEST (x),
7064 : : gen_lowpart (GET_MODE (SUBREG_REG (src)),
7065 : : dest));
7066 : 113374 : SUBST (SET_SRC (x), SUBREG_REG (src));
7067 : :
7068 : 113374 : src = SET_SRC (x), dest = SET_DEST (x);
7069 : : }
7070 : :
7071 : : /* If we have (set FOO (subreg:M (mem:N BAR) 0)) with M wider than N, this
7072 : : would require a paradoxical subreg. Replace the subreg with a
7073 : : zero_extend to avoid the reload that would otherwise be required.
7074 : : Don't do this unless we have a scalar integer mode, otherwise the
7075 : : transformation is incorrect. */
7076 : :
7077 : 46662240 : enum rtx_code extend_op;
7078 : 46662240 : if (paradoxical_subreg_p (src)
7079 : : && MEM_P (SUBREG_REG (src))
7080 : : && SCALAR_INT_MODE_P (GET_MODE (src))
7081 : : && (extend_op = load_extend_op (GET_MODE (SUBREG_REG (src)))) != UNKNOWN)
7082 : : {
7083 : : SUBST (SET_SRC (x),
7084 : : gen_rtx_fmt_e (extend_op, GET_MODE (src), SUBREG_REG (src)));
7085 : :
7086 : : src = SET_SRC (x);
7087 : : }
7088 : :
7089 : : /* If we don't have a conditional move, SET_SRC is an IF_THEN_ELSE, and we
7090 : : are comparing an item known to be 0 or -1 against 0, use a logical
7091 : : operation instead. Check for one of the arms being an IOR of the other
7092 : : arm with some value. We compute three terms to be IOR'ed together. In
7093 : : practice, at most two will be nonzero. Then we do the IOR's. */
7094 : :
7095 : 46662240 : if (GET_CODE (dest) != PC
7096 : 35684401 : && GET_CODE (src) == IF_THEN_ELSE
7097 : 1165095 : && is_int_mode (GET_MODE (src), &int_mode)
7098 : 1070259 : && (GET_CODE (XEXP (src, 0)) == EQ || GET_CODE (XEXP (src, 0)) == NE)
7099 : 461667 : && XEXP (XEXP (src, 0), 1) == const0_rtx
7100 : 316652 : && int_mode == GET_MODE (XEXP (XEXP (src, 0), 0))
7101 : 98561 : && (!HAVE_conditional_move
7102 : 98561 : || ! can_conditionally_move_p (int_mode))
7103 : 0 : && (num_sign_bit_copies (XEXP (XEXP (src, 0), 0), int_mode)
7104 : 0 : == GET_MODE_PRECISION (int_mode))
7105 : 46662240 : && ! side_effects_p (src))
7106 : : {
7107 : 0 : rtx true_rtx = (GET_CODE (XEXP (src, 0)) == NE
7108 : 0 : ? XEXP (src, 1) : XEXP (src, 2));
7109 : 0 : rtx false_rtx = (GET_CODE (XEXP (src, 0)) == NE
7110 : 0 : ? XEXP (src, 2) : XEXP (src, 1));
7111 : 0 : rtx term1 = const0_rtx, term2, term3;
7112 : :
7113 : 0 : if (GET_CODE (true_rtx) == IOR
7114 : 0 : && rtx_equal_p (XEXP (true_rtx, 0), false_rtx))
7115 : 0 : term1 = false_rtx, true_rtx = XEXP (true_rtx, 1), false_rtx = const0_rtx;
7116 : 0 : else if (GET_CODE (true_rtx) == IOR
7117 : 0 : && rtx_equal_p (XEXP (true_rtx, 1), false_rtx))
7118 : 0 : term1 = false_rtx, true_rtx = XEXP (true_rtx, 0), false_rtx = const0_rtx;
7119 : 0 : else if (GET_CODE (false_rtx) == IOR
7120 : 0 : && rtx_equal_p (XEXP (false_rtx, 0), true_rtx))
7121 : 0 : term1 = true_rtx, false_rtx = XEXP (false_rtx, 1), true_rtx = const0_rtx;
7122 : 0 : else if (GET_CODE (false_rtx) == IOR
7123 : 0 : && rtx_equal_p (XEXP (false_rtx, 1), true_rtx))
7124 : 0 : term1 = true_rtx, false_rtx = XEXP (false_rtx, 0), true_rtx = const0_rtx;
7125 : :
7126 : 0 : term2 = simplify_gen_binary (AND, int_mode,
7127 : 0 : XEXP (XEXP (src, 0), 0), true_rtx);
7128 : 0 : term3 = simplify_gen_binary (AND, int_mode,
7129 : : simplify_gen_unary (NOT, int_mode,
7130 : 0 : XEXP (XEXP (src, 0), 0),
7131 : : int_mode),
7132 : : false_rtx);
7133 : :
7134 : 0 : SUBST (SET_SRC (x),
7135 : : simplify_gen_binary (IOR, int_mode,
7136 : : simplify_gen_binary (IOR, int_mode,
7137 : : term1, term2),
7138 : : term3));
7139 : :
7140 : 0 : src = SET_SRC (x);
7141 : : }
7142 : :
7143 : : /* If either SRC or DEST is a CLOBBER of (const_int 0), make this
7144 : : whole thing fail. */
7145 : 46662240 : if (GET_CODE (src) == CLOBBER && XEXP (src, 0) == const0_rtx)
7146 : : return src;
7147 : 46662217 : else if (GET_CODE (dest) == CLOBBER && XEXP (dest, 0) == const0_rtx)
7148 : : return dest;
7149 : : else
7150 : : /* Convert this into a field assignment operation, if possible. */
7151 : 46662217 : return make_field_assignment (x);
7152 : : }
7153 : :
7154 : : /* Simplify, X, and AND, IOR, or XOR operation, and return the simplified
7155 : : result. */
7156 : :
7157 : : static rtx
7158 : 11181809 : simplify_logical (rtx x)
7159 : : {
7160 : 11181809 : rtx op0 = XEXP (x, 0);
7161 : 11181809 : rtx op1 = XEXP (x, 1);
7162 : 11181809 : scalar_int_mode mode;
7163 : :
7164 : 11181809 : switch (GET_CODE (x))
7165 : : {
7166 : 6885140 : case AND:
7167 : : /* We can call simplify_and_const_int only if we don't lose
7168 : : any (sign) bits when converting INTVAL (op1) to
7169 : : "unsigned HOST_WIDE_INT". */
7170 : 6885140 : if (is_a <scalar_int_mode> (GET_MODE (x), &mode)
7171 : 6383475 : && CONST_INT_P (op1)
7172 : 4984606 : && (HWI_COMPUTABLE_MODE_P (mode)
7173 : 8738 : || INTVAL (op1) > 0))
7174 : : {
7175 : 4981355 : x = simplify_and_const_int (x, mode, op0, INTVAL (op1));
7176 : 4981355 : if (GET_CODE (x) != AND)
7177 : : return x;
7178 : :
7179 : 4950309 : op0 = XEXP (x, 0);
7180 : 4950309 : op1 = XEXP (x, 1);
7181 : : }
7182 : :
7183 : : /* If we have any of (and (ior A B) C) or (and (xor A B) C),
7184 : : apply the distributive law and then the inverse distributive
7185 : : law to see if things simplify. */
7186 : 6854094 : if (GET_CODE (op0) == IOR || GET_CODE (op0) == XOR)
7187 : : {
7188 : 122579 : rtx result = distribute_and_simplify_rtx (x, 0);
7189 : 122579 : if (result)
7190 : : return result;
7191 : : }
7192 : 6841047 : if (GET_CODE (op1) == IOR || GET_CODE (op1) == XOR)
7193 : : {
7194 : 1809 : rtx result = distribute_and_simplify_rtx (x, 1);
7195 : 1809 : if (result)
7196 : : return result;
7197 : : }
7198 : : break;
7199 : :
7200 : 4296669 : case IOR:
7201 : : /* If we have (ior (and A B) C), apply the distributive law and then
7202 : : the inverse distributive law to see if things simplify. */
7203 : :
7204 : 4296669 : if (GET_CODE (op0) == AND)
7205 : : {
7206 : 1212860 : rtx result = distribute_and_simplify_rtx (x, 0);
7207 : 1212860 : if (result)
7208 : : return result;
7209 : : }
7210 : :
7211 : 4293997 : if (GET_CODE (op1) == AND)
7212 : : {
7213 : 65186 : rtx result = distribute_and_simplify_rtx (x, 1);
7214 : 65186 : if (result)
7215 : : return result;
7216 : : }
7217 : : break;
7218 : :
7219 : 0 : default:
7220 : 0 : gcc_unreachable ();
7221 : : }
7222 : :
7223 : : return x;
7224 : : }
7225 : :
7226 : : /* We consider ZERO_EXTRACT, SIGN_EXTRACT, and SIGN_EXTEND as "compound
7227 : : operations" because they can be replaced with two more basic operations.
7228 : : ZERO_EXTEND is also considered "compound" because it can be replaced with
7229 : : an AND operation, which is simpler, though only one operation.
7230 : :
7231 : : The function expand_compound_operation is called with an rtx expression
7232 : : and will convert it to the appropriate shifts and AND operations,
7233 : : simplifying at each stage.
7234 : :
7235 : : The function make_compound_operation is called to convert an expression
7236 : : consisting of shifts and ANDs into the equivalent compound expression.
7237 : : It is the inverse of this function, loosely speaking. */
7238 : :
/* Expand X, a compound operation (ZERO_EXTEND, SIGN_EXTEND, ZERO_EXTRACT
   or SIGN_EXTRACT), into the equivalent pair of shifts (or, for some
   ZERO_EXTEND cases, something simpler), simplifying as we go.
   Return X unchanged whenever the transformation is unsafe or cannot be
   represented.  */

static rtx
expand_compound_operation (rtx x)
{
  /* POS/LEN describe the bit-field being extracted, counted from the
     least significant bit.  For the extension cases POS stays 0 and LEN
     is the precision of the inner mode.  */
  unsigned HOST_WIDE_INT pos = 0, len;
  bool unsignedp = false;
  unsigned int modewidth;
  rtx tem;
  scalar_int_mode inner_mode;

  switch (GET_CODE (x))
    {
    case ZERO_EXTEND:
      unsignedp = true;
      /* FALLTHRU */
    case SIGN_EXTEND:
      /* We can't necessarily use a const_int for a multiword mode;
	 it depends on implicitly extending the value.
	 Since we don't know the right way to extend it,
	 we can't tell whether the implicit way is right.

	 Even for a mode that is no wider than a const_int,
	 we can't win, because we need to sign extend one of its bits through
	 the rest of it, and we don't know which bit.  */
      if (CONST_INT_P (XEXP (x, 0)))
	return x;

      /* Reject modes that aren't scalar integers because turning vector
	 or complex modes into shifts causes problems.  */
      if (!is_a <scalar_int_mode> (GET_MODE (XEXP (x, 0)), &inner_mode))
	return x;

      /* Return if (subreg:MODE FROM 0) is not a safe replacement for
	 (zero_extend:MODE FROM) or (sign_extend:MODE FROM).  It is for any MEM
	 because (SUBREG (MEM...)) is guaranteed to cause the MEM to be
	 reloaded.  If not for that, MEM's would very rarely be safe.

	 Reject modes bigger than a word, because we might not be able
	 to reference a two-register group starting with an arbitrary register
	 (and currently gen_lowpart might crash for a SUBREG).  */

      if (GET_MODE_SIZE (inner_mode) > UNITS_PER_WORD)
	return x;

      len = GET_MODE_PRECISION (inner_mode);
      /* If the inner object has VOIDmode (the only way this can happen
	 is if it is an ASM_OPERANDS), we can't do anything since we don't
	 know how much masking to do.  */
      if (len == 0)
	return x;

      break;

    case ZERO_EXTRACT:
      unsignedp = true;

      /* fall through */

    case SIGN_EXTRACT:
      /* If the operand is a CLOBBER, just return it.  */
      if (GET_CODE (XEXP (x, 0)) == CLOBBER)
	return XEXP (x, 0);

      /* Only constant length and position are handled here.  */
      if (!CONST_INT_P (XEXP (x, 1))
	  || !CONST_INT_P (XEXP (x, 2)))
	return x;

      /* Reject modes that aren't scalar integers because turning vector
	 or complex modes into shifts causes problems.  */
      if (!is_a <scalar_int_mode> (GET_MODE (XEXP (x, 0)), &inner_mode))
	return x;

      len = INTVAL (XEXP (x, 1));
      pos = INTVAL (XEXP (x, 2));

      /* This should stay within the object being extracted, fail otherwise.  */
      if (len + pos > GET_MODE_PRECISION (inner_mode))
	return x;

      /* Extract positions in the RTL are endian-relative; normalize to
	 an LSB-based position for the shift computations below.  */
      if (BITS_BIG_ENDIAN)
	pos = GET_MODE_PRECISION (inner_mode) - len - pos;

      break;

    default:
      return x;
    }

  /* We've rejected non-scalar operations by now.  */
  scalar_int_mode mode = as_a <scalar_int_mode> (GET_MODE (x));

  /* Convert sign extension to zero extension, if we know that the high
     bit is not set, as this is easier to optimize.  It will be converted
     back to cheaper alternative in make_extraction.  */
  if (GET_CODE (x) == SIGN_EXTEND
      && HWI_COMPUTABLE_MODE_P (mode)
      && ((nonzero_bits (XEXP (x, 0), inner_mode)
	   & ~(((unsigned HOST_WIDE_INT) GET_MODE_MASK (inner_mode)) >> 1))
	  == 0))
    {
      rtx temp = gen_rtx_ZERO_EXTEND (mode, XEXP (x, 0));
      rtx temp2 = expand_compound_operation (temp);

      /* Make sure this is a profitable operation.  */
      if (set_src_cost (x, mode, optimize_this_for_speed_p)
	  > set_src_cost (temp2, mode, optimize_this_for_speed_p))
	return temp2;
      else if (set_src_cost (x, mode, optimize_this_for_speed_p)
	       > set_src_cost (temp, mode, optimize_this_for_speed_p))
	return temp;
      else
	return x;
    }

  /* We can optimize some special cases of ZERO_EXTEND.  */
  if (GET_CODE (x) == ZERO_EXTEND)
    {
      /* (zero_extend:DI (truncate:SI foo:DI)) is just foo:DI if we
	 know that the last value didn't have any inappropriate bits
	 set.  */
      if (GET_CODE (XEXP (x, 0)) == TRUNCATE
	  && GET_MODE (XEXP (XEXP (x, 0), 0)) == mode
	  && HWI_COMPUTABLE_MODE_P (mode)
	  && (nonzero_bits (XEXP (XEXP (x, 0), 0), mode)
	      & ~GET_MODE_MASK (inner_mode)) == 0)
	return XEXP (XEXP (x, 0), 0);

      /* Likewise for (zero_extend:DI (subreg:SI foo:DI 0)).  */
      if (GET_CODE (XEXP (x, 0)) == SUBREG
	  && GET_MODE (SUBREG_REG (XEXP (x, 0))) == mode
	  && subreg_lowpart_p (XEXP (x, 0))
	  && HWI_COMPUTABLE_MODE_P (mode)
	  && (nonzero_bits (SUBREG_REG (XEXP (x, 0)), mode)
	      & ~GET_MODE_MASK (inner_mode)) == 0)
	return SUBREG_REG (XEXP (x, 0));

      /* (zero_extend:DI (truncate:SI foo:DI)) is just foo:DI when foo
	 is a comparison and STORE_FLAG_VALUE permits.  This is like
	 the first case, but it works even when MODE is larger
	 than HOST_WIDE_INT.  */
      if (GET_CODE (XEXP (x, 0)) == TRUNCATE
	  && GET_MODE (XEXP (XEXP (x, 0), 0)) == mode
	  && COMPARISON_P (XEXP (XEXP (x, 0), 0))
	  && GET_MODE_PRECISION (inner_mode) <= HOST_BITS_PER_WIDE_INT
	  && (STORE_FLAG_VALUE & ~GET_MODE_MASK (inner_mode)) == 0)
	return XEXP (XEXP (x, 0), 0);

      /* Likewise for (zero_extend:DI (subreg:SI foo:DI 0)).  */
      if (GET_CODE (XEXP (x, 0)) == SUBREG
	  && GET_MODE (SUBREG_REG (XEXP (x, 0))) == mode
	  && subreg_lowpart_p (XEXP (x, 0))
	  && COMPARISON_P (SUBREG_REG (XEXP (x, 0)))
	  && GET_MODE_PRECISION (inner_mode) <= HOST_BITS_PER_WIDE_INT
	  && (STORE_FLAG_VALUE & ~GET_MODE_MASK (inner_mode)) == 0)
	return SUBREG_REG (XEXP (x, 0));

    }

  /* If we reach here, we want to return a pair of shifts.  The inner
     shift is a left shift of BITSIZE - POS - LEN bits.  The outer
     shift is a right shift of BITSIZE - LEN bits.  It is arithmetic or
     logical depending on the value of UNSIGNEDP.

     If this was a ZERO_EXTEND or ZERO_EXTRACT, this pair of shifts will be
     converted into an AND of a shift.

     We must check for the case where the left shift would have a negative
     count.  This can happen in a case like (x >> 31) & 255 on machines
     that can't shift by a constant.  On those machines, we would first
     combine the shift with the AND to produce a variable-position
     extraction.  Then the constant of 31 would be substituted in
     to produce such a position.  */

  modewidth = GET_MODE_PRECISION (mode);
  if (modewidth >= pos + len)
    {
      /* The field fits in MODE: shift it up to the top, then back down,
	 letting the right-shift kind implement the sign/zero extension.  */
      tem = gen_lowpart (mode, XEXP (x, 0));
      if (!tem || GET_CODE (tem) == CLOBBER)
	return x;
      tem = simplify_shift_const (NULL_RTX, ASHIFT, mode,
				  tem, modewidth - pos - len);
      tem = simplify_shift_const (NULL_RTX, unsignedp ? LSHIFTRT : ASHIFTRT,
				  mode, tem, modewidth - len);
    }
  else if (unsignedp && len < HOST_BITS_PER_WIDE_INT)
    {
      /* Field extends beyond MODE: shift it down in the wider inner mode
	 first, then narrow and mask off the LEN low bits.  */
      tem = simplify_shift_const (NULL_RTX, LSHIFTRT, inner_mode,
				  XEXP (x, 0), pos);
      tem = gen_lowpart (mode, tem);
      if (!tem || GET_CODE (tem) == CLOBBER)
	return x;
      tem = simplify_and_const_int (NULL_RTX, mode, tem,
				    (HOST_WIDE_INT_1U << len) - 1);
    }
  else
    /* Any other cases we can't handle.  */
    return x;

  /* If we couldn't do this for some reason, return the original
     expression.  */
  if (GET_CODE (tem) == CLOBBER)
    return x;

  return tem;
}
7443 : :
7444 : : /* X is a SET which contains an assignment of one object into
7445 : : a part of another (such as a bit-field assignment, STRICT_LOW_PART,
7446 : : or certain SUBREGS). If possible, convert it into a series of
7447 : : logical operations.
7448 : :
7449 : : We half-heartedly support variable positions, but do not at all
7450 : : support variable lengths. */
7451 : :
/* X is a SET whose destination writes only part of an object
   (a STRICT_LOW_PART, a constant-length ZERO_EXTRACT, or a SUBREG).
   Rewrite it, where possible, as a plain SET of the full inner object
   computed with shift/mask/IOR logic: (inner & ~(mask << pos))
   | ((src & mask) << pos).  Iterates because one rewrite can expose
   another.  Returns the (possibly unchanged) SET.  */

static const_rtx
expand_field_assignment (const_rtx x)
{
  rtx inner;			/* The full object being partially written.  */
  rtx pos;			/* Always counts from low bit.  */
  int len, inner_len;		/* Field width / inner object width in bits.  */
  rtx mask, cleared, masked;
  scalar_int_mode compute_mode;

  /* Loop until we find something we can't simplify.  */
  while (1)
    {
      if (GET_CODE (SET_DEST (x)) == STRICT_LOW_PART
	  && GET_CODE (XEXP (SET_DEST (x), 0)) == SUBREG)
	{
	  /* (strict_low_part (subreg ...)): the field is the whole subreg,
	     located at the subreg's LSB offset within the inner reg.  */
	  rtx x0 = XEXP (SET_DEST (x), 0);
	  if (!GET_MODE_PRECISION (GET_MODE (x0)).is_constant (&len))
	    break;
	  inner = SUBREG_REG (XEXP (SET_DEST (x), 0));
	  pos = gen_int_mode (subreg_lsb (XEXP (SET_DEST (x), 0)),
			      MAX_MODE_INT);
	}
      else if (GET_CODE (SET_DEST (x)) == ZERO_EXTRACT
	       && CONST_INT_P (XEXP (SET_DEST (x), 1)))
	{
	  /* (zero_extract inner len pos): LEN must be constant, POS may
	     be a variable rtx.  */
	  inner = XEXP (SET_DEST (x), 0);
	  if (!GET_MODE_PRECISION (GET_MODE (inner)).is_constant (&inner_len))
	    break;

	  len = INTVAL (XEXP (SET_DEST (x), 1));
	  pos = XEXP (SET_DEST (x), 2);

	  /* A constant position should stay within the width of INNER.  */
	  if (CONST_INT_P (pos) && INTVAL (pos) + len > inner_len)
	    break;

	  /* Convert an endian-relative position to an LSB-based one.  */
	  if (BITS_BIG_ENDIAN)
	    {
	      if (CONST_INT_P (pos))
		pos = GEN_INT (inner_len - len - INTVAL (pos));
	      else if (GET_CODE (pos) == MINUS
		       && CONST_INT_P (XEXP (pos, 1))
		       && INTVAL (XEXP (pos, 1)) == inner_len - len)
		/* If position is ADJUST - X, new position is X.  */
		pos = XEXP (pos, 0);
	      else
		pos = simplify_gen_binary (MINUS, GET_MODE (pos),
					   gen_int_mode (inner_len - len,
							 GET_MODE (pos)),
					   pos);
	    }
	}

      /* If the destination is a subreg that overwrites the whole of the inner
	 register, we can move the subreg to the source.  */
      else if (GET_CODE (SET_DEST (x)) == SUBREG
	       /* We need SUBREGs to compute nonzero_bits properly.  */
	       && nonzero_sign_valid
	       && !read_modify_subreg_p (SET_DEST (x)))
	{
	  x = gen_rtx_SET (SUBREG_REG (SET_DEST (x)),
			   gen_lowpart
			   (GET_MODE (SUBREG_REG (SET_DEST (x))),
			    SET_SRC (x)));
	  /* Retry: the rewritten SET may itself be expandable.  */
	  continue;
	}
      else
	break;

      /* Look through lowpart subregs of INNER to the underlying object.  */
      while (GET_CODE (inner) == SUBREG && subreg_lowpart_p (inner))
	inner = SUBREG_REG (inner);

      /* Don't attempt bitwise arithmetic on non scalar integer modes.  */
      if (!is_a <scalar_int_mode> (GET_MODE (inner), &compute_mode))
	{
	  /* Don't do anything for vector or complex integral types.  */
	  if (! FLOAT_MODE_P (GET_MODE (inner)))
	    break;

	  /* Try to find an integral mode to pun with.  */
	  if (!int_mode_for_size (GET_MODE_BITSIZE (GET_MODE (inner)), 0)
	      .exists (&compute_mode))
	    break;

	  inner = gen_lowpart (compute_mode, inner);
	}

      /* Compute a mask of LEN bits, if we can do this on the host machine.  */
      if (len >= HOST_BITS_PER_WIDE_INT)
	break;

      /* Don't try to compute in too wide unsupported modes.  */
      if (!targetm.scalar_mode_supported_p (compute_mode))
	break;

      /* gen_lowpart_for_combine returns CLOBBER on failure.  */
      rtx lowpart = gen_lowpart (compute_mode, SET_SRC (x));
      if (GET_CODE (lowpart) == CLOBBER)
	break;

      /* Now compute the equivalent expression.  Make a copy of INNER
	 for the SET_DEST in case it is a MEM into which we will substitute;
	 we don't want shared RTL in that case.  */
      mask = gen_int_mode ((HOST_WIDE_INT_1U << len) - 1,
			   compute_mode);
      /* CLEARED = inner with the field's bits zeroed out.  */
      cleared = simplify_gen_binary (AND, compute_mode,
				     simplify_gen_unary (NOT, compute_mode,
				       simplify_gen_binary (ASHIFT,
							    compute_mode,
							    mask, pos),
				       compute_mode),
				     inner);
      /* MASKED = the new field value, masked and shifted into place.  */
      masked = simplify_gen_binary (ASHIFT, compute_mode,
				    simplify_gen_binary (
				      AND, compute_mode, lowpart, mask),
				    pos);

      x = gen_rtx_SET (copy_rtx (inner),
		       simplify_gen_binary (IOR, compute_mode,
					    cleared, masked));
    }

  return x;
}
7576 : :
7577 : : /* Return an RTX for a reference to LEN bits of INNER. If POS_RTX is nonzero,
7578 : : it is an RTX that represents the (variable) starting position; otherwise,
7579 : : POS is the (constant) starting bit position. Both are counted from the LSB.
7580 : :
7581 : : UNSIGNEDP is true for an unsigned reference and zero for a signed one.
7582 : :
7583 : : IN_DEST is true if this is a reference in the destination of a SET.
7584 : : This is used when a ZERO_ or SIGN_EXTRACT isn't needed. If nonzero,
7585 : : a STRICT_LOW_PART will be used, if zero, ZERO_EXTEND or SIGN_EXTEND will
7586 : : be used.
7587 : :
7588 : : IN_COMPARE is true if we are in a COMPARE. This means that a
7589 : : ZERO_EXTRACT should be built even for bits starting at bit 0.
7590 : :
7591 : : MODE is the desired mode of the result (if IN_DEST == 0).
7592 : :
7593 : : The result is an RTX for the extraction or NULL_RTX if the target
7594 : : can't handle it. */
7595 : :
7596 : : static rtx
7597 : 5076650 : make_extraction (machine_mode mode, rtx inner, HOST_WIDE_INT pos,
7598 : : rtx pos_rtx, unsigned HOST_WIDE_INT len, bool unsignedp,
7599 : : bool in_dest, bool in_compare)
7600 : : {
7601 : : /* This mode describes the size of the storage area
7602 : : to fetch the overall value from. Within that, we
7603 : : ignore the POS lowest bits, etc. */
7604 : 5076650 : machine_mode is_mode = GET_MODE (inner);
7605 : 5076650 : machine_mode inner_mode;
7606 : 5076650 : scalar_int_mode wanted_inner_mode;
7607 : 5076650 : scalar_int_mode wanted_inner_reg_mode = word_mode;
7608 : 5076650 : scalar_int_mode pos_mode = word_mode;
7609 : 5076650 : machine_mode extraction_mode = word_mode;
7610 : 5076650 : rtx new_rtx = 0;
7611 : 5076650 : rtx orig_pos_rtx = pos_rtx;
7612 : 5076650 : HOST_WIDE_INT orig_pos;
7613 : :
7614 : 5076650 : if (pos_rtx && CONST_INT_P (pos_rtx))
7615 : 931136 : pos = INTVAL (pos_rtx), pos_rtx = 0;
7616 : :
7617 : 5076650 : if (GET_CODE (inner) == SUBREG
7618 : 2606882 : && subreg_lowpart_p (inner)
7619 : 7679619 : && (paradoxical_subreg_p (inner)
7620 : : /* If trying or potentially trying to extract
7621 : : bits outside of is_mode, don't look through
7622 : : non-paradoxical SUBREGs. See PR82192. */
7623 : 151837 : || (pos_rtx == NULL_RTX
7624 : 151782 : && known_le (pos + len, GET_MODE_PRECISION (is_mode)))))
7625 : : {
7626 : : /* If going from (subreg:SI (mem:QI ...)) to (mem:QI ...),
7627 : : consider just the QI as the memory to extract from.
7628 : : The subreg adds or removes high bits; its mode is
7629 : : irrelevant to the meaning of this extraction,
7630 : : since POS and LEN count from the lsb. */
7631 : 2602914 : if (MEM_P (SUBREG_REG (inner)))
7632 : 533762 : is_mode = GET_MODE (SUBREG_REG (inner));
7633 : : inner = SUBREG_REG (inner);
7634 : : }
7635 : 2473736 : else if (GET_CODE (inner) == ASHIFT
7636 : 133718 : && CONST_INT_P (XEXP (inner, 1))
7637 : 132509 : && pos_rtx == 0 && pos == 0
7638 : 132477 : && len > UINTVAL (XEXP (inner, 1)))
7639 : : {
7640 : : /* We're extracting the least significant bits of an rtx
7641 : : (ashift X (const_int C)), where LEN > C. Extract the
7642 : : least significant (LEN - C) bits of X, giving an rtx
7643 : : whose mode is MODE, then shift it left C times. */
7644 : 132477 : new_rtx = make_extraction (mode, XEXP (inner, 0),
7645 : : 0, 0, len - INTVAL (XEXP (inner, 1)),
7646 : : unsignedp, in_dest, in_compare);
7647 : 132477 : if (new_rtx != 0)
7648 : 130831 : return gen_rtx_ASHIFT (mode, new_rtx, XEXP (inner, 1));
7649 : : }
7650 : 2341259 : else if (GET_CODE (inner) == MULT
7651 : 172297 : && CONST_INT_P (XEXP (inner, 1))
7652 : 131001 : && pos_rtx == 0 && pos == 0)
7653 : : {
7654 : : /* We're extracting the least significant bits of an rtx
7655 : : (mult X (const_int 2^C)), where LEN > C. Extract the
7656 : : least significant (LEN - C) bits of X, giving an rtx
7657 : : whose mode is MODE, then multiply it by 2^C. */
7658 : 109330 : const HOST_WIDE_INT shift_amt = exact_log2 (INTVAL (XEXP (inner, 1)));
7659 : 109330 : if (len > 1 && IN_RANGE (shift_amt, 1, len - 1))
7660 : : {
7661 : 104362 : new_rtx = make_extraction (mode, XEXP (inner, 0),
7662 : : 0, 0, len - shift_amt,
7663 : : unsignedp, in_dest, in_compare);
7664 : 104362 : if (new_rtx)
7665 : 104362 : return gen_rtx_MULT (mode, new_rtx, XEXP (inner, 1));
7666 : : }
7667 : : }
7668 : 2231929 : else if (GET_CODE (inner) == TRUNCATE
7669 : : /* If trying or potentially trying to extract
7670 : : bits outside of is_mode, don't look through
7671 : : TRUNCATE. See PR82192. */
7672 : 0 : && pos_rtx == NULL_RTX
7673 : 2231929 : && known_le (pos + len, GET_MODE_PRECISION (is_mode)))
7674 : 0 : inner = XEXP (inner, 0);
7675 : :
7676 : 4841457 : inner_mode = GET_MODE (inner);
7677 : :
7678 : : /* See if this can be done without an extraction. We never can if the
7679 : : width of the field is not the same as that of some integer mode. For
7680 : : registers, we can only avoid the extraction if the position is at the
7681 : : low-order bit and this is either not in the destination or we have the
7682 : : appropriate STRICT_LOW_PART operation available.
7683 : :
7684 : : For MEM, we can avoid an extract if the field starts on an appropriate
7685 : : boundary and we can change the mode of the memory reference. */
7686 : :
7687 : 4841457 : scalar_int_mode tmode;
7688 : 4841457 : if (int_mode_for_size (len, 1).exists (&tmode)
7689 : 2347959 : && ((pos_rtx == 0 && (pos % BITS_PER_WORD) == 0
7690 : 2071996 : && !MEM_P (inner)
7691 : 1691012 : && (pos == 0 || REG_P (inner))
7692 : 1691012 : && (inner_mode == tmode
7693 : 246450 : || !REG_P (inner)
7694 : 222764 : || TRULY_NOOP_TRUNCATION_MODES_P (tmode, inner_mode)
7695 : 0 : || reg_truncated_to_mode (tmode, inner))
7696 : 1691012 : && (! in_dest
7697 : 23 : || (REG_P (inner)
7698 : 23 : && have_insn_for (STRICT_LOW_PART, tmode))))
7699 : 529273 : || (MEM_P (inner) && pos_rtx == 0
7700 : 382192 : && (pos
7701 : : % (STRICT_ALIGNMENT ? GET_MODE_ALIGNMENT (tmode)
7702 : : : BITS_PER_UNIT)) == 0
7703 : : /* We can't do this if we are widening INNER_MODE (it
7704 : : may not be aligned, for one thing). */
7705 : 381325 : && !paradoxical_subreg_p (tmode, inner_mode)
7706 : 381325 : && known_le (pos + len, GET_MODE_PRECISION (is_mode))
7707 : 381325 : && (inner_mode == tmode
7708 : 1121 : || (! mode_dependent_address_p (XEXP (inner, 0),
7709 : 1121 : MEM_ADDR_SPACE (inner))
7710 : 1121 : && ! MEM_VOLATILE_P (inner))))))
7711 : : {
7712 : : /* If INNER is a MEM, make a new MEM that encompasses just the desired
7713 : : field. If the original and current mode are the same, we need not
7714 : : adjust the offset. Otherwise, we do if bytes big endian.
7715 : :
7716 : : If INNER is not a MEM, get a piece consisting of just the field
7717 : : of interest (in this case POS % BITS_PER_WORD must be 0). */
7718 : :
7719 : 2072314 : if (MEM_P (inner))
7720 : : {
7721 : 381312 : poly_int64 offset;
7722 : :
7723 : : /* POS counts from lsb, but make OFFSET count in memory order. */
7724 : 381312 : if (BYTES_BIG_ENDIAN)
7725 : : offset = bits_to_bytes_round_down (GET_MODE_PRECISION (is_mode)
7726 : : - len - pos);
7727 : : else
7728 : 381312 : offset = pos / BITS_PER_UNIT;
7729 : :
7730 : 381312 : new_rtx = adjust_address_nv (inner, tmode, offset);
7731 : : }
7732 : 1691002 : else if (REG_P (inner))
7733 : : {
7734 : 1120073 : if (tmode != inner_mode)
7735 : : {
7736 : : /* We can't call gen_lowpart in a DEST since we
7737 : : always want a SUBREG (see below) and it would sometimes
7738 : : return a new hard register. */
7739 : 222754 : if (pos || in_dest)
7740 : : {
7741 : 16 : poly_uint64 offset
7742 : 16 : = subreg_offset_from_lsb (tmode, inner_mode, pos);
7743 : :
7744 : : /* Avoid creating invalid subregs, for example when
7745 : : simplifying (x>>32)&255. */
7746 : 16 : if (!validate_subreg (tmode, inner_mode, inner, offset))
7747 : 0 : return NULL_RTX;
7748 : :
7749 : 16 : new_rtx = gen_rtx_SUBREG (tmode, inner, offset);
7750 : 16 : }
7751 : : else
7752 : 222738 : new_rtx = gen_lowpart (tmode, inner);
7753 : : }
7754 : : else
7755 : : new_rtx = inner;
7756 : : }
7757 : : else
7758 : 1141858 : new_rtx = force_to_mode (inner, tmode,
7759 : : len >= HOST_BITS_PER_WIDE_INT
7760 : : ? HOST_WIDE_INT_M1U
7761 : 570929 : : (HOST_WIDE_INT_1U << len) - 1, false);
7762 : :
7763 : : /* If this extraction is going into the destination of a SET,
7764 : : make a STRICT_LOW_PART unless we made a MEM. */
7765 : :
7766 : 2072314 : if (in_dest)
7767 : 49 : return (MEM_P (new_rtx) ? new_rtx
7768 : : : (GET_CODE (new_rtx) != SUBREG
7769 : 13 : ? gen_rtx_CLOBBER (tmode, const0_rtx)
7770 : 13 : : gen_rtx_STRICT_LOW_PART (VOIDmode, new_rtx)));
7771 : :
7772 : 2072265 : if (mode == tmode)
7773 : : return new_rtx;
7774 : :
7775 : 2072236 : if (CONST_SCALAR_INT_P (new_rtx))
7776 : 5 : return simplify_unary_operation (unsignedp ? ZERO_EXTEND : SIGN_EXTEND,
7777 : 5 : mode, new_rtx, tmode);
7778 : :
7779 : : /* If we know that no extraneous bits are set, and that the high
7780 : : bit is not set, convert the extraction to the cheaper of
7781 : : sign and zero extension, that are equivalent in these cases. */
7782 : 2072231 : if (flag_expensive_optimizations
7783 : 2072231 : && (HWI_COMPUTABLE_MODE_P (tmode)
7784 : 1925601 : && ((nonzero_bits (new_rtx, tmode)
7785 : 1925601 : & ~(((unsigned HOST_WIDE_INT)GET_MODE_MASK (tmode)) >> 1))
7786 : : == 0)))
7787 : : {
7788 : 8356 : rtx temp = gen_rtx_ZERO_EXTEND (mode, new_rtx);
7789 : 8356 : rtx temp1 = gen_rtx_SIGN_EXTEND (mode, new_rtx);
7790 : :
7791 : : /* Prefer ZERO_EXTENSION, since it gives more information to
7792 : : backends. */
7793 : 8356 : if (set_src_cost (temp, mode, optimize_this_for_speed_p)
7794 : 8356 : <= set_src_cost (temp1, mode, optimize_this_for_speed_p))
7795 : : return temp;
7796 : 0 : return temp1;
7797 : : }
7798 : :
7799 : : /* Otherwise, sign- or zero-extend unless we already are in the
7800 : : proper mode. */
7801 : :
7802 : 2063875 : return (gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND,
7803 : 2063875 : mode, new_rtx));
7804 : : }
7805 : :
7806 : : /* Unless this is a COMPARE or we have a funny memory reference,
7807 : : don't do anything with zero-extending field extracts starting at
7808 : : the low-order bit since they are simple AND operations. */
7809 : 2769143 : if (pos_rtx == 0 && pos == 0 && ! in_dest
7810 : 1704282 : && ! in_compare && unsignedp)
7811 : : return 0;
7812 : :
7813                 :                 :   /* If INNER is a MEM, reject this if we would be spanning bytes or
7814                 :                 :      if the position is not a constant and the length is not 1.  In all
7815                 :                 :      other cases, we would only be going outside our object in cases when
7816                 :                 :      an original shift would have been undefined.  */
7817 : 1455260 : if (MEM_P (inner)
7818 : 1455260 : && ((pos_rtx == 0 && maybe_gt (pos + len, GET_MODE_PRECISION (is_mode)))
7819 : 3031 : || (pos_rtx != 0 && len != 1)))
7820 : : return 0;
7821 : :
7822 : 1562775 : enum extraction_pattern pattern = (in_dest ? EP_insv
7823 : 1448044 : : unsignedp ? EP_extzv : EP_extv);
7824 : :
7825 : : /* If INNER is not from memory, we want it to have the mode of a register
7826 : : extraction pattern's structure operand, or word_mode if there is no
7827 : : such pattern. The same applies to extraction_mode and pos_mode
7828 : : and their respective operands.
7829 : :
7830 : : For memory, assume that the desired extraction_mode and pos_mode
7831 : : are the same as for a register operation, since at present we don't
7832 : : have named patterns for aligned memory structures. */
7833 : 1455222 : class extraction_insn insn;
7834 : 1455222 : unsigned int inner_size;
7835 : 2910444 : if (GET_MODE_BITSIZE (inner_mode).is_constant (&inner_size)
7836 : 1455222 : && get_best_reg_extraction_insn (&insn, pattern, inner_size, mode))
7837 : : {
7838 : 1347066 : wanted_inner_reg_mode = insn.struct_mode.require ();
7839 : 1347066 : pos_mode = insn.pos_mode;
7840 : 1347066 : extraction_mode = insn.field_mode;
7841 : : }
7842 : :
7843 : : /* Never narrow an object, since that might not be safe. */
7844 : :
7845 : 1455222 : if (mode != VOIDmode
7846 : 1455222 : && partial_subreg_p (extraction_mode, mode))
7847 : : extraction_mode = mode;
7848 : :
7849 : : /* Punt if len is too large for extraction_mode. */
7850 : 1455222 : if (maybe_gt (len, GET_MODE_PRECISION (extraction_mode)))
7851 : : return NULL_RTX;
7852 : :
7853 : 1455210 : if (!MEM_P (inner))
7854 : 1270936 : wanted_inner_mode = wanted_inner_reg_mode;
7855 : : else
7856 : : {
7857 : : /* Be careful not to go beyond the extracted object and maintain the
7858 : : natural alignment of the memory. */
7859 : 184274 : wanted_inner_mode = smallest_int_mode_for_size (len).require ();
7860 : 371613 : while (pos % GET_MODE_BITSIZE (wanted_inner_mode) + len
7861 : 374678 : > GET_MODE_BITSIZE (wanted_inner_mode))
7862 : 3065 : wanted_inner_mode = GET_MODE_WIDER_MODE (wanted_inner_mode).require ();
7863 : : }
7864 : :
7865 : 1455210 : orig_pos = pos;
7866 : :
7867 : 1455210 : if (BITS_BIG_ENDIAN)
7868 : : {
7869 : : /* POS is passed as if BITS_BIG_ENDIAN == 0, so we need to convert it to
7870 : : BITS_BIG_ENDIAN style. If position is constant, compute new
7871 : : position. Otherwise, build subtraction.
7872 : : Note that POS is relative to the mode of the original argument.
7873 : : If it's a MEM we need to recompute POS relative to that.
7874 : : However, if we're extracting from (or inserting into) a register,
7875 : : we want to recompute POS relative to wanted_inner_mode. */
7876 : : int width;
7877 : : if (!MEM_P (inner))
7878 : : width = GET_MODE_BITSIZE (wanted_inner_mode);
7879 : : else if (!GET_MODE_BITSIZE (is_mode).is_constant (&width))
7880 : : return NULL_RTX;
7881 : :
7882 : : if (pos_rtx == 0)
7883 : : pos = width - len - pos;
7884 : : else
7885 : : pos_rtx
7886 : : = gen_rtx_MINUS (GET_MODE (pos_rtx),
7887 : : gen_int_mode (width - len, GET_MODE (pos_rtx)),
7888 : : pos_rtx);
7889 : : /* POS may be less than 0 now, but we check for that below.
7890 : : Note that it can only be less than 0 if !MEM_P (inner). */
7891 : : }
7892 : :
7893 : : /* If INNER has a wider mode, and this is a constant extraction, try to
7894 : : make it smaller and adjust the byte to point to the byte containing
7895 : : the value. */
7896 : 1455210 : if (wanted_inner_mode != VOIDmode
7897 : 1455210 : && inner_mode != wanted_inner_mode
7898 : 204412 : && ! pos_rtx
7899 : 196360 : && partial_subreg_p (wanted_inner_mode, is_mode)
7900 : 115696 : && MEM_P (inner)
7901 : 29841 : && ! mode_dependent_address_p (XEXP (inner, 0), MEM_ADDR_SPACE (inner))
7902 : 1485051 : && ! MEM_VOLATILE_P (inner))
7903 : : {
7904 : 28231 : poly_int64 offset = 0;
7905 : :
7906 : : /* The computations below will be correct if the machine is big
7907 : : endian in both bits and bytes or little endian in bits and bytes.
7908 : : If it is mixed, we must adjust. */
7909 : :
7910 : : /* If bytes are big endian and we had a paradoxical SUBREG, we must
7911 : : adjust OFFSET to compensate. */
7912 : 28231 : if (BYTES_BIG_ENDIAN
7913 : : && paradoxical_subreg_p (is_mode, inner_mode))
7914 : : offset -= GET_MODE_SIZE (is_mode) - GET_MODE_SIZE (inner_mode);
7915 : :
7916 : : /* We can now move to the desired byte. */
7917 : 56462 : offset += (pos / GET_MODE_BITSIZE (wanted_inner_mode))
7918 : 28231 : * GET_MODE_SIZE (wanted_inner_mode);
7919 : 28231 : pos %= GET_MODE_BITSIZE (wanted_inner_mode);
7920 : :
7921 : 28231 : if (BYTES_BIG_ENDIAN != BITS_BIG_ENDIAN
7922 : : && is_mode != wanted_inner_mode)
7923 : : offset = (GET_MODE_SIZE (is_mode)
7924 : : - GET_MODE_SIZE (wanted_inner_mode) - offset);
7925 : :
7926 : 28231 : inner = adjust_address_nv (inner, wanted_inner_mode, offset);
7927 : : }
7928 : :
7929 : : /* If INNER is not memory, get it into the proper mode. If we are changing
7930 : : its mode, POS must be a constant and smaller than the size of the new
7931 : : mode. */
7932 : 1426979 : else if (!MEM_P (inner))
7933 : : {
7934 : : /* On the LHS, don't create paradoxical subregs implicitly truncating
7935 : : the register unless TARGET_TRULY_NOOP_TRUNCATION. */
7936 : 1270936 : if (in_dest
7937 : 1270936 : && !TRULY_NOOP_TRUNCATION_MODES_P (GET_MODE (inner),
7938 : : wanted_inner_mode))
7939 : 0 : return NULL_RTX;
7940 : :
7941 : 1270936 : if (GET_MODE (inner) != wanted_inner_mode
7942 : 1270936 : && (pos_rtx != 0
7943 : 333038 : || orig_pos + len > GET_MODE_BITSIZE (wanted_inner_mode)))
7944 : : return NULL_RTX;
7945 : :
7946 : 1203109 : if (orig_pos < 0)
7947 : : return NULL_RTX;
7948 : :
7949 : 2386149 : inner = force_to_mode (inner, wanted_inner_mode,
7950 : : pos_rtx
7951 : 1183040 : || len + orig_pos >= HOST_BITS_PER_WIDE_INT
7952 : : ? HOST_WIDE_INT_M1U
7953 : 1049742 : : (((HOST_WIDE_INT_1U << len) - 1)
7954 : 1049742 : << orig_pos), false);
7955 : : }
7956 : :
7957 : : /* Adjust mode of POS_RTX, if needed. If we want a wider mode, we
7958 : : have to zero extend. Otherwise, we can just use a SUBREG.
7959 : :
7960 : : We dealt with constant rtxes earlier, so pos_rtx cannot
7961 : : have VOIDmode at this point. */
7962 : 1387383 : if (pos_rtx != 0
7963 : 1387383 : && (GET_MODE_SIZE (pos_mode)
7964 : 1410445 : > GET_MODE_SIZE (as_a <scalar_int_mode> (GET_MODE (pos_rtx)))))
7965 : : {
7966 : 74 : rtx temp = simplify_gen_unary (ZERO_EXTEND, pos_mode, pos_rtx,
7967 : : GET_MODE (pos_rtx));
7968 : :
7969 : : /* If we know that no extraneous bits are set, and that the high
7970 : : bit is not set, convert extraction to cheaper one - either
7971 : : SIGN_EXTENSION or ZERO_EXTENSION, that are equivalent in these
7972 : : cases. */
7973 : 74 : if (flag_expensive_optimizations
7974 : 74 : && (HWI_COMPUTABLE_MODE_P (GET_MODE (pos_rtx))
7975 : 74 : && ((nonzero_bits (pos_rtx, GET_MODE (pos_rtx))
7976 : 74 : & ~(((unsigned HOST_WIDE_INT)
7977 : 74 : GET_MODE_MASK (GET_MODE (pos_rtx)))
7978 : 74 : >> 1))
7979 : : == 0)))
7980 : : {
7981 : 46 : rtx temp1 = simplify_gen_unary (SIGN_EXTEND, pos_mode, pos_rtx,
7982 : : GET_MODE (pos_rtx));
7983 : :
7984 : : /* Prefer ZERO_EXTENSION, since it gives more information to
7985 : : backends. */
7986 : 46 : if (set_src_cost (temp1, pos_mode, optimize_this_for_speed_p)
7987 : 46 : < set_src_cost (temp, pos_mode, optimize_this_for_speed_p))
7988 : 1387383 : temp = temp1;
7989 : : }
7990 : : pos_rtx = temp;
7991 : : }
7992 : :
7993 : : /* Make POS_RTX unless we already have it and it is correct. If we don't
7994 : : have a POS_RTX but we do have an ORIG_POS_RTX, the latter must
7995 : : be a CONST_INT. */
7996 : 1387383 : if (pos_rtx == 0 && orig_pos_rtx != 0 && INTVAL (orig_pos_rtx) == pos)
7997 : : pos_rtx = orig_pos_rtx;
7998 : :
7999 : 482557 : else if (pos_rtx == 0)
8000 : 459495 : pos_rtx = GEN_INT (pos);
8001 : :
8002 : : /* Make the required operation. See if we can use existing rtx. */
8003 : 1387383 : new_rtx = gen_rtx_fmt_eee (unsignedp ? ZERO_EXTRACT : SIGN_EXTRACT,
8004 : : extraction_mode, inner, GEN_INT (len), pos_rtx);
8005 : 1387383 : if (! in_dest)
8006 : 1380249 : new_rtx = gen_lowpart (mode, new_rtx);
8007 : :
8008 : : return new_rtx;
8009 : : }
8010 : :
8011 : : /* See if X (of mode MODE) contains an ASHIFT of COUNT or more bits that
8012 : : can be commuted with any other operations in X. Return X without
8013 : : that shift if so. */
8014 : :
8015 : : static rtx
8016 : 1584473 : extract_left_shift (scalar_int_mode mode, rtx x, int count)
8017 : : {
8018 : 1584473 : enum rtx_code code = GET_CODE (x);
8019 : 1584473 : rtx tem;
8020 : :
8021 : 1584473 : switch (code)
8022 : : {
8023 : 252426 : case ASHIFT:
8024 : : /* This is the shift itself. If it is wide enough, we will return
8025 : : either the value being shifted if the shift count is equal to
8026 : : COUNT or a shift for the difference. */
8027 : 252426 : if (CONST_INT_P (XEXP (x, 1))
8028 : 247176 : && INTVAL (XEXP (x, 1)) >= count)
8029 : 245934 : return simplify_shift_const (NULL_RTX, ASHIFT, mode, XEXP (x, 0),
8030 : 245934 : INTVAL (XEXP (x, 1)) - count);
8031 : : break;
8032 : :
8033 : 5507 : case NEG: case NOT:
8034 : 5507 : if ((tem = extract_left_shift (mode, XEXP (x, 0), count)) != 0)
8035 : 2639 : return simplify_gen_unary (code, mode, tem, mode);
8036 : :
8037 : : break;
8038 : :
8039 : 554824 : case PLUS: case IOR: case XOR: case AND:
8040 : : /* If we can safely shift this constant and we find the inner shift,
8041 : : make a new operation. */
8042 : 554824 : if (CONST_INT_P (XEXP (x, 1))
8043 : 285421 : && (UINTVAL (XEXP (x, 1))
8044 : 285421 : & (((HOST_WIDE_INT_1U << count)) - 1)) == 0
8045 : 683316 : && (tem = extract_left_shift (mode, XEXP (x, 0), count)) != 0)
8046 : : {
8047 : 6692 : HOST_WIDE_INT val = INTVAL (XEXP (x, 1)) >> count;
8048 : 6692 : return simplify_gen_binary (code, mode, tem,
8049 : 6692 : gen_int_mode (val, mode));
8050 : : }
8051 : : break;
8052 : :
8053 : : default:
8054 : : break;
8055 : : }
8056 : :
8057 : : return 0;
8058 : : }
8059 : :
/* Subroutine of make_compound_operation.  *X_PTR is the rtx at the current
   level of the expression and MODE is its mode.  IN_CODE is as for
   make_compound_operation.  *NEXT_CODE_PTR is the value of IN_CODE
   that should be used when recursing on operands of *X_PTR.

   There are two possible actions:

   - Return null.  This tells the caller to recurse on *X_PTR with IN_CODE
     equal to *NEXT_CODE_PTR, after which *X_PTR holds the final value.

   - Return a new rtx, which the caller returns directly.  */

static rtx
make_compound_operation_int (scalar_int_mode mode, rtx *x_ptr,
			     enum rtx_code in_code,
			     enum rtx_code *next_code_ptr)
{
  rtx x = *x_ptr;
  enum rtx_code next_code = *next_code_ptr;
  enum rtx_code code = GET_CODE (x);
  int mode_width = GET_MODE_PRECISION (mode);
  rtx rhs, lhs;
  rtx new_rtx = 0;
  int i;
  rtx tem;
  scalar_int_mode inner_mode;
  bool equality_comparison = false;

  /* Treat an equality comparison like any other comparison below, but
     remember that it was one: the AND-with-power-of-two case uses the
     flag to allow extracting the sign bit as well.  */
  if (in_code == EQ)
    {
      equality_comparison = true;
      in_code = COMPARE;
    }

  /* Process depending on the code of this operation.  If NEW is set
     nonzero, it will be returned.  */

  switch (code)
    {
    case ASHIFT:
      /* Convert shifts by constants into multiplications if inside
	 an address.  */
      if (in_code == MEM && CONST_INT_P (XEXP (x, 1))
	  && INTVAL (XEXP (x, 1)) < HOST_BITS_PER_WIDE_INT
	  && INTVAL (XEXP (x, 1)) >= 0)
	{
	  HOST_WIDE_INT count = INTVAL (XEXP (x, 1));
	  HOST_WIDE_INT multval = HOST_WIDE_INT_1 << count;

	  new_rtx = make_compound_operation (XEXP (x, 0), next_code);
	  /* Fold (neg Y) * 2^C into Y * -2^C.  */
	  if (GET_CODE (new_rtx) == NEG)
	    {
	      new_rtx = XEXP (new_rtx, 0);
	      multval = -multval;
	    }
	  multval = trunc_int_for_mode (multval, mode);
	  new_rtx = gen_rtx_MULT (mode, new_rtx, gen_int_mode (multval, mode));
	}
      break;

    case PLUS:
      lhs = XEXP (x, 0);
      rhs = XEXP (x, 1);
      lhs = make_compound_operation (lhs, next_code);
      rhs = make_compound_operation (rhs, next_code);
      /* Canonicalize (plus (mult (neg A) B) C) as (minus C (mult A B)),
	 and similarly when the multiplier is a negative constant.  */
      if (GET_CODE (lhs) == MULT && GET_CODE (XEXP (lhs, 0)) == NEG)
	{
	  tem = simplify_gen_binary (MULT, mode, XEXP (XEXP (lhs, 0), 0),
				     XEXP (lhs, 1));
	  new_rtx = simplify_gen_binary (MINUS, mode, rhs, tem);
	}
      else if (GET_CODE (lhs) == MULT
	       && (CONST_INT_P (XEXP (lhs, 1)) && INTVAL (XEXP (lhs, 1)) < 0))
	{
	  tem = simplify_gen_binary (MULT, mode, XEXP (lhs, 0),
				     simplify_gen_unary (NEG, mode,
							 XEXP (lhs, 1),
							 mode));
	  new_rtx = simplify_gen_binary (MINUS, mode, rhs, tem);
	}
      else
	{
	  /* Keep the PLUS but substitute the recursively-processed
	     operands in place.  */
	  SUBST (XEXP (x, 0), lhs);
	  SUBST (XEXP (x, 1), rhs);
	}
      maybe_swap_commutative_operands (x);
      return x;

    case MINUS:
      lhs = XEXP (x, 0);
      rhs = XEXP (x, 1);
      lhs = make_compound_operation (lhs, next_code);
      rhs = make_compound_operation (rhs, next_code);
      /* Canonicalize (minus C (mult (neg A) B)) as (plus (mult A B) C),
	 and similarly when the multiplier is a negative constant.  */
      if (GET_CODE (rhs) == MULT && GET_CODE (XEXP (rhs, 0)) == NEG)
	{
	  tem = simplify_gen_binary (MULT, mode, XEXP (XEXP (rhs, 0), 0),
				     XEXP (rhs, 1));
	  return simplify_gen_binary (PLUS, mode, tem, lhs);
	}
      else if (GET_CODE (rhs) == MULT
	       && (CONST_INT_P (XEXP (rhs, 1)) && INTVAL (XEXP (rhs, 1)) < 0))
	{
	  tem = simplify_gen_binary (MULT, mode, XEXP (rhs, 0),
				     simplify_gen_unary (NEG, mode,
							 XEXP (rhs, 1),
							 mode));
	  return simplify_gen_binary (PLUS, mode, tem, lhs);
	}
      else
	{
	  SUBST (XEXP (x, 0), lhs);
	  SUBST (XEXP (x, 1), rhs);
	  return x;
	}

    case AND:
      /* If the second operand is not a constant, we can't do anything
	 with it.  */
      if (!CONST_INT_P (XEXP (x, 1)))
	break;

      /* If the constant is a power of two minus one and the first operand
	 is a logical right shift, make an extraction.  */
      if (GET_CODE (XEXP (x, 0)) == LSHIFTRT
	  && (i = exact_log2 (UINTVAL (XEXP (x, 1)) + 1)) >= 0)
	{
	  new_rtx = make_compound_operation (XEXP (XEXP (x, 0), 0), next_code);
	  new_rtx = make_extraction (mode, new_rtx, 0, XEXP (XEXP (x, 0), 1),
				     i, true, false, in_code == COMPARE);
	}

      /* Same as previous, but for (subreg (lshiftrt ...)) in first op.  */
      else if (GET_CODE (XEXP (x, 0)) == SUBREG
	       && subreg_lowpart_p (XEXP (x, 0))
	       && is_a <scalar_int_mode> (GET_MODE (SUBREG_REG (XEXP (x, 0))),
					  &inner_mode)
	       && GET_CODE (SUBREG_REG (XEXP (x, 0))) == LSHIFTRT
	       && (i = exact_log2 (UINTVAL (XEXP (x, 1)) + 1)) >= 0)
	{
	  rtx inner_x0 = SUBREG_REG (XEXP (x, 0));
	  new_rtx = make_compound_operation (XEXP (inner_x0, 0), next_code);
	  new_rtx = make_extraction (inner_mode, new_rtx, 0,
				     XEXP (inner_x0, 1),
				     i, true, false, in_code == COMPARE);

	  /* If we narrowed the mode when dropping the subreg, then we lose.  */
	  if (GET_MODE_SIZE (inner_mode) < GET_MODE_SIZE (mode))
	    new_rtx = NULL;

	  /* If that didn't give anything, see if the AND simplifies on
	     its own.  */
	  if (!new_rtx && i >= 0)
	    {
	      new_rtx = make_compound_operation (XEXP (x, 0), next_code);
	      new_rtx = make_extraction (mode, new_rtx, 0, NULL_RTX, i,
					 true, false, in_code == COMPARE);
	    }
	}
      /* Same as previous, but for (xor/ior (lshiftrt...) (lshiftrt...)).  */
      else if ((GET_CODE (XEXP (x, 0)) == XOR
		|| GET_CODE (XEXP (x, 0)) == IOR)
	       && GET_CODE (XEXP (XEXP (x, 0), 0)) == LSHIFTRT
	       && GET_CODE (XEXP (XEXP (x, 0), 1)) == LSHIFTRT
	       && (i = exact_log2 (UINTVAL (XEXP (x, 1)) + 1)) >= 0)
	{
	  /* Apply the distributive law, and then try to make extractions.  */
	  new_rtx = gen_rtx_fmt_ee (GET_CODE (XEXP (x, 0)), mode,
				    gen_rtx_AND (mode, XEXP (XEXP (x, 0), 0),
						 XEXP (x, 1)),
				    gen_rtx_AND (mode, XEXP (XEXP (x, 0), 1),
						 XEXP (x, 1)));
	  new_rtx = make_compound_operation (new_rtx, in_code);
	}

      /* If we have (and (rotate X C) M) and C is larger than the number
	 of bits in M, this is an extraction.  */

      else if (GET_CODE (XEXP (x, 0)) == ROTATE
	       && CONST_INT_P (XEXP (XEXP (x, 0), 1))
	       && (i = exact_log2 (UINTVAL (XEXP (x, 1)) + 1)) >= 0
	       && i <= INTVAL (XEXP (XEXP (x, 0), 1)))
	{
	  new_rtx = make_compound_operation (XEXP (XEXP (x, 0), 0), next_code);
	  new_rtx = make_extraction (mode, new_rtx,
				     (GET_MODE_PRECISION (mode)
				      - INTVAL (XEXP (XEXP (x, 0), 1))),
				     NULL_RTX, i, true, false,
				     in_code == COMPARE);
	}

      /* On machines without logical shifts, if the operand of the AND is
	 a logical shift and our mask turns off all the propagated sign
	 bits, we can replace the logical shift with an arithmetic shift.  */
      else if (GET_CODE (XEXP (x, 0)) == LSHIFTRT
	       && !have_insn_for (LSHIFTRT, mode)
	       && have_insn_for (ASHIFTRT, mode)
	       && CONST_INT_P (XEXP (XEXP (x, 0), 1))
	       && INTVAL (XEXP (XEXP (x, 0), 1)) >= 0
	       && INTVAL (XEXP (XEXP (x, 0), 1)) < HOST_BITS_PER_WIDE_INT
	       && mode_width <= HOST_BITS_PER_WIDE_INT)
	{
	  unsigned HOST_WIDE_INT mask = GET_MODE_MASK (mode);

	  mask >>= INTVAL (XEXP (XEXP (x, 0), 1));
	  if ((INTVAL (XEXP (x, 1)) & ~mask) == 0)
	    SUBST (XEXP (x, 0),
		   gen_rtx_ASHIFTRT (mode,
				     make_compound_operation (XEXP (XEXP (x,
									  0),
								    0),
							      next_code),
				     XEXP (XEXP (x, 0), 1)));
	}

      /* If the constant is one less than a power of two, this might be
	 representable by an extraction even if no shift is present.
	 If it doesn't end up being a ZERO_EXTEND, we will ignore it unless
	 we are in a COMPARE.  */
      else if ((i = exact_log2 (UINTVAL (XEXP (x, 1)) + 1)) >= 0)
	new_rtx = make_extraction (mode,
				   make_compound_operation (XEXP (x, 0),
							    next_code),
				   0, NULL_RTX, i,
				   true, false, in_code == COMPARE);

      /* If we are in a comparison and this is an AND with a power of two,
	 convert this into the appropriate bit extract.  The extracted bit
	 may only be the sign bit (i == mode_width - 1) for an equality
	 comparison.  */
      else if (in_code == COMPARE
	       && (i = exact_log2 (UINTVAL (XEXP (x, 1)))) >= 0
	       && (equality_comparison || i < GET_MODE_PRECISION (mode) - 1))
	new_rtx = make_extraction (mode,
				   make_compound_operation (XEXP (x, 0),
							    next_code),
				   i, NULL_RTX, 1, true, false, true);

      /* If the one operand is a paradoxical subreg of a register or memory and
	 the constant (limited to the smaller mode) has only zero bits where
	 the sub expression has known zero bits, this can be expressed as
	 a zero_extend.  */
      else if (GET_CODE (XEXP (x, 0)) == SUBREG)
	{
	  rtx sub;

	  sub = XEXP (XEXP (x, 0), 0);
	  machine_mode sub_mode = GET_MODE (sub);
	  int sub_width;
	  if ((REG_P (sub) || MEM_P (sub))
	      && GET_MODE_PRECISION (sub_mode).is_constant (&sub_width)
	      && sub_width < mode_width
	      && (!WORD_REGISTER_OPERATIONS
		  || sub_width >= BITS_PER_WORD
		  /* On WORD_REGISTER_OPERATIONS targets the bits
		     beyond sub_mode aren't considered undefined,
		     so optimize only if it is a MEM load when MEM loads
		     zero extend, because then the upper bits are all zero.  */
		  || (MEM_P (sub)
		      && load_extend_op (sub_mode) == ZERO_EXTEND)))
	    {
	      unsigned HOST_WIDE_INT mode_mask = GET_MODE_MASK (sub_mode);
	      unsigned HOST_WIDE_INT mask;

	      /* Original AND constant with all the known zero bits set.  */
	      mask = UINTVAL (XEXP (x, 1)) | (~nonzero_bits (sub, sub_mode));
	      if ((mask & mode_mask) == mode_mask)
		{
		  new_rtx = make_compound_operation (sub, next_code);
		  new_rtx = make_extraction (mode, new_rtx, 0, 0, sub_width,
					     true, false, in_code == COMPARE);
		}
	    }
	}

      break;

    case LSHIFTRT:
      /* If the sign bit is known to be zero, replace this with an
	 arithmetic shift.  */
      if (have_insn_for (ASHIFTRT, mode)
	  && ! have_insn_for (LSHIFTRT, mode)
	  && mode_width <= HOST_BITS_PER_WIDE_INT
	  && (nonzero_bits (XEXP (x, 0), mode) & (1 << (mode_width - 1))) == 0)
	{
	  new_rtx = gen_rtx_ASHIFTRT (mode,
				      make_compound_operation (XEXP (x, 0),
							       next_code),
				      XEXP (x, 1));
	  break;
	}

      /* fall through */

    case ASHIFTRT:
      lhs = XEXP (x, 0);
      rhs = XEXP (x, 1);

      /* If we have (ashiftrt (ashift foo C1) C2) with C2 >= C1,
	 this is a SIGN_EXTRACT.  */
      if (CONST_INT_P (rhs)
	  && GET_CODE (lhs) == ASHIFT
	  && CONST_INT_P (XEXP (lhs, 1))
	  && INTVAL (rhs) >= INTVAL (XEXP (lhs, 1))
	  && INTVAL (XEXP (lhs, 1)) >= 0
	  && INTVAL (rhs) < mode_width)
	{
	  new_rtx = make_compound_operation (XEXP (lhs, 0), next_code);
	  /* A ZERO_EXTRACT instead when we came here via LSHIFTRT.  */
	  new_rtx = make_extraction (mode, new_rtx,
				     INTVAL (rhs) - INTVAL (XEXP (lhs, 1)),
				     NULL_RTX, mode_width - INTVAL (rhs),
				     code == LSHIFTRT, false,
				     in_code == COMPARE);
	  break;
	}

      /* See if we have operations between an ASHIFTRT and an ASHIFT.
	 If so, try to merge the shifts into a SIGN_EXTEND.  We could
	 also do this for some cases of SIGN_EXTRACT, but it doesn't
	 seem worth the effort; the case checked for occurs on Alpha.  */

      if (!OBJECT_P (lhs)
	  && ! (GET_CODE (lhs) == SUBREG
		&& (OBJECT_P (SUBREG_REG (lhs))))
	  && CONST_INT_P (rhs)
	  && INTVAL (rhs) >= 0
	  && INTVAL (rhs) < HOST_BITS_PER_WIDE_INT
	  && INTVAL (rhs) < mode_width
	  && (new_rtx = extract_left_shift (mode, lhs, INTVAL (rhs))) != 0)
	new_rtx = make_extraction (mode, make_compound_operation (new_rtx,
								  next_code),
				   0, NULL_RTX, mode_width - INTVAL (rhs),
				   code == LSHIFTRT, false, in_code == COMPARE);

      break;

    case SUBREG:
      /* Call ourselves recursively on the inner expression.  If we are
	 narrowing the object and it has a different RTL code from
	 what it originally did, do this SUBREG as a force_to_mode.  */
      {
	rtx inner = SUBREG_REG (x), simplified;
	enum rtx_code subreg_code = in_code;

	/* If the SUBREG is masking of a logical right shift,
	   make an extraction.  */
	if (GET_CODE (inner) == LSHIFTRT
	    && is_a <scalar_int_mode> (GET_MODE (inner), &inner_mode)
	    && GET_MODE_SIZE (mode) < GET_MODE_SIZE (inner_mode)
	    && CONST_INT_P (XEXP (inner, 1))
	    && UINTVAL (XEXP (inner, 1)) < GET_MODE_PRECISION (inner_mode)
	    && subreg_lowpart_p (x))
	  {
	    new_rtx = make_compound_operation (XEXP (inner, 0), next_code);
	    int width = GET_MODE_PRECISION (inner_mode)
			- INTVAL (XEXP (inner, 1));
	    if (width > mode_width)
	      width = mode_width;
	    new_rtx = make_extraction (mode, new_rtx, 0, XEXP (inner, 1),
				       width, true, false, in_code == COMPARE);
	    break;
	  }

	/* If in_code is COMPARE, it isn't always safe to pass it through
	   to the recursive make_compound_operation call.  */
	if (subreg_code == COMPARE
	    && (!subreg_lowpart_p (x)
		|| GET_CODE (inner) == SUBREG
		/* (subreg:SI (and:DI (reg:DI) (const_int 0x800000000)) 0)
		   is (const_int 0), rather than
		   (subreg:SI (lshiftrt:DI (reg:DI) (const_int 35)) 0).
		   Similarly (subreg:QI (and:SI (reg:SI) (const_int 0x80)) 0)
		   for non-equality comparisons against 0 is not equivalent
		   to (subreg:QI (lshiftrt:SI (reg:SI) (const_int 7)) 0).  */
		|| (GET_CODE (inner) == AND
		    && CONST_INT_P (XEXP (inner, 1))
		    && partial_subreg_p (x)
		    && exact_log2 (UINTVAL (XEXP (inner, 1)))
		       >= GET_MODE_BITSIZE (mode) - 1)))
	  subreg_code = SET;

	tem = make_compound_operation (inner, subreg_code);

	simplified
	  = simplify_subreg (mode, tem, GET_MODE (inner), SUBREG_BYTE (x));
	if (simplified)
	  tem = simplified;

	if (GET_CODE (tem) != GET_CODE (inner)
	    && partial_subreg_p (x)
	    && subreg_lowpart_p (x))
	  {
	    rtx newer
	      = force_to_mode (tem, mode, HOST_WIDE_INT_M1U, false);

	    /* If we have something other than a SUBREG, we might have
	       done an expansion, so rerun ourselves.  */
	    if (GET_CODE (newer) != SUBREG)
	      newer = make_compound_operation (newer, in_code);

	    /* force_to_mode can expand compounds.  If it just re-expanded
	       the compound, use gen_lowpart to convert to the desired
	       mode.  */
	    if (rtx_equal_p (newer, x)
		/* Likewise if it re-expanded the compound only partially.
		   This happens for SUBREG of ZERO_EXTRACT if they extract
		   the same number of bits.  */
		|| (GET_CODE (newer) == SUBREG
		    && (GET_CODE (SUBREG_REG (newer)) == LSHIFTRT
			|| GET_CODE (SUBREG_REG (newer)) == ASHIFTRT)
		    && GET_CODE (inner) == AND
		    && rtx_equal_p (SUBREG_REG (newer), XEXP (inner, 0))))
	      return gen_lowpart (GET_MODE (x), tem);

	    return newer;
	  }

	if (simplified)
	  return tem;
      }
      break;

    default:
      break;
    }

  /* A replacement was built: store its lowpart back through X_PTR; the
     caller returns it directly.  Otherwise tell the caller to recurse on
     the operands with NEXT_CODE by returning null.  */
  if (new_rtx)
    *x_ptr = gen_lowpart (mode, new_rtx);
  *next_code_ptr = next_code;
  return NULL_RTX;
}
8488 : :
8489 : : /* Look at the expression rooted at X. Look for expressions
8490 : : equivalent to ZERO_EXTRACT, SIGN_EXTRACT, ZERO_EXTEND, SIGN_EXTEND.
8491 : : Form these expressions.
8492 : :
8493 : : Return the new rtx, usually just X.
8494 : :
8495 : : Also, for machines like the VAX that don't have logical shift insns,
8496 : : try to convert logical to arithmetic shift operations in cases where
8497 : : they are equivalent. This undoes the canonicalizations to logical
8498 : : shifts done elsewhere.
8499 : :
8500 : : We try, as much as possible, to re-use rtl expressions to save memory.
8501 : :
8502 : : IN_CODE says what kind of expression we are processing. Normally, it is
8503 : : SET. In a memory address it is MEM. When processing the arguments of
8504 : : a comparison or a COMPARE against zero, it is COMPARE, or EQ if more
8505 : : precisely it is an equality comparison against zero. */
8506 : :
8507 : : rtx
8508 : 470935544 : make_compound_operation (rtx x, enum rtx_code in_code)
8509 : : {
8510 : 470935544 : enum rtx_code code = GET_CODE (x);
8511 : 470935544 : const char *fmt;
8512 : 470935544 : int i, j;
8513 : 470935544 : enum rtx_code next_code;
8514 : 470935544 : rtx new_rtx, tem;
8515 : :
8516 : : /* Select the code to be used in recursive calls. Once we are inside an
8517 : : address, we stay there. If we have a comparison, set to COMPARE,
8518 : : but once inside, go back to our default of SET. */
8519 : :
8520 : 470935544 : next_code = (code == MEM ? MEM
8521 : 442820255 : : ((code == COMPARE || COMPARISON_P (x))
8522 : 463182890 : && XEXP (x, 1) == const0_rtx) ? COMPARE
8523 : 434806244 : : in_code == COMPARE || in_code == EQ ? SET : in_code);
8524 : :
 : : /* For scalar integer modes, let make_compound_operation_int do the
 : : real work first. A nonnull result is final; a null result means
 : : X (and NEXT_CODE) may have been updated in place, so refresh CODE
 : : and fall through to the generic operand recursion below. */
8525 : 470935544 : scalar_int_mode mode;
8526 : 470935544 : if (is_a <scalar_int_mode> (GET_MODE (x), &mode))
8527 : : {
8528 : 275998242 : rtx new_rtx = make_compound_operation_int (mode, &x, in_code,
8529 : : &next_code);
8530 : 275998242 : if (new_rtx)
8531 : : return new_rtx;
8532 : 221334866 : code = GET_CODE (x);
8533 : : }
8534 : :
8535 : : /* Now recursively process each operand of this operation. We need to
8536 : : handle ZERO_EXTEND specially so that we don't lose track of the
8537 : : inner mode. */
8538 : 416272168 : if (code == ZERO_EXTEND)
8539 : : {
8540 : 3355112 : new_rtx = make_compound_operation (XEXP (x, 0), next_code);
8541 : 6710224 : tem = simplify_unary_operation (ZERO_EXTEND, GET_MODE (x),
8542 : 3355112 : new_rtx, GET_MODE (XEXP (x, 0)));
8543 : 3355112 : if (tem)
8544 : : return tem;
8545 : 3344887 : SUBST (XEXP (x, 0), new_rtx);
8546 : 3344887 : return x;
8547 : : }
8548 : :
 : : /* Generic walk: recurse into every rtx ('e') operand and every
 : : rtx-vector ('E') element, substituting the rewritten forms. */
8549 : 412917056 : fmt = GET_RTX_FORMAT (code);
8550 : 960387852 : for (i = 0; i < GET_RTX_LENGTH (code); i++)
8551 : 547470796 : if (fmt[i] == 'e')
8552 : : {
8553 : 210123449 : new_rtx = make_compound_operation (XEXP (x, i), next_code);
8554 : 210123449 : SUBST (XEXP (x, i), new_rtx);
8555 : : }
8556 : 337347347 : else if (fmt[i] == 'E')
8557 : 25254405 : for (j = 0; j < XVECLEN (x, i); j++)
8558 : : {
8559 : 18274988 : new_rtx = make_compound_operation (XVECEXP (x, i, j), next_code);
8560 : 18274988 : SUBST (XVECEXP (x, i, j), new_rtx);
8561 : : }
8562 : :
 : : /* Substitution may have left a commutative operation with its
 : : operands in non-canonical order; restore canonical order. */
8563 : 412917056 : maybe_swap_commutative_operands (x);
8564 : 412917056 : return x;
8565 : : }
8566 : :
8567 : : /* Given M see if it is a value that would select a field of bits
8568 : : within an item, but not the entire word. Return -1 if not.
8569 : : Otherwise, return the starting position of the field, where 0 is the
8570 : : low-order bit.
8571 : :
8572 : : *PLEN is set to the length of the field. */
8573 : :
8574 : : static int
8575 : 9683 : get_pos_from_mask (unsigned HOST_WIDE_INT m, unsigned HOST_WIDE_INT *plen)
8576 : : {
8577 : : /* Get the bit number of the first 1 bit from the right, -1 if none. */
8578 : 9683 : int pos = m ? ctz_hwi (m) : -1;
8579 : 9683 : int len = 0;
8580 : :
8581 : 9683 : if (pos >= 0)
8582 : : /* Now shift off the low-order zero bits and see if we have a
8583 : : power of two minus 1. */
8584 : 9683 : len = exact_log2 ((m >> pos) + 1);
8585 : :
 : : /* E.g. m == 0x70 yields pos == 4 and len == 3. A mask whose set
 : : bits do not form one contiguous run fails the power-of-two-minus-1
 : : test above (len <= 0) and is rejected here. */
8586 : 7109 : if (len <= 0)
8587 : : pos = -1;
8588 : :
8589 : 9683 : *plen = len;
8590 : 9683 : return pos;
8591 : : }
8592 : :
8593 : : /* If X refers to a register that equals REG in value, replace these
8594 : : references with REG. */
8595 : : static rtx
8596 : 10090 : canon_reg_for_combine (rtx x, rtx reg)
8597 : : {
8598 : 10090 : rtx op0, op1, op2;
8599 : 10090 : const char *fmt;
8600 : 10090 : int i;
8601 : 10090 : bool copied;
8602 : :
 : : /* Dispatch on the rtx class: arithmetic/comparison/ternary classes
 : : recurse into their fixed operand slots and rebuild via the
 : : simplify_gen_* routines only when an operand actually changed. */
8603 : 10090 : enum rtx_code code = GET_CODE (x);
8604 : 10090 : switch (GET_RTX_CLASS (code))
8605 : : {
8606 : 0 : case RTX_UNARY:
8607 : 0 : op0 = canon_reg_for_combine (XEXP (x, 0), reg);
8608 : 0 : if (op0 != XEXP (x, 0))
8609 : 0 : return simplify_gen_unary (GET_CODE (x), GET_MODE (x), op0,
8610 : 0 : GET_MODE (reg));
8611 : : break;
8612 : :
8613 : 1758 : case RTX_BIN_ARITH:
8614 : 1758 : case RTX_COMM_ARITH:
8615 : 1758 : op0 = canon_reg_for_combine (XEXP (x, 0), reg);
8616 : 1758 : op1 = canon_reg_for_combine (XEXP (x, 1), reg);
8617 : 1758 : if (op0 != XEXP (x, 0) || op1 != XEXP (x, 1))
8618 : 0 : return simplify_gen_binary (GET_CODE (x), GET_MODE (x), op0, op1);
8619 : : break;
8620 : :
8621 : 14 : case RTX_COMPARE:
8622 : 14 : case RTX_COMM_COMPARE:
8623 : 14 : op0 = canon_reg_for_combine (XEXP (x, 0), reg);
8624 : 14 : op1 = canon_reg_for_combine (XEXP (x, 1), reg);
8625 : 14 : if (op0 != XEXP (x, 0) || op1 != XEXP (x, 1))
8626 : 0 : return simplify_gen_relational (GET_CODE (x), GET_MODE (x),
8627 : 0 : GET_MODE (op0), op0, op1);
8628 : : break;
8629 : :
8630 : 0 : case RTX_TERNARY:
8631 : 0 : case RTX_BITFIELD_OPS:
8632 : 0 : op0 = canon_reg_for_combine (XEXP (x, 0), reg);
8633 : 0 : op1 = canon_reg_for_combine (XEXP (x, 1), reg);
8634 : 0 : op2 = canon_reg_for_combine (XEXP (x, 2), reg);
8635 : 0 : if (op0 != XEXP (x, 0) || op1 != XEXP (x, 1) || op2 != XEXP (x, 2))
8636 : 0 : return simplify_gen_ternary (GET_CODE (x), GET_MODE (x),
8637 : 0 : GET_MODE (op0), op0, op1, op2);
8638 : : /* FALLTHRU */
8639 : :
8640 : 6045 : case RTX_OBJ:
8641 : 6045 : if (REG_P (x))
8642 : : {
 : : /* X and REG are known to hold the same value when combine's
 : : recorded last value of one equals the other; canonicalize
 : : such references to REG itself. */
8643 : 6039 : if (rtx_equal_p (get_last_value (reg), x)
8644 : 6039 : || rtx_equal_p (reg, get_last_value (x)))
8645 : 0 : return reg;
8646 : : else
8647 : : break;
8648 : : }
8649 : :
8650 : : /* fall through */
8651 : :
8652 : 2279 : default:
 : : /* Generic walk for any other rtx: canonicalize each 'e' operand
 : : and 'E' vector element, copying X lazily (copy-on-write) so
 : : the original rtx is shared until a replacement happens. */
8653 : 2279 : fmt = GET_RTX_FORMAT (code);
8654 : 2279 : copied = false;
8655 : 4619 : for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
8656 : 2340 : if (fmt[i] == 'e')
8657 : : {
8658 : 61 : rtx op = canon_reg_for_combine (XEXP (x, i), reg);
8659 : 61 : if (op != XEXP (x, i))
8660 : : {
8661 : 0 : if (!copied)
8662 : : {
8663 : 0 : copied = true;
8664 : 0 : x = copy_rtx (x);
8665 : : }
8666 : 0 : XEXP (x, i) = op;
8667 : : }
8668 : : }
8669 : 2279 : else if (fmt[i] == 'E')
8670 : : {
8671 : : int j;
8672 : 0 : for (j = 0; j < XVECLEN (x, i); j++)
8673 : : {
8674 : 0 : rtx op = canon_reg_for_combine (XVECEXP (x, i, j), reg);
8675 : 0 : if (op != XVECEXP (x, i, j))
8676 : : {
8677 : 0 : if (!copied)
8678 : : {
8679 : 0 : copied = true;
8680 : 0 : x = copy_rtx (x);
8681 : : }
8682 : 0 : XVECEXP (x, i, j) = op;
8683 : : }
8684 : : }
8685 : : }
8686 : :
8687 : : break;
8688 : : }
8689 : :
8690 : : return x;
8691 : : }
8692 : :
8693 : : /* Return X converted to MODE. If the value is already truncated to
8694 : : MODE we can just return a subreg even though in the general case we
8695 : : would need an explicit truncation. */
8696 : :
8697 : : static rtx
8698 : 114804345 : gen_lowpart_or_truncate (machine_mode mode, rtx x)
8699 : : {
 : : /* An explicit TRUNCATE is needed only when all of these hold: X is
 : : not a constant, we are actually narrowing to MODE, the target's
 : : truncation from X's mode is not a no-op, and X is not a register
 : : already known to be truncated to MODE. Otherwise a plain lowpart
 : : SUBREG (below) is sufficient. */
8700 : 114804345 : if (!CONST_INT_P (x)
8701 : 109193868 : && partial_subreg_p (mode, GET_MODE (x))
8702 : 114804345 : && !TRULY_NOOP_TRUNCATION_MODES_P (mode, GET_MODE (x))
8703 : 114804345 : && !(REG_P (x) && reg_truncated_to_mode (mode, x)))
8704 : : {
8705 : : /* Bit-cast X into an integer mode. */
8706 : 0 : if (!SCALAR_INT_MODE_P (GET_MODE (x)))
8707 : 0 : x = gen_lowpart (int_mode_for_mode (GET_MODE (x)).require (), x);
8708 : 0 : x = simplify_gen_unary (TRUNCATE, int_mode_for_mode (mode).require (),
8709 : 0 : x, GET_MODE (x));
8710 : : }
8711 : :
8712 : 114804345 : return gen_lowpart (mode, x);
8713 : : }
8714 : :
8715 : : /* See if X can be simplified knowing that we will only refer to it in
8716 : : MODE and will only refer to those bits that are nonzero in MASK.
8717 : : If other bits are being computed or if masking operations are done
8718 : : that select a superset of the bits in MASK, they can sometimes be
8719 : : ignored.
8720 : :
8721 : : Return a possibly simplified expression, but always convert X to
8722 : : MODE. If X is a CONST_INT, AND the CONST_INT with MASK.
8723 : :
8724 : : If JUST_SELECT is true, don't optimize by noticing that bits in MASK
8725 : : are all off in X. This is used when X will be complemented, by either
8726 : : NOT, NEG, or XOR. */
8727 : :
8728 : : static rtx
8729 : 85991140 : force_to_mode (rtx x, machine_mode mode, unsigned HOST_WIDE_INT mask,
8730 : : bool just_select)
8731 : : {
8732 : 85991140 : enum rtx_code code = GET_CODE (x);
 : : /* If X itself is a complementing operation (XOR/NOT/NEG), its
 : : operand must keep even the bits MASK turns off (see the function
 : : comment on JUST_SELECT), so the recursive calls below propagate
 : : NEXT_SELECT instead of JUST_SELECT. */
8733 : 85991140 : bool next_select = just_select || code == XOR || code == NOT || code == NEG;
8734 : 85991140 : machine_mode op_mode;
8735 : 85991140 : unsigned HOST_WIDE_INT nonzero;
8736 : :
8737 : : /* If this is a CALL or ASM_OPERANDS, don't do anything. Some of the
8738 : : code below will do the wrong thing since the mode of such an
8739 : : expression is VOIDmode.
8740 : :
8741 : : Also do nothing if X is a CLOBBER; this can happen if X was
8742 : : the return value from a call to gen_lowpart. */
8743 : 85991140 : if (code == CALL || code == ASM_OPERANDS || code == CLOBBER)
8744 : : return x;
8745 : :
8746 : : /* We want to perform the operation in its present mode unless we know
8747 : : that the operation is valid in MODE, in which case we do the operation
8748 : : in MODE. */
8749 : 140749298 : op_mode = ((GET_MODE_CLASS (mode) == GET_MODE_CLASS (GET_MODE (x))
8750 : 79849334 : && have_insn_for (code, mode))
8751 : 134684428 : ? mode : GET_MODE (x));
8752 : :
8753 : : /* It is not valid to do a right-shift in a narrower mode
8754 : : than the one it came in with. */
8755 : 85914204 : if ((code == LSHIFTRT || code == ASHIFTRT)
8756 : 85914204 : && partial_subreg_p (mode, GET_MODE (x)))
8757 : 361702 : op_mode = GET_MODE (x);
8758 : :
8759 : : /* Truncate MASK to fit OP_MODE. */
8760 : 85914204 : if (op_mode)
8761 : 79880513 : mask &= GET_MODE_MASK (op_mode);
8762 : :
8763 : : /* Determine what bits of X are guaranteed to be (non)zero. */
8764 : 85914204 : nonzero = nonzero_bits (x, mode);
8765 : :
8766 : : /* If none of the bits in X are needed, return a zero. */
8767 : 85914204 : if (!just_select && (nonzero & mask) == 0 && !side_effects_p (x))
8768 : 608885 : x = const0_rtx;
8769 : :
8770 : : /* If X is a CONST_INT, return a new one. Do this here since the
8771 : : test below will fail. */
8772 : 85914204 : if (CONST_INT_P (x))
8773 : : {
8774 : 6228143 : if (SCALAR_INT_MODE_P (mode))
8775 : 6228143 : return gen_int_mode (INTVAL (x) & mask, mode);
8776 : : else
8777 : : {
8778 : 0 : x = GEN_INT (INTVAL (x) & mask);
8779 : 0 : return gen_lowpart_common (mode, x);
8780 : : }
8781 : : }
8782 : :
8783 : : /* If X is narrower than MODE and we want all the bits in X's mode, just
8784 : : get X in the proper mode. */
8785 : 79686061 : if (paradoxical_subreg_p (mode, GET_MODE (x))
8786 : 79686061 : && (GET_MODE_MASK (GET_MODE (x)) & ~mask) == 0)
8787 : 3126237 : return gen_lowpart (mode, x);
8788 : :
8789 : : /* We can ignore the effect of a SUBREG if it narrows the mode or
8790 : : if the constant masks to zero all the bits the mode doesn't have. */
8791 : 76559824 : if (GET_CODE (x) == SUBREG
8792 : 6822198 : && subreg_lowpart_p (x)
8793 : 83228021 : && (partial_subreg_p (x)
8794 : 5120327 : || (mask
8795 : 5120327 : & GET_MODE_MASK (GET_MODE (x))
8796 : 5120327 : & ~GET_MODE_MASK (GET_MODE (SUBREG_REG (x)))) == 0))
8797 : 6641925 : return force_to_mode (SUBREG_REG (x), mode, mask, next_select);
8798 : :
8799 : 69917899 : scalar_int_mode int_mode, xmode;
8800 : 69917899 : if (is_a <scalar_int_mode> (mode, &int_mode)
8801 : 69917899 : && is_a <scalar_int_mode> (GET_MODE (x), &xmode))
8802 : : /* OP_MODE is either MODE or XMODE, so it must be a scalar
8803 : : integer too. */
8804 : 69887117 : return force_int_to_mode (x, int_mode, xmode,
8805 : : as_a <scalar_int_mode> (op_mode),
8806 : 69887117 : mask, just_select);
8807 : :
 : : /* Non-scalar-integer fallback: just convert X to MODE. */
8808 : 30782 : return gen_lowpart_or_truncate (mode, x);
8809 : : }
8810 : :
8811 : : /* Subroutine of force_to_mode that handles cases in which both X and
8812 : : the result are scalar integers. MODE is the mode of the result,
8813 : : XMODE is the mode of X, and OP_MODE says which of MODE or XMODE
8814 : : is preferred for simplified versions of X. The other arguments
8815 : : are as for force_to_mode. */
8816 : :
8817 : : static rtx
8818 : 69887117 : force_int_to_mode (rtx x, scalar_int_mode mode, scalar_int_mode xmode,
8819 : : scalar_int_mode op_mode, unsigned HOST_WIDE_INT mask,
8820 : : bool just_select)
8821 : : {
8822 : 69887117 : enum rtx_code code = GET_CODE (x);
8823 : 69887117 : bool next_select = just_select || code == XOR || code == NOT || code == NEG;
8824 : 69887117 : unsigned HOST_WIDE_INT fuller_mask;
8825 : 69887117 : rtx op0, op1, temp;
8826 : 69887117 : poly_int64 const_op0;
8827 : :
8828 : : /* When we have an arithmetic operation, or a shift whose count we
8829 : : do not know, we need to assume that all bits up to the highest-order
8830 : : bit in MASK will be needed. This is how we form such a mask. */
8831 : 69887117 : if (mask & (HOST_WIDE_INT_1U << (HOST_BITS_PER_WIDE_INT - 1)))
8832 : : fuller_mask = HOST_WIDE_INT_M1U;
8833 : : else
8834 : 77462757 : fuller_mask = ((HOST_WIDE_INT_1U << (floor_log2 (mask) + 1)) - 1);
8835 : :
8836 : 69887117 : switch (code)
8837 : : {
8838 : : case CLOBBER:
8839 : : /* If X is a (clobber (const_int)), return it since we know we are
8840 : : generating something that won't match. */
8841 : : return x;
8842 : :
8843 : 322278 : case SIGN_EXTEND:
8844 : 322278 : case ZERO_EXTEND:
8845 : 322278 : case ZERO_EXTRACT:
8846 : 322278 : case SIGN_EXTRACT:
8847 : 322278 : x = expand_compound_operation (x);
8848 : 322278 : if (GET_CODE (x) != code)
8849 : 191965 : return force_to_mode (x, mode, mask, next_select);
8850 : : break;
8851 : :
8852 : 141 : case TRUNCATE:
8853 : : /* Similarly for a truncate. */
8854 : 141 : return force_to_mode (XEXP (x, 0), mode, mask, next_select);
8855 : :
8856 : 3525438 : case AND:
8857 : : /* If this is an AND with a constant, convert it into an AND
8858 : : whose constant is the AND of that constant with MASK. If it
8859 : : remains an AND of MASK, delete it since it is redundant. */
8860 : :
8861 : 3525438 : if (CONST_INT_P (XEXP (x, 1)))
8862 : : {
8863 : 5598608 : x = simplify_and_const_int (x, op_mode, XEXP (x, 0),
8864 : 2799304 : mask & INTVAL (XEXP (x, 1)));
8865 : 2799304 : xmode = op_mode;
8866 : :
8867 : : /* If X is still an AND, see if it is an AND with a mask that
8868 : : is just some low-order bits. If so, and it is MASK, we don't
8869 : : need it. */
8870 : :
8871 : 2778315 : if (GET_CODE (x) == AND && CONST_INT_P (XEXP (x, 1))
8872 : 5577619 : && (INTVAL (XEXP (x, 1)) & GET_MODE_MASK (xmode)) == mask)
8873 : 19294 : x = XEXP (x, 0);
8874 : :
8875 : : /* If it remains an AND, try making another AND with the bits
8876 : : in the mode mask that aren't in MASK turned on. If the
8877 : : constant in the AND is wide enough, this might make a
8878 : : cheaper constant. */
8879 : :
8880 : 2759025 : if (GET_CODE (x) == AND && CONST_INT_P (XEXP (x, 1))
8881 : 2759021 : && GET_MODE_MASK (xmode) != mask
8882 : 2879778 : && HWI_COMPUTABLE_MODE_P (xmode))
8883 : : {
8884 : 80474 : unsigned HOST_WIDE_INT cval
8885 : 80474 : = UINTVAL (XEXP (x, 1)) | (GET_MODE_MASK (xmode) & ~mask);
8886 : 80474 : rtx y;
8887 : :
8888 : 80474 : y = simplify_gen_binary (AND, xmode, XEXP (x, 0),
8889 : 80474 : gen_int_mode (cval, xmode));
8890 : 80474 : if (set_src_cost (y, xmode, optimize_this_for_speed_p)
8891 : 80474 : < set_src_cost (x, xmode, optimize_this_for_speed_p))
8892 : 69602099 : x = y;
8893 : : }
8894 : :
8895 : : break;
8896 : : }
8897 : :
8898 : 726134 : goto binop;
8899 : :
8900 : 9675431 : case PLUS:
8901 : : /* In (and (plus FOO C1) M), if M is a mask that just turns off
8902 : : low-order bits (as in an alignment operation) and FOO is already
8903 : : aligned to that boundary, mask C1 to that boundary as well.
8904 : : This may eliminate that PLUS and, later, the AND. */
8905 : :
8906 : 9675431 : {
8907 : 9675431 : unsigned int width = GET_MODE_PRECISION (mode);
8908 : 9675431 : unsigned HOST_WIDE_INT smask = mask;
8909 : :
8910 : : /* If MODE is narrower than HOST_WIDE_INT and mask is a negative
8911 : : number, sign extend it. */
8912 : :
8913 : 9675431 : if (width < HOST_BITS_PER_WIDE_INT
8914 : 2991908 : && (smask & (HOST_WIDE_INT_1U << (width - 1))) != 0)
8915 : 2671127 : smask |= HOST_WIDE_INT_M1U << width;
8916 : :
8917 : 9675431 : if (CONST_INT_P (XEXP (x, 1))
8918 : 3610763 : && pow2p_hwi (- smask)
8919 : 3067977 : && (nonzero_bits (XEXP (x, 0), mode) & ~smask) == 0
8920 : 12360160 : && (INTVAL (XEXP (x, 1)) & ~smask) != 0)
8921 : 11460 : return force_to_mode (plus_constant (xmode, XEXP (x, 0),
8922 : 11460 : (INTVAL (XEXP (x, 1)) & smask)),
8923 : : mode, smask, next_select);
8924 : : }
8925 : :
8926 : : /* fall through */
8927 : :
8928 : 11505675 : case MULT:
8929 : : /* Substituting into the operands of a widening MULT is not likely to
8930 : : create RTL matching a machine insn. */
8931 : 11505675 : if (code == MULT
8932 : 1841704 : && (GET_CODE (XEXP (x, 0)) == ZERO_EXTEND
8933 : 1841704 : || GET_CODE (XEXP (x, 0)) == SIGN_EXTEND)
8934 : 82075 : && (GET_CODE (XEXP (x, 1)) == ZERO_EXTEND
8935 : 82075 : || GET_CODE (XEXP (x, 1)) == SIGN_EXTEND)
8936 : 39718 : && REG_P (XEXP (XEXP (x, 0), 0))
8937 : 30042 : && REG_P (XEXP (XEXP (x, 1), 0)))
8938 : 22298 : return gen_lowpart_or_truncate (mode, x);
8939 : :
8940 : : /* For PLUS, MINUS and MULT, we need any bits less significant than the
8941 : : most significant bit in MASK since carries from those bits will
8942 : : affect the bits we are interested in. */
8943 : 11483377 : mask = fuller_mask;
8944 : 11483377 : goto binop;
8945 : :
8946 : 2216582 : case MINUS:
8947 : : /* If X is (minus C Y) where C's least set bit is larger than any bit
8948 : : in the mask, then we may replace with (neg Y). */
8949 : 2216582 : if (poly_int_rtx_p (XEXP (x, 0), &const_op0)
8950 : 169570 : && known_alignment (poly_uint64 (const_op0)) > mask)
8951 : : {
8952 : 19 : x = simplify_gen_unary (NEG, xmode, XEXP (x, 1), xmode);
8953 : 19 : return force_to_mode (x, mode, mask, next_select);
8954 : : }
8955 : :
8956 : : /* Similarly, if C contains every bit in the fuller_mask, then we may
8957 : : replace with (not Y). */
8958 : 2216563 : if (CONST_INT_P (XEXP (x, 0))
8959 : 169551 : && ((UINTVAL (XEXP (x, 0)) | fuller_mask) == UINTVAL (XEXP (x, 0))))
8960 : : {
8961 : 455 : x = simplify_gen_unary (NOT, xmode, XEXP (x, 1), xmode);
8962 : 455 : return force_to_mode (x, mode, mask, next_select);
8963 : : }
8964 : :
8965 : 2216108 : mask = fuller_mask;
8966 : 2216108 : goto binop;
8967 : :
8968 : 2515258 : case IOR:
8969 : 2515258 : case XOR:
8970 : : /* If X is (ior (lshiftrt FOO C1) C2), try to commute the IOR and
8971 : : LSHIFTRT so we end up with an (and (lshiftrt (ior ...) ...) ...)
8972 : : operation which may be a bitfield extraction. Ensure that the
8973 : : constant we form is not wider than the mode of X. */
8974 : :
8975 : 2515258 : if (GET_CODE (XEXP (x, 0)) == LSHIFTRT
8976 : 73765 : && CONST_INT_P (XEXP (XEXP (x, 0), 1))
8977 : 63684 : && INTVAL (XEXP (XEXP (x, 0), 1)) >= 0
8978 : 63684 : && INTVAL (XEXP (XEXP (x, 0), 1)) < HOST_BITS_PER_WIDE_INT
8979 : 63684 : && CONST_INT_P (XEXP (x, 1))
8980 : 7743 : && ((INTVAL (XEXP (XEXP (x, 0), 1))
8981 : 15486 : + floor_log2 (INTVAL (XEXP (x, 1))))
8982 : 7743 : < GET_MODE_PRECISION (xmode))
8983 : 2515258 : && (UINTVAL (XEXP (x, 1))
8984 : 4466 : & ~nonzero_bits (XEXP (x, 0), xmode)) == 0)
8985 : : {
8986 : 8196 : temp = gen_int_mode ((INTVAL (XEXP (x, 1)) & mask)
8987 : 4098 : << INTVAL (XEXP (XEXP (x, 0), 1)),
8988 : : xmode);
8989 : 8196 : temp = simplify_gen_binary (GET_CODE (x), xmode,
8990 : 4098 : XEXP (XEXP (x, 0), 0), temp);
8991 : 8196 : x = simplify_gen_binary (LSHIFTRT, xmode, temp,
8992 : 4098 : XEXP (XEXP (x, 0), 1));
8993 : 4098 : return force_to_mode (x, mode, mask, next_select);
8994 : : }
8995 : :
8996 : 16936779 : binop:
8997 : : /* For most binary operations, just propagate into the operation and
8998 : : change the mode if we have an operation of that mode. */
8999 : :
9000 : 16936779 : op0 = force_to_mode (XEXP (x, 0), mode, mask, next_select);
9001 : 16936779 : op1 = force_to_mode (XEXP (x, 1), mode, mask, next_select);
9002 : :
9003 : : /* If we ended up truncating both operands, truncate the result of the
9004 : : operation instead. */
9005 : 16936779 : if (GET_CODE (op0) == TRUNCATE
9006 : 0 : && GET_CODE (op1) == TRUNCATE)
9007 : : {
9008 : 0 : op0 = XEXP (op0, 0);
9009 : 0 : op1 = XEXP (op1, 0);
9010 : : }
9011 : :
9012 : 16936779 : op0 = gen_lowpart_or_truncate (op_mode, op0);
9013 : 16936779 : op1 = gen_lowpart_or_truncate (op_mode, op1);
9014 : :
9015 : 16936779 : if (op_mode != xmode || op0 != XEXP (x, 0) || op1 != XEXP (x, 1))
9016 : : {
9017 : 2010251 : x = simplify_gen_binary (code, op_mode, op0, op1);
9018 : 2010251 : xmode = op_mode;
9019 : : }
9020 : : break;
9021 : :
9022 : 4234480 : case ASHIFT:
9023 : : /* For left shifts, do the same, but just for the first operand.
9024 : : However, we cannot do anything with shifts where we cannot
9025 : : guarantee that the counts are smaller than the size of the mode
9026 : : because such a count will have a different meaning in a
9027 : : wider mode. */
9028 : :
9029 : 4037842 : if (! (CONST_INT_P (XEXP (x, 1))
9030 : 4037867 : && INTVAL (XEXP (x, 1)) >= 0
9031 : 4037842 : && INTVAL (XEXP (x, 1)) < GET_MODE_PRECISION (mode))
9032 : 4237142 : && ! (GET_MODE (XEXP (x, 1)) != VOIDmode
9033 : 196613 : && (nonzero_bits (XEXP (x, 1), GET_MODE (XEXP (x, 1)))
9034 : 196613 : < (unsigned HOST_WIDE_INT) GET_MODE_PRECISION (mode))))
9035 : : break;
9036 : :
9037 : : /* If the shift count is a constant and we can do arithmetic in
9038 : : the mode of the shift, refine which bits we need. Otherwise, use the
9039 : : conservative form of the mask. */
9040 : 4100296 : if (CONST_INT_P (XEXP (x, 1))
9041 : 4035205 : && INTVAL (XEXP (x, 1)) >= 0
9042 : 4035205 : && INTVAL (XEXP (x, 1)) < GET_MODE_PRECISION (op_mode)
9043 : 8135501 : && HWI_COMPUTABLE_MODE_P (op_mode))
9044 : 4034271 : mask >>= INTVAL (XEXP (x, 1));
9045 : : else
9046 : : mask = fuller_mask;
9047 : :
9048 : 4100296 : op0 = gen_lowpart_or_truncate (op_mode,
9049 : : force_to_mode (XEXP (x, 0), mode,
9050 : : mask, next_select));
9051 : :
9052 : 4100296 : if (op_mode != xmode || op0 != XEXP (x, 0))
9053 : : {
9054 : 1011928 : x = simplify_gen_binary (code, op_mode, op0, XEXP (x, 1));
9055 : 1011928 : xmode = op_mode;
9056 : : }
9057 : : break;
9058 : :
9059 : 3155811 : case LSHIFTRT:
9060 : : /* Here we can only do something if the shift count is a constant,
9061 : : this shift constant is valid for the host, and we can do arithmetic
9062 : : in OP_MODE. */
9063 : :
9064 : 3155811 : if (CONST_INT_P (XEXP (x, 1))
9065 : 3046415 : && INTVAL (XEXP (x, 1)) >= 0
9066 : 3046414 : && INTVAL (XEXP (x, 1)) < HOST_BITS_PER_WIDE_INT
9067 : 6202207 : && HWI_COMPUTABLE_MODE_P (op_mode))
9068 : : {
9069 : 3042416 : rtx inner = XEXP (x, 0);
9070 : 3042416 : unsigned HOST_WIDE_INT inner_mask;
9071 : :
9072 : : /* Select the mask of the bits we need for the shift operand. */
9073 : 3042416 : inner_mask = mask << INTVAL (XEXP (x, 1));
9074 : :
9075 : : /* We can only change the mode of the shift if we can do arithmetic
9076 : : in the mode of the shift and INNER_MASK is no wider than the
9077 : : width of X's mode. */
9078 : 3042416 : if ((inner_mask & ~GET_MODE_MASK (xmode)) != 0)
9079 : 302333 : op_mode = xmode;
9080 : :
9081 : 3042416 : inner = force_to_mode (inner, op_mode, inner_mask, next_select);
9082 : :
9083 : 3042416 : if (xmode != op_mode || inner != XEXP (x, 0))
9084 : : {
9085 : 819378 : x = simplify_gen_binary (LSHIFTRT, op_mode, inner, XEXP (x, 1));
9086 : 819378 : xmode = op_mode;
9087 : : }
9088 : : }
9089 : :
9090 : : /* If we have (and (lshiftrt FOO C1) C2) where the combination of the
9091 : : shift and AND produces only copies of the sign bit (C2 is one less
9092 : : than a power of two), we can do this with just a shift. */
9093 : :
9094 : 3155811 : if (GET_CODE (x) == LSHIFTRT
9095 : 3155757 : && CONST_INT_P (XEXP (x, 1))
9096 : : /* The shift puts one of the sign bit copies in the least significant
9097 : : bit. */
9098 : 6092722 : && ((INTVAL (XEXP (x, 1))
9099 : 3046361 : + num_sign_bit_copies (XEXP (x, 0), GET_MODE (XEXP (x, 0))))
9100 : 3046361 : >= GET_MODE_PRECISION (xmode))
9101 : 245907 : && pow2p_hwi (mask + 1)
9102 : : /* Number of bits left after the shift must be more than the mask
9103 : : needs. */
9104 : 73707 : && ((INTVAL (XEXP (x, 1)) + exact_log2 (mask + 1))
9105 : 73707 : <= GET_MODE_PRECISION (xmode))
9106 : : /* Must be more sign bit copies than the mask needs. */
9107 : 3184781 : && ((int) num_sign_bit_copies (XEXP (x, 0), GET_MODE (XEXP (x, 0)))
9108 : 28970 : >= exact_log2 (mask + 1)))
9109 : : {
9110 : 28970 : int nbits = GET_MODE_PRECISION (xmode) - exact_log2 (mask + 1);
9111 : 28970 : x = simplify_gen_binary (LSHIFTRT, xmode, XEXP (x, 0),
9112 : 28970 : gen_int_shift_amount (xmode, nbits));
9113 : : }
9114 : 3155811 : goto shiftrt;
9115 : :
9116 : 1884609 : case ASHIFTRT:
9117 : : /* If we are just looking for the sign bit, we don't need this shift at
9118 : : all, even if it has a variable count. */
9119 : 1884609 : if (val_signbit_p (xmode, mask))
9120 : 1282 : return force_to_mode (XEXP (x, 0), mode, mask, next_select);
9121 : :
9122 : : /* If this is a shift by a constant, get a mask that contains those bits
9123 : : that are not copies of the sign bit. We then have two cases: If
9124 : : MASK only includes those bits, this can be a logical shift, which may
9125 : : allow simplifications. If MASK is a single-bit field not within
9126 : : those bits, we are requesting a copy of the sign bit and hence can
9127 : : shift the sign bit to the appropriate location. */
9128 : :
9129 : 1883327 : if (CONST_INT_P (XEXP (x, 1)) && INTVAL (XEXP (x, 1)) >= 0
9130 : 1845185 : && INTVAL (XEXP (x, 1)) < HOST_BITS_PER_WIDE_INT)
9131 : : {
9132 : 1845074 : unsigned HOST_WIDE_INT nonzero;
9133 : 1845074 : int i;
9134 : :
9135 : : /* If the considered data is wider than HOST_WIDE_INT, we can't
9136 : : represent a mask for all its bits in a single scalar.
9137 : : But we only care about the lower bits, so calculate these. */
9138 : :
9139 : 1845074 : if (GET_MODE_PRECISION (xmode) > HOST_BITS_PER_WIDE_INT)
9140 : : {
9141 : 408 : nonzero = HOST_WIDE_INT_M1U;
9142 : :
9143 : : /* GET_MODE_PRECISION (GET_MODE (x)) - INTVAL (XEXP (x, 1))
9144 : : is the number of bits a full-width mask would have set.
9145 : : We need only shift if these are fewer than nonzero can
9146 : : hold. If not, we must keep all bits set in nonzero. */
9147 : :
9148 : 408 : if (GET_MODE_PRECISION (xmode) - INTVAL (XEXP (x, 1))
9149 : : < HOST_BITS_PER_WIDE_INT)
9150 : 0 : nonzero >>= INTVAL (XEXP (x, 1))
9151 : 0 : + HOST_BITS_PER_WIDE_INT
9152 : 0 : - GET_MODE_PRECISION (xmode);
9153 : : }
9154 : : else
9155 : : {
9156 : 1844666 : nonzero = GET_MODE_MASK (xmode);
9157 : 1844666 : nonzero >>= INTVAL (XEXP (x, 1));
9158 : : }
9159 : :
9160 : 1845074 : if ((mask & ~nonzero) == 0)
9161 : : {
9162 : 44165 : x = simplify_shift_const (NULL_RTX, LSHIFTRT, xmode,
9163 : : XEXP (x, 0), INTVAL (XEXP (x, 1)));
9164 : 44165 : if (GET_CODE (x) != ASHIFTRT)
9165 : 44165 : return force_to_mode (x, mode, mask, next_select);
9166 : : }
9167 : :
9168 : 1800909 : else if ((i = exact_log2 (mask)) >= 0)
9169 : : {
9170 : 73 : x = simplify_shift_const
9171 : 146 : (NULL_RTX, LSHIFTRT, xmode, XEXP (x, 0),
9172 : 73 : GET_MODE_PRECISION (xmode) - 1 - i);
9173 : :
9174 : 73 : if (GET_CODE (x) != ASHIFTRT)
9175 : 73 : return force_to_mode (x, mode, mask, next_select);
9176 : : }
9177 : : }
9178 : :
9179 : : /* If MASK is 1, convert this to an LSHIFTRT. This can be done
9180 : : even if the shift count isn't a constant. */
9181 : 1839089 : if (mask == 1)
9182 : 3157 : x = simplify_gen_binary (LSHIFTRT, xmode, XEXP (x, 0), XEXP (x, 1));
9183 : :
9184 : 1835932 : shiftrt:
9185 : :
9186 : : /* If this is a zero- or sign-extension operation that just affects bits
9187 : : we don't care about, remove it. Be sure the call above returned
9188 : : something that is still a shift. */
9189 : :
9190 : 4994900 : if ((GET_CODE (x) == LSHIFTRT || GET_CODE (x) == ASHIFTRT)
9191 : 4994846 : && CONST_INT_P (XEXP (x, 1))
9192 : 4847308 : && INTVAL (XEXP (x, 1)) >= 0
9193 : 4847307 : && (INTVAL (XEXP (x, 1))
9194 : 9694614 : <= GET_MODE_PRECISION (xmode) - (floor_log2 (mask) + 1))
9195 : 1786766 : && GET_CODE (XEXP (x, 0)) == ASHIFT
9196 : 4995838 : && XEXP (XEXP (x, 0), 1) == XEXP (x, 1))
9197 : 770 : return force_to_mode (XEXP (XEXP (x, 0), 0), mode, mask, next_select);
9198 : :
9199 : : break;
9200 : :
9201 : 38393 : case ROTATE:
9202 : 38393 : case ROTATERT:
9203 : : /* If the shift count is constant and we can do computations
9204 : : in the mode of X, compute where the bits we care about are.
9205 : : Otherwise, we can't do anything. Don't change the mode of
9206 : : the shift or propagate MODE into the shift, though. */
9207 : 38393 : if (CONST_INT_P (XEXP (x, 1))
9208 : 29290 : && INTVAL (XEXP (x, 1)) >= 0)
9209 : : {
9210 : 29288 : temp = simplify_binary_operation (code == ROTATE ? ROTATERT : ROTATE,
9211 : 29288 : xmode, gen_int_mode (mask, xmode),
9212 : : XEXP (x, 1));
9213 : 29288 : if (temp && CONST_INT_P (temp))
9214 : 29288 : x = simplify_gen_binary (code, xmode,
9215 : : force_to_mode (XEXP (x, 0), xmode,
9216 : 29288 : INTVAL (temp), next_select),
9217 : : XEXP (x, 1));
9218 : : }
9219 : : break;
9220 : :
9221 : 150820 : case NEG:
9222 : : /* If we just want the low-order bit, the NEG isn't needed since it
9223 : : won't change the low-order bit. */
9224 : 150820 : if (mask == 1)
9225 : 286 : return force_to_mode (XEXP (x, 0), mode, mask, just_select);
9226 : :
9227 : : /* We need any bits less significant than the most significant bit in
9228 : : MASK since carries from those bits will affect the bits we are
9229 : : interested in. */
9230 : 150534 : mask = fuller_mask;
9231 : 150534 : goto unop;
9232 : :
9233 : 427113 : case NOT:
9234 : : /* (not FOO) is (xor FOO CONST), so if FOO is an LSHIFTRT, we can do the
9235 : : same as the XOR case above. Ensure that the constant we form is not
9236 : : wider than the mode of X. */
9237 : :
9238 : 427113 : if (GET_CODE (XEXP (x, 0)) == LSHIFTRT
9239 : 15864 : && CONST_INT_P (XEXP (XEXP (x, 0), 1))
9240 : 15232 : && INTVAL (XEXP (XEXP (x, 0), 1)) >= 0
9241 : 30464 : && (INTVAL (XEXP (XEXP (x, 0), 1)) + floor_log2 (mask)
9242 : 15232 : < GET_MODE_PRECISION (xmode))
9243 : 435119 : && INTVAL (XEXP (XEXP (x, 0), 1)) < HOST_BITS_PER_WIDE_INT)
9244 : : {
9245 : 8006 : temp = gen_int_mode (mask << INTVAL (XEXP (XEXP (x, 0), 1)), xmode);
9246 : 8006 : temp = simplify_gen_binary (XOR, xmode, XEXP (XEXP (x, 0), 0), temp);
9247 : 16012 : x = simplify_gen_binary (LSHIFTRT, xmode,
9248 : 8006 : temp, XEXP (XEXP (x, 0), 1));
9249 : :
9250 : 8006 : return force_to_mode (x, mode, mask, next_select);
9251 : : }
9252 : :
9253 : : /* (and (not FOO) CONST) is (not (or FOO (not CONST))), so we must
9254 : : use the full mask inside the NOT. */
9255 : : mask = fuller_mask;
9256 : :
9257 : 569641 : unop:
9258 : 569641 : op0 = gen_lowpart_or_truncate (op_mode,
9259 : : force_to_mode (XEXP (x, 0), mode, mask,
9260 : : next_select));
9261 : 569641 : if (op_mode != xmode || op0 != XEXP (x, 0))
9262 : : {
9263 : 65418 : x = simplify_gen_unary (code, op_mode, op0, op_mode);
9264 : 65418 : xmode = op_mode;
9265 : : }
9266 : : break;
9267 : :
9268 : 552572 : case NE:
9269 : : /* (and (ne FOO 0) CONST) can be (and FOO CONST) if CONST is included
9270 : : in STORE_FLAG_VALUE and FOO has a single bit that might be nonzero,
9271 : : which is equal to STORE_FLAG_VALUE. */
9272 : 552572 : if ((mask & ~STORE_FLAG_VALUE) == 0
9273 : 3004 : && XEXP (x, 1) == const0_rtx
9274 : 2983 : && GET_MODE (XEXP (x, 0)) == mode
9275 : 2 : && pow2p_hwi (nonzero_bits (XEXP (x, 0), mode))
9276 : 552572 : && (nonzero_bits (XEXP (x, 0), mode)
9277 : : == (unsigned HOST_WIDE_INT) STORE_FLAG_VALUE))
9278 : 0 : return force_to_mode (XEXP (x, 0), mode, mask, next_select);
9279 : :
9280 : : break;
9281 : :
9282 : 1403907 : case IF_THEN_ELSE:
9283 : : /* We have no way of knowing if the IF_THEN_ELSE can itself be
9284 : : written in a narrower mode. We play it safe and do not do so. */
9285 : :
9286 : 1403907 : op0 = gen_lowpart_or_truncate (xmode,
9287 : : force_to_mode (XEXP (x, 1), mode,
9288 : : mask, next_select));
9289 : 1403907 : op1 = gen_lowpart_or_truncate (xmode,
9290 : : force_to_mode (XEXP (x, 2), mode,
9291 : : mask, next_select));
9292 : 1403907 : if (op0 != XEXP (x, 1) || op1 != XEXP (x, 2))
9293 : 234019 : x = simplify_gen_ternary (IF_THEN_ELSE, xmode,
9294 : 234019 : GET_MODE (XEXP (x, 0)), XEXP (x, 0),
9295 : : op0, op1);
9296 : : break;
9297 : :
9298 : : default:
9299 : : break;
9300 : : }
9301 : :
9302 : : /* Ensure we return a value of the proper mode. */
9303 : 69602099 : return gen_lowpart_or_truncate (mode, x);
9304 : : }
9305 : :
9306 : : /* Return nonzero if X is an expression that has one of two values depending on
9307 : : whether some other value is zero or nonzero. In that case, we return the
9308 : : value that is being tested, *PTRUE is set to the value if the rtx being
9309 : : returned has a nonzero value, and *PFALSE is set to the other alternative.
9310 : :
9311 : : If we return zero, we set *PTRUE and *PFALSE to X. */
9312 : :
9313 : : static rtx
9314 : 231072406 : if_then_else_cond (rtx x, rtx *ptrue, rtx *pfalse)
9315 : : {
9316 : 231072406 : machine_mode mode = GET_MODE (x);
9317 : 231072406 : enum rtx_code code = GET_CODE (x);
9318 : 231072406 : rtx cond0, cond1, true0, true1, false0, false1;
9319 : 231072406 : unsigned HOST_WIDE_INT nz;
9320 : 231072406 : scalar_int_mode int_mode;
9321 : :
9322 : : /* If we are comparing a value against zero, we are done. */
9323 : 231072406 : if ((code == NE || code == EQ)
9324 : 2669104 : && XEXP (x, 1) == const0_rtx)
9325 : : {
9326 : 1593527 : *ptrue = (code == NE) ? const_true_rtx : const0_rtx;
9327 : 1593527 : *pfalse = (code == NE) ? const0_rtx : const_true_rtx;
9328 : 1593527 : return XEXP (x, 0);
9329 : : }
9330 : :
9331 : : /* If this is a unary operation whose operand has one of two values, apply
9332 : : our opcode to compute those values. */
9333 : 229478879 : else if (UNARY_P (x)
9334 : 229478879 : && (cond0 = if_then_else_cond (XEXP (x, 0), &true0, &false0)) != 0)
9335 : : {
9336 : 428958 : *ptrue = simplify_gen_unary (code, mode, true0, GET_MODE (XEXP (x, 0)));
9337 : 857916 : *pfalse = simplify_gen_unary (code, mode, false0,
9338 : 428958 : GET_MODE (XEXP (x, 0)));
9339 : 428958 : return cond0;
9340 : : }
9341 : :
9342 : : /* If this is a COMPARE, do nothing, since the IF_THEN_ELSE we would
9343 : : make can't possibly match and would suppress other optimizations. */
9344 : 229049921 : else if (code == COMPARE)
9345 : : ;
9346 : :
9347 : : /* If this is a binary operation, see if either side has only one of two
9348 : : values. If either one does or if both do and they are conditional on
9349 : : the same value, compute the new true and false values. */
9350 : 224692944 : else if (BINARY_P (x))
9351 : : {
9352 : 83834533 : rtx op0 = XEXP (x, 0);
9353 : 83834533 : rtx op1 = XEXP (x, 1);
9354 : 83834533 : cond0 = if_then_else_cond (op0, &true0, &false0);
9355 : 83834533 : cond1 = if_then_else_cond (op1, &true1, &false1);
9356 : :
9357 : 544493 : if ((cond0 != 0 && cond1 != 0 && !rtx_equal_p (cond0, cond1))
9358 : 84325035 : && (REG_P (op0) || REG_P (op1)))
9359 : : {
9360 : : /* Try to enable a simplification by undoing work done by
9361 : : if_then_else_cond if it converted a REG into something more
9362 : : complex. */
9363 : 423345 : if (REG_P (op0))
9364 : : {
9365 : 104825 : cond0 = 0;
9366 : 104825 : true0 = false0 = op0;
9367 : : }
9368 : : else
9369 : : {
9370 : 318520 : cond1 = 0;
9371 : 318520 : true1 = false1 = op1;
9372 : : }
9373 : : }
9374 : :
9375 : 83834533 : if ((cond0 != 0 || cond1 != 0)
9376 : 83834533 : && ! (cond0 != 0 && cond1 != 0 && !rtx_equal_p (cond0, cond1)))
9377 : : {
9378 : : /* If if_then_else_cond returned zero, then true/false are the
9379 : : same rtl. We must copy one of them to prevent invalid rtl
9380 : : sharing. */
9381 : 4241437 : if (cond0 == 0)
9382 : 1212348 : true0 = copy_rtx (true0);
9383 : 3029089 : else if (cond1 == 0)
9384 : 2975098 : true1 = copy_rtx (true1);
9385 : :
9386 : 4241437 : if (COMPARISON_P (x))
9387 : : {
9388 : 358275 : *ptrue = simplify_gen_relational (code, mode, VOIDmode,
9389 : : true0, true1);
9390 : 358275 : *pfalse = simplify_gen_relational (code, mode, VOIDmode,
9391 : : false0, false1);
9392 : : }
9393 : : else
9394 : : {
9395 : 3883162 : *ptrue = simplify_gen_binary (code, mode, true0, true1);
9396 : 3883162 : *pfalse = simplify_gen_binary (code, mode, false0, false1);
9397 : : }
9398 : :
9399 : 5453785 : return cond0 ? cond0 : cond1;
9400 : : }
9401 : :
9402 : : /* See if we have PLUS, IOR, XOR, MINUS or UMAX, where one of the
9403 : : operands is zero when the other is nonzero, and vice-versa,
9404 : : and STORE_FLAG_VALUE is 1 or -1. */
9405 : :
9406 : 79593096 : if ((STORE_FLAG_VALUE == 1 || STORE_FLAG_VALUE == -1)
9407 : 79593096 : && (code == PLUS || code == IOR || code == XOR || code == MINUS
9408 : : || code == UMAX)
9409 : 33635093 : && GET_CODE (XEXP (x, 0)) == MULT && GET_CODE (XEXP (x, 1)) == MULT)
9410 : : {
9411 : 35868 : rtx op0 = XEXP (XEXP (x, 0), 1);
9412 : 35868 : rtx op1 = XEXP (XEXP (x, 1), 1);
9413 : :
9414 : 35868 : cond0 = XEXP (XEXP (x, 0), 0);
9415 : 35868 : cond1 = XEXP (XEXP (x, 1), 0);
9416 : :
9417 : 35868 : if (COMPARISON_P (cond0)
9418 : 1 : && COMPARISON_P (cond1)
9419 : 0 : && SCALAR_INT_MODE_P (mode)
9420 : 0 : && ((GET_CODE (cond0) == reversed_comparison_code (cond1, NULL)
9421 : 0 : && rtx_equal_p (XEXP (cond0, 0), XEXP (cond1, 0))
9422 : 0 : && rtx_equal_p (XEXP (cond0, 1), XEXP (cond1, 1)))
9423 : 0 : || ((swap_condition (GET_CODE (cond0))
9424 : 0 : == reversed_comparison_code (cond1, NULL))
9425 : 0 : && rtx_equal_p (XEXP (cond0, 0), XEXP (cond1, 1))
9426 : 0 : && rtx_equal_p (XEXP (cond0, 1), XEXP (cond1, 0))))
9427 : 35868 : && ! side_effects_p (x))
9428 : : {
9429 : 0 : *ptrue = simplify_gen_binary (MULT, mode, op0, const_true_rtx);
9430 : 0 : *pfalse = simplify_gen_binary (MULT, mode,
9431 : : (code == MINUS
9432 : 0 : ? simplify_gen_unary (NEG, mode,
9433 : : op1, mode)
9434 : : : op1),
9435 : : const_true_rtx);
9436 : 0 : return cond0;
9437 : : }
9438 : : }
9439 : :
9440 : : /* Similarly for MULT, AND and UMIN, except that for these the result
9441 : : is always zero. */
9442 : 79593096 : if ((STORE_FLAG_VALUE == 1 || STORE_FLAG_VALUE == -1)
9443 : 79593096 : && (code == MULT || code == AND || code == UMIN)
9444 : 11238438 : && GET_CODE (XEXP (x, 0)) == MULT && GET_CODE (XEXP (x, 1)) == MULT)
9445 : : {
9446 : 886 : cond0 = XEXP (XEXP (x, 0), 0);
9447 : 886 : cond1 = XEXP (XEXP (x, 1), 0);
9448 : :
9449 : 886 : if (COMPARISON_P (cond0)
9450 : 0 : && COMPARISON_P (cond1)
9451 : 0 : && ((GET_CODE (cond0) == reversed_comparison_code (cond1, NULL)
9452 : 0 : && rtx_equal_p (XEXP (cond0, 0), XEXP (cond1, 0))
9453 : 0 : && rtx_equal_p (XEXP (cond0, 1), XEXP (cond1, 1)))
9454 : 0 : || ((swap_condition (GET_CODE (cond0))
9455 : 0 : == reversed_comparison_code (cond1, NULL))
9456 : 0 : && rtx_equal_p (XEXP (cond0, 0), XEXP (cond1, 1))
9457 : 0 : && rtx_equal_p (XEXP (cond0, 1), XEXP (cond1, 0))))
9458 : 886 : && ! side_effects_p (x))
9459 : : {
9460 : 0 : *ptrue = *pfalse = const0_rtx;
9461 : 0 : return cond0;
9462 : : }
9463 : : }
9464 : : }
9465 : :
9466 : 140858411 : else if (code == IF_THEN_ELSE)
9467 : : {
9468 : : /* If we have IF_THEN_ELSE already, extract the condition and
9469 : : canonicalize it if it is NE or EQ. */
9470 : 618281 : cond0 = XEXP (x, 0);
9471 : 618281 : *ptrue = XEXP (x, 1), *pfalse = XEXP (x, 2);
9472 : 618281 : if (GET_CODE (cond0) == NE && XEXP (cond0, 1) == const0_rtx)
9473 : 256615 : return XEXP (cond0, 0);
9474 : 361666 : else if (GET_CODE (cond0) == EQ && XEXP (cond0, 1) == const0_rtx)
9475 : : {
9476 : 23919 : *ptrue = XEXP (x, 2), *pfalse = XEXP (x, 1);
9477 : 23919 : return XEXP (cond0, 0);
9478 : : }
9479 : : else
9480 : : return cond0;
9481 : : }
9482 : :
9483 : : /* If X is a SUBREG, we can narrow both the true and false values
9484 : : if the inner expression, if there is a condition. */
9485 : 140240130 : else if (code == SUBREG
9486 : 140240130 : && (cond0 = if_then_else_cond (SUBREG_REG (x), &true0,
9487 : : &false0)) != 0)
9488 : : {
9489 : 687520 : true0 = simplify_gen_subreg (mode, true0,
9490 : 343760 : GET_MODE (SUBREG_REG (x)), SUBREG_BYTE (x));
9491 : 687520 : false0 = simplify_gen_subreg (mode, false0,
9492 : 343760 : GET_MODE (SUBREG_REG (x)), SUBREG_BYTE (x));
9493 : 343760 : if (true0 && false0)
9494 : : {
9495 : 343760 : *ptrue = true0;
9496 : 343760 : *pfalse = false0;
9497 : 343760 : return cond0;
9498 : : }
9499 : : }
9500 : :
9501 : : /* If X is a constant, this isn't special and will cause confusions
9502 : : if we treat it as such. Likewise if it is equivalent to a constant. */
9503 : 139896370 : else if (CONSTANT_P (x)
9504 : 139896370 : || ((cond0 = get_last_value (x)) != 0 && CONSTANT_P (cond0)))
9505 : : ;
9506 : :
9507 : : /* If we're in BImode, canonicalize on 0 and STORE_FLAG_VALUE, as that
9508 : : will be least confusing to the rest of the compiler. */
9509 : 93615253 : else if (mode == BImode)
9510 : : {
9511 : 0 : *ptrue = GEN_INT (STORE_FLAG_VALUE), *pfalse = const0_rtx;
9512 : 0 : return x;
9513 : : }
9514 : :
9515 : : /* If X is known to be either 0 or -1, those are the true and
9516 : : false values when testing X. */
9517 : 93615253 : else if (x == constm1_rtx || x == const0_rtx
9518 : 93615253 : || (is_a <scalar_int_mode> (mode, &int_mode)
9519 : 66362931 : && (num_sign_bit_copies (x, int_mode)
9520 : 66362931 : == GET_MODE_PRECISION (int_mode))))
9521 : : {
9522 : 776280 : *ptrue = constm1_rtx, *pfalse = const0_rtx;
9523 : 776280 : return x;
9524 : : }
9525 : :
9526 : : /* Likewise for 0 or a single bit. */
9527 : 92838973 : else if (HWI_COMPUTABLE_MODE_P (mode)
9528 : 62274279 : && pow2p_hwi (nz = nonzero_bits (x, mode)))
9529 : : {
9530 : 1805900 : *ptrue = gen_int_mode (nz, mode), *pfalse = const0_rtx;
9531 : 1805900 : return x;
9532 : : }
9533 : :
9534 : : /* Otherwise fail; show no condition with true and false values the same. */
9535 : 221264263 : *ptrue = *pfalse = x;
9536 : 221264263 : return 0;
9537 : : }
9538 : :
9539 : : /* Return the value of expression X given the fact that condition COND
9540 : : is known to be true when applied to REG as its first operand and VAL
9541 : : as its second. X is known to not be shared and so can be modified in
9542 : : place.
9543 : :
9544 : : We only handle the simplest cases, and specifically those cases that
9545 : : arise with IF_THEN_ELSE expressions. */
9546 : :
9547 : : static rtx
9548 : 611999 : known_cond (rtx x, enum rtx_code cond, rtx reg, rtx val)
9549 : : {
9550 : 611999 : enum rtx_code code = GET_CODE (x);
9551 : 611999 : const char *fmt;
9552 : 611999 : int i, j;
9553 : :
9554 : 611999 : if (side_effects_p (x))
9555 : : return x;
9556 : :
9557 : : /* If either operand of the condition is a floating point value,
9558 : : then we have to avoid collapsing an EQ comparison. */
9559 : 611999 : if (cond == EQ
9560 : 123237 : && rtx_equal_p (x, reg)
9561 : 82120 : && ! FLOAT_MODE_P (GET_MODE (x))
9562 : 694119 : && ! FLOAT_MODE_P (GET_MODE (val)))
9563 : : return val;
9564 : :
9565 : 529879 : if (cond == UNEQ && rtx_equal_p (x, reg))
9566 : : return val;
9567 : :
9568 : : /* If X is (abs REG) and we know something about REG's relationship
9569 : : with zero, we may be able to simplify this. */
9570 : :
9571 : 529879 : if (code == ABS && rtx_equal_p (XEXP (x, 0), reg) && val == const0_rtx)
9572 : 3 : switch (cond)
9573 : : {
9574 : 1 : case GE: case GT: case EQ:
9575 : 1 : return XEXP (x, 0);
9576 : 2 : case LT: case LE:
9577 : 4 : return simplify_gen_unary (NEG, GET_MODE (XEXP (x, 0)),
9578 : : XEXP (x, 0),
9579 : 2 : GET_MODE (XEXP (x, 0)));
9580 : : default:
9581 : : break;
9582 : : }
9583 : :
9584 : : /* The only other cases we handle are MIN, MAX, and comparisons if the
9585 : : operands are the same as REG and VAL. */
9586 : :
9587 : 529876 : else if (COMPARISON_P (x) || COMMUTATIVE_ARITH_P (x))
9588 : : {
9589 : 242233 : if (rtx_equal_p (XEXP (x, 0), val))
9590 : : {
9591 : 2 : std::swap (val, reg);
9592 : 2 : cond = swap_condition (cond);
9593 : : }
9594 : :
9595 : 242233 : if (rtx_equal_p (XEXP (x, 0), reg) && rtx_equal_p (XEXP (x, 1), val))
9596 : : {
9597 : 220400 : if (COMPARISON_P (x))
9598 : : {
9599 : 220183 : if (comparison_dominates_p (cond, code))
9600 : 348 : return VECTOR_MODE_P (GET_MODE (x)) ? x : const_true_rtx;
9601 : :
9602 : 219835 : code = reversed_comparison_code (x, NULL);
9603 : 219835 : if (code != UNKNOWN
9604 : 219835 : && comparison_dominates_p (cond, code))
9605 : 42 : return CONST0_RTX (GET_MODE (x));
9606 : : else
9607 : 219793 : return x;
9608 : : }
9609 : 217 : else if (code == SMAX || code == SMIN
9610 : 217 : || code == UMIN || code == UMAX)
9611 : : {
9612 : 39 : int unsignedp = (code == UMIN || code == UMAX);
9613 : :
9614 : : /* Do not reverse the condition when it is NE or EQ.
9615 : : This is because we cannot conclude anything about
9616 : : the value of 'SMAX (x, y)' when x is not equal to y,
9617 : : but we can when x equals y. */
9618 : 39 : if ((code == SMAX || code == UMAX)
9619 : 36 : && ! (cond == EQ || cond == NE))
9620 : 3 : cond = reverse_condition (cond);
9621 : :
9622 : 6 : switch (cond)
9623 : : {
9624 : 2 : case GE: case GT:
9625 : 2 : return unsignedp ? x : XEXP (x, 1);
9626 : 4 : case LE: case LT:
9627 : 4 : return unsignedp ? x : XEXP (x, 0);
9628 : 0 : case GEU: case GTU:
9629 : 0 : return unsignedp ? XEXP (x, 1) : x;
9630 : 0 : case LEU: case LTU:
9631 : 0 : return unsignedp ? XEXP (x, 0) : x;
9632 : : default:
9633 : : break;
9634 : : }
9635 : : }
9636 : : }
9637 : : }
9638 : 287643 : else if (code == SUBREG)
9639 : : {
9640 : 9624 : machine_mode inner_mode = GET_MODE (SUBREG_REG (x));
9641 : 9624 : rtx new_rtx, r = known_cond (SUBREG_REG (x), cond, reg, val);
9642 : :
9643 : 9624 : if (SUBREG_REG (x) != r)
9644 : : {
9645 : : /* We must simplify subreg here, before we lose track of the
9646 : : original inner_mode. */
9647 : 34 : new_rtx = simplify_subreg (GET_MODE (x), r,
9648 : 17 : inner_mode, SUBREG_BYTE (x));
9649 : 17 : if (new_rtx)
9650 : : return new_rtx;
9651 : : else
9652 : 17 : SUBST (SUBREG_REG (x), r);
9653 : : }
9654 : :
9655 : 9624 : return x;
9656 : : }
9657 : : /* We don't have to handle SIGN_EXTEND here, because even in the
9658 : : case of replacing something with a modeless CONST_INT, a
9659 : : CONST_INT is already (supposed to be) a valid sign extension for
9660 : : its narrower mode, which implies it's already properly
9661 : : sign-extended for the wider mode. Now, for ZERO_EXTEND, the
9662 : : story is different. */
9663 : 278019 : else if (code == ZERO_EXTEND)
9664 : : {
9665 : 1276 : machine_mode inner_mode = GET_MODE (XEXP (x, 0));
9666 : 1276 : rtx new_rtx, r = known_cond (XEXP (x, 0), cond, reg, val);
9667 : :
9668 : 1276 : if (XEXP (x, 0) != r)
9669 : : {
9670 : : /* We must simplify the zero_extend here, before we lose
9671 : : track of the original inner_mode. */
9672 : 0 : new_rtx = simplify_unary_operation (ZERO_EXTEND, GET_MODE (x),
9673 : : r, inner_mode);
9674 : 0 : if (new_rtx)
9675 : : return new_rtx;
9676 : : else
9677 : 0 : SUBST (XEXP (x, 0), r);
9678 : : }
9679 : :
9680 : 1276 : return x;
9681 : : }
9682 : :
9683 : 298787 : fmt = GET_RTX_FORMAT (code);
9684 : 687097 : for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
9685 : : {
9686 : 388310 : if (fmt[i] == 'e')
9687 : 183746 : SUBST (XEXP (x, i), known_cond (XEXP (x, i), cond, reg, val));
9688 : 204564 : else if (fmt[i] == 'E')
9689 : 15148 : for (j = XVECLEN (x, i) - 1; j >= 0; j--)
9690 : 12212 : SUBST (XVECEXP (x, i, j), known_cond (XVECEXP (x, i, j),
9691 : : cond, reg, val));
9692 : : }
9693 : :
9694 : : return x;
9695 : : }
9696 : :
9697 : : /* See if X and Y are equal for the purposes of seeing if we can rewrite an
9698 : : assignment as a field assignment. */
9699 : :
9700 : : static bool
9701 : 575906 : rtx_equal_for_field_assignment_p (rtx x, rtx y, bool widen_x)
9702 : : {
9703 : 575906 : if (widen_x && GET_MODE (x) != GET_MODE (y))
9704 : : {
9705 : 56093 : if (paradoxical_subreg_p (GET_MODE (x), GET_MODE (y)))
9706 : : return false;
9707 : 56093 : if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN)
9708 : : return false;
9709 : 56093 : x = adjust_address_nv (x, GET_MODE (y),
9710 : : byte_lowpart_offset (GET_MODE (y),
9711 : : GET_MODE (x)));
9712 : : }
9713 : :
9714 : 575906 : if (x == y || rtx_equal_p (x, y))
9715 : 10362 : return true;
9716 : :
9717 : 565544 : if (x == 0 || y == 0 || GET_MODE (x) != GET_MODE (y))
9718 : : return false;
9719 : :
9720 : : /* Check for a paradoxical SUBREG of a MEM compared with the MEM.
9721 : : Note that all SUBREGs of MEM are paradoxical; otherwise they
9722 : : would have been rewritten. */
9723 : 95656 : if (MEM_P (x) && GET_CODE (y) == SUBREG
9724 : 6130 : && MEM_P (SUBREG_REG (y))
9725 : 565544 : && rtx_equal_p (SUBREG_REG (y),
9726 : 0 : gen_lowpart (GET_MODE (SUBREG_REG (y)), x)))
9727 : : return true;
9728 : :
9729 : 67079 : if (MEM_P (y) && GET_CODE (x) == SUBREG
9730 : 5153 : && MEM_P (SUBREG_REG (x))
9731 : 565731 : && rtx_equal_p (SUBREG_REG (x),
9732 : 187 : gen_lowpart (GET_MODE (SUBREG_REG (x)), y)))
9733 : : return true;
9734 : :
9735 : : /* We used to see if get_last_value of X and Y were the same but that's
9736 : : not correct. In one direction, we'll cause the assignment to have
9737 : : the wrong destination and in the case, we'll import a register into this
9738 : : insn that might have already have been dead. So fail if none of the
9739 : : above cases are true. */
9740 : : return false;
9741 : : }
9742 : :
9743 : : /* See if X, a SET operation, can be rewritten as a bit-field assignment.
9744 : : Return that assignment if so.
9745 : :
9746 : : We only handle the most common cases. */
9747 : :
9748 : : static rtx
9749 : 46662217 : make_field_assignment (rtx x)
9750 : : {
9751 : 46662217 : rtx dest = SET_DEST (x);
9752 : 46662217 : rtx src = SET_SRC (x);
9753 : 46662217 : rtx assign;
9754 : 46662217 : rtx rhs, lhs;
9755 : 46662217 : HOST_WIDE_INT c1;
9756 : 46662217 : HOST_WIDE_INT pos;
9757 : 46662217 : unsigned HOST_WIDE_INT len;
9758 : 46662217 : rtx other;
9759 : :
9760 : : /* All the rules in this function are specific to scalar integers. */
9761 : 46662217 : scalar_int_mode mode;
9762 : 67693696 : if (!is_a <scalar_int_mode> (GET_MODE (dest), &mode))
9763 : : return x;
9764 : :
9765 : : /* If SRC was (and (not (ashift (const_int 1) POS)) DEST), this is
9766 : : a clear of a one-bit field. We will have changed it to
9767 : : (and (rotate (const_int -2) POS) DEST), so check for that. Also check
9768 : : for a SUBREG. */
9769 : :
9770 : 1268669 : if (GET_CODE (src) == AND && GET_CODE (XEXP (src, 0)) == ROTATE
9771 : 2526 : && CONST_INT_P (XEXP (XEXP (src, 0), 0))
9772 : 553 : && INTVAL (XEXP (XEXP (src, 0), 0)) == -2
9773 : 21040013 : && rtx_equal_for_field_assignment_p (dest, XEXP (src, 1)))
9774 : : {
9775 : 176 : assign = make_extraction (VOIDmode, dest, 0, XEXP (XEXP (src, 0), 1),
9776 : : 1, true, true, false);
9777 : 176 : if (assign != 0)
9778 : 173 : return gen_rtx_SET (assign, const0_rtx);
9779 : : return x;
9780 : : }
9781 : :
9782 : 1268493 : if (GET_CODE (src) == AND && GET_CODE (XEXP (src, 0)) == SUBREG
9783 : 85081 : && subreg_lowpart_p (XEXP (src, 0))
9784 : 85048 : && partial_subreg_p (XEXP (src, 0))
9785 : 19424 : && GET_CODE (SUBREG_REG (XEXP (src, 0))) == ROTATE
9786 : 125 : && CONST_INT_P (XEXP (SUBREG_REG (XEXP (src, 0)), 0))
9787 : 57 : && INTVAL (XEXP (SUBREG_REG (XEXP (src, 0)), 0)) == -2
9788 : 21039341 : && rtx_equal_for_field_assignment_p (dest, XEXP (src, 1)))
9789 : : {
9790 : 14 : assign = make_extraction (VOIDmode, dest, 0,
9791 : 7 : XEXP (SUBREG_REG (XEXP (src, 0)), 1),
9792 : : 1, true, true, false);
9793 : 7 : if (assign != 0)
9794 : 7 : return gen_rtx_SET (assign, const0_rtx);
9795 : : return x;
9796 : : }
9797 : :
9798 : : /* If SRC is (ior (ashift (const_int 1) POS) DEST), this is a set of a
9799 : : one-bit field. */
9800 : 1809115 : if (GET_CODE (src) == IOR && GET_CODE (XEXP (src, 0)) == ASHIFT
9801 : 407266 : && XEXP (XEXP (src, 0), 0) == const1_rtx
9802 : 21041533 : && rtx_equal_for_field_assignment_p (dest, XEXP (src, 1)))
9803 : : {
9804 : 547 : assign = make_extraction (VOIDmode, dest, 0, XEXP (XEXP (src, 0), 1),
9805 : : 1, true, true, false);
9806 : 547 : if (assign != 0)
9807 : 518 : return gen_rtx_SET (assign, const1_rtx);
9808 : : return x;
9809 : : }
9810 : :
9811 : : /* If DEST is already a field assignment, i.e. ZERO_EXTRACT, and the
9812 : : SRC is an AND with all bits of that field set, then we can discard
9813 : : the AND. */
9814 : 21038730 : if (GET_CODE (dest) == ZERO_EXTRACT
9815 : 2676 : && CONST_INT_P (XEXP (dest, 1))
9816 : 2676 : && GET_CODE (src) == AND
9817 : 816 : && CONST_INT_P (XEXP (src, 1)))
9818 : : {
9819 : 816 : HOST_WIDE_INT width = INTVAL (XEXP (dest, 1));
9820 : 816 : unsigned HOST_WIDE_INT and_mask = INTVAL (XEXP (src, 1));
9821 : 816 : unsigned HOST_WIDE_INT ze_mask;
9822 : :
9823 : 816 : if (width >= HOST_BITS_PER_WIDE_INT)
9824 : : ze_mask = -1;
9825 : : else
9826 : 816 : ze_mask = (HOST_WIDE_INT_1U << width) - 1;
9827 : :
9828 : : /* Complete overlap. We can remove the source AND. */
9829 : 816 : if ((and_mask & ze_mask) == ze_mask)
9830 : 792 : return gen_rtx_SET (dest, XEXP (src, 0));
9831 : :
9832 : : /* Partial overlap. We can reduce the source AND. */
9833 : 24 : if ((and_mask & ze_mask) != and_mask)
9834 : : {
9835 : 6 : src = gen_rtx_AND (mode, XEXP (src, 0),
9836 : : gen_int_mode (and_mask & ze_mask, mode));
9837 : 6 : return gen_rtx_SET (dest, src);
9838 : : }
9839 : : }
9840 : :
9841 : : /* The other case we handle is assignments into a constant-position
9842 : : field. They look like (ior/xor (and DEST C1) OTHER). If C1 represents
9843 : : a mask that has all one bits except for a group of zero bits and
9844 : : OTHER is known to have zeros where C1 has ones, this is such an
9845 : : assignment. Compute the position and length from C1. Shift OTHER
9846 : : to the appropriate position, force it to the required mode, and
9847 : : make the extraction. Check for the AND in both operands. */
9848 : :
9849 : : /* One or more SUBREGs might obscure the constant-position field
9850 : : assignment. The first one we are likely to encounter is an outer
9851 : : narrowing SUBREG, which we can just strip for the purposes of
9852 : : identifying the constant-field assignment. */
9853 : 21037932 : scalar_int_mode src_mode = mode;
9854 : 21037932 : if (GET_CODE (src) == SUBREG
9855 : 200994 : && subreg_lowpart_p (src)
9856 : 21223082 : && is_a <scalar_int_mode> (GET_MODE (SUBREG_REG (src)), &src_mode))
9857 : : src = SUBREG_REG (src);
9858 : :
9859 : 21037932 : if (GET_CODE (src) != IOR && GET_CODE (src) != XOR)
9860 : : return x;
9861 : :
9862 : 1986955 : rhs = expand_compound_operation (XEXP (src, 0));
9863 : 1986955 : lhs = expand_compound_operation (XEXP (src, 1));
9864 : :
9865 : 1986955 : if (GET_CODE (rhs) == AND
9866 : 784365 : && CONST_INT_P (XEXP (rhs, 1))
9867 : 2426580 : && rtx_equal_for_field_assignment_p (XEXP (rhs, 0), dest))
9868 : 9654 : c1 = INTVAL (XEXP (rhs, 1)), other = lhs;
9869 : : /* The second SUBREG that might get in the way is a paradoxical
9870 : : SUBREG around the first operand of the AND. We want to
9871 : : pretend the operand is as wide as the destination here. We
9872 : : do this by adjusting the MEM to wider mode for the sole
9873 : : purpose of the call to rtx_equal_for_field_assignment_p. Also
9874 : : note this trick only works for MEMs. */
9875 : 1977301 : else if (GET_CODE (rhs) == AND
9876 : 774711 : && paradoxical_subreg_p (XEXP (rhs, 0))
9877 : 68230 : && MEM_P (SUBREG_REG (XEXP (rhs, 0)))
9878 : 30863 : && CONST_INT_P (XEXP (rhs, 1))
9879 : 2008164 : && rtx_equal_for_field_assignment_p (SUBREG_REG (XEXP (rhs, 0)),
9880 : : dest, true))
9881 : 0 : c1 = INTVAL (XEXP (rhs, 1)), other = lhs;
9882 : 1977301 : else if (GET_CODE (lhs) == AND
9883 : 87061 : && CONST_INT_P (XEXP (lhs, 1))
9884 : 2054623 : && rtx_equal_for_field_assignment_p (XEXP (lhs, 0), dest))
9885 : 29 : c1 = INTVAL (XEXP (lhs, 1)), other = rhs;
9886 : : /* The second SUBREG that might get in the way is a paradoxical
9887 : : SUBREG around the first operand of the AND. We want to
9888 : : pretend the operand is as wide as the destination here. We
9889 : : do this by adjusting the MEM to wider mode for the sole
9890 : : purpose of the call to rtx_equal_for_field_assignment_p. Also
9891 : : note this trick only works for MEMs. */
9892 : 1977272 : else if (GET_CODE (lhs) == AND
9893 : 87032 : && paradoxical_subreg_p (XEXP (lhs, 0))
9894 : 37337 : && MEM_P (SUBREG_REG (XEXP (lhs, 0)))
9895 : 25230 : && CONST_INT_P (XEXP (lhs, 1))
9896 : 2002502 : && rtx_equal_for_field_assignment_p (SUBREG_REG (XEXP (lhs, 0)),
9897 : : dest, true))
9898 : 0 : c1 = INTVAL (XEXP (lhs, 1)), other = rhs;
9899 : : else
9900 : 1977272 : return x;
9901 : :
9902 : 9683 : pos = get_pos_from_mask ((~c1) & GET_MODE_MASK (mode), &len);
9903 : 9683 : if (pos < 0
9904 : 7109 : || pos + len > GET_MODE_PRECISION (mode)
9905 : 7109 : || GET_MODE_PRECISION (mode) > HOST_BITS_PER_WIDE_INT
9906 : 16788 : || (c1 & nonzero_bits (other, mode)) != 0)
9907 : 3186 : return x;
9908 : :
9909 : 6497 : assign = make_extraction (VOIDmode, dest, pos, NULL_RTX, len,
9910 : : true, true, false);
9911 : 6497 : if (assign == 0)
9912 : : return x;
9913 : :
9914 : : /* The mode to use for the source is the mode of the assignment, or of
9915 : : what is inside a possible STRICT_LOW_PART. */
9916 : 12970 : machine_mode new_mode = (GET_CODE (assign) == STRICT_LOW_PART
9917 : 6485 : ? GET_MODE (XEXP (assign, 0)) : GET_MODE (assign));
9918 : :
9919 : : /* Shift OTHER right POS places and make it the source, restricting it
9920 : : to the proper length and mode. */
9921 : :
9922 : 6485 : src = canon_reg_for_combine (simplify_shift_const (NULL_RTX, LSHIFTRT,
9923 : : src_mode, other, pos),
9924 : : dest);
9925 : 12970 : src = force_to_mode (src, new_mode,
9926 : : len >= HOST_BITS_PER_WIDE_INT
9927 : : ? HOST_WIDE_INT_M1U
9928 : 6485 : : (HOST_WIDE_INT_1U << len) - 1, false);
9929 : :
9930 : : /* If SRC is masked by an AND that does not make a difference in
9931 : : the value being stored, strip it. */
9932 : 6485 : if (GET_CODE (assign) == ZERO_EXTRACT
9933 : 6436 : && CONST_INT_P (XEXP (assign, 1))
9934 : 6436 : && INTVAL (XEXP (assign, 1)) < HOST_BITS_PER_WIDE_INT
9935 : 6436 : && GET_CODE (src) == AND
9936 : 0 : && CONST_INT_P (XEXP (src, 1))
9937 : 0 : && UINTVAL (XEXP (src, 1))
9938 : 0 : == (HOST_WIDE_INT_1U << INTVAL (XEXP (assign, 1))) - 1)
9939 : 0 : src = XEXP (src, 0);
9940 : :
9941 : 6485 : return gen_rtx_SET (assign, src);
9942 : : }
9943 : :
9944 : : /* See if X is of the form (+ (* a c) (* b c)) and convert to (* (+ a b) c)
9945 : : if so. */
9946 : :
9947 : : static rtx
9948 : 50699443 : apply_distributive_law (rtx x)
9949 : : {
9950 : 50699443 : enum rtx_code code = GET_CODE (x);
9951 : 50699443 : enum rtx_code inner_code;
9952 : 50699443 : rtx lhs, rhs, other;
9953 : 50699443 : rtx tem;
9954 : :
9955 : : /* Distributivity is not true for floating point as it can change the
9956 : : value. So we don't do it unless -funsafe-math-optimizations. */
9957 : 50699443 : if (FLOAT_MODE_P (GET_MODE (x))
9958 : 3631532 : && ! flag_unsafe_math_optimizations)
9959 : : return x;
9960 : :
9961 : : /* The outer operation can only be one of the following: */
9962 : 47500813 : if (code != IOR && code != AND && code != XOR
9963 : 47500813 : && code != PLUS && code != MINUS)
9964 : : return x;
9965 : :
9966 : 47487485 : lhs = XEXP (x, 0);
9967 : 47487485 : rhs = XEXP (x, 1);
9968 : :
9969 : : /* If either operand is a primitive we can't do anything, so get out
9970 : : fast. */
9971 : 47487485 : if (OBJECT_P (lhs) || OBJECT_P (rhs))
9972 : : return x;
9973 : :
9974 : 2710811 : lhs = expand_compound_operation (lhs);
9975 : 2710811 : rhs = expand_compound_operation (rhs);
9976 : 2710811 : inner_code = GET_CODE (lhs);
9977 : 2710811 : if (inner_code != GET_CODE (rhs))
9978 : : return x;
9979 : :
9980 : : /* See if the inner and outer operations distribute. */
9981 : 704363 : switch (inner_code)
9982 : : {
9983 : 278490 : case LSHIFTRT:
9984 : 278490 : case ASHIFTRT:
9985 : 278490 : case AND:
9986 : 278490 : case IOR:
9987 : : /* These all distribute except over PLUS. */
9988 : 278490 : if (code == PLUS || code == MINUS)
9989 : : return x;
9990 : : break;
9991 : :
9992 : 92135 : case MULT:
9993 : 92135 : if (code != PLUS && code != MINUS)
9994 : : return x;
9995 : : break;
9996 : :
9997 : : case ASHIFT:
9998 : : /* This is also a multiply, so it distributes over everything. */
9999 : : break;
10000 : :
10001 : : /* This used to handle SUBREG, but this turned out to be counter-
10002 : : productive, since (subreg (op ...)) usually is not handled by
10003 : : insn patterns, and this "optimization" therefore transformed
10004 : : recognizable patterns into unrecognizable ones. Therefore the
10005 : : SUBREG case was removed from here.
10006 : :
10007 : : It is possible that distributing SUBREG over arithmetic operations
10008 : : leads to an intermediate result than can then be optimized further,
10009 : : e.g. by moving the outer SUBREG to the other side of a SET as done
10010 : : in simplify_set. This seems to have been the original intent of
10011 : : handling SUBREGs here.
10012 : :
10013 : : However, with current GCC this does not appear to actually happen,
10014 : : at least on major platforms. If some case is found where removing
10015 : : the SUBREG case here prevents follow-on optimizations, distributing
10016 : : SUBREGs ought to be re-added at that place, e.g. in simplify_set. */
10017 : :
10018 : : default:
10019 : : return x;
10020 : : }
10021 : :
10022 : : /* Set LHS and RHS to the inner operands (A and B in the example
10023 : : above) and set OTHER to the common operand (C in the example).
10024 : : There is only one way to do this unless the inner operation is
10025 : : commutative. */
10026 : 302045 : if (COMMUTATIVE_ARITH_P (lhs)
10027 : 302045 : && rtx_equal_p (XEXP (lhs, 0), XEXP (rhs, 0)))
10028 : 2149 : other = XEXP (lhs, 0), lhs = XEXP (lhs, 1), rhs = XEXP (rhs, 1);
10029 : 299896 : else if (COMMUTATIVE_ARITH_P (lhs)
10030 : 299896 : && rtx_equal_p (XEXP (lhs, 0), XEXP (rhs, 1)))
10031 : 15 : other = XEXP (lhs, 0), lhs = XEXP (lhs, 1), rhs = XEXP (rhs, 0);
10032 : 299881 : else if (COMMUTATIVE_ARITH_P (lhs)
10033 : 299881 : && rtx_equal_p (XEXP (lhs, 1), XEXP (rhs, 0)))
10034 : 10607 : other = XEXP (lhs, 1), lhs = XEXP (lhs, 0), rhs = XEXP (rhs, 1);
10035 : 289274 : else if (rtx_equal_p (XEXP (lhs, 1), XEXP (rhs, 1)))
10036 : 63188 : other = XEXP (lhs, 1), lhs = XEXP (lhs, 0), rhs = XEXP (rhs, 0);
10037 : : else
10038 : : return x;
10039 : :
10040 : : /* Form the new inner operation, seeing if it simplifies first. */
10041 : 75959 : tem = simplify_gen_binary (code, GET_MODE (x), lhs, rhs);
10042 : :
10043 : : /* There is one exception to the general way of distributing:
10044 : : (a | c) ^ (b | c) -> (a ^ b) & ~c */
10045 : 75959 : if (code == XOR && inner_code == IOR)
10046 : : {
10047 : 76 : inner_code = AND;
10048 : 76 : other = simplify_gen_unary (NOT, GET_MODE (x), other, GET_MODE (x));
10049 : : }
10050 : :
10051 : : /* We may be able to continuing distributing the result, so call
10052 : : ourselves recursively on the inner operation before forming the
10053 : : outer operation, which we return. */
10054 : 75959 : return simplify_gen_binary (inner_code, GET_MODE (x),
10055 : 75959 : apply_distributive_law (tem), other);
10056 : : }
10057 : :
10058 : : /* See if X is of the form (* (+ A B) C), and if so convert to
10059 : : (+ (* A C) (* B C)) and try to simplify.
10060 : :
10061 : : Most of the time, this results in no change. However, if some of
10062 : : the operands are the same or inverses of each other, simplifications
10063 : : will result.
10064 : :
10065 : : For example, (and (ior A B) (not B)) can occur as the result of
10066 : : expanding a bit field assignment. When we apply the distributive
10067 : : law to this, we get (ior (and (A (not B))) (and (B (not B)))),
10068 : : which then simplifies to (and (A (not B))).
10069 : :
10070 : : Note that no checks happen on the validity of applying the inverse
10071 : : distributive law. This is pointless since we can do it in the
10072 : : few places where this routine is called.
10073 : :
10074 : : N is the index of the term that is decomposed (the arithmetic operation,
10075 : : i.e. (+ A B) in the first example above). !N is the index of the term that
10076 : : is distributed, i.e. of C in the first example above. */
static rtx
distribute_and_simplify_rtx (rtx x, int n)
{
  machine_mode mode;
  enum rtx_code outer_code, inner_code;
  rtx decomposed, distributed, inner_op0, inner_op1, new_op0, new_op1, tmp;

  /* Distributivity is not true for floating point as it can change the
     value.  So we don't do it unless -funsafe-math-optimizations.  */
  if (FLOAT_MODE_P (GET_MODE (x))
      && ! flag_unsafe_math_optimizations)
    return NULL_RTX;

  /* DECOMPOSED is the operand we will break apart; it must itself be a
     binary arithmetic operation for the distributive law to apply.  */
  decomposed = XEXP (x, n);
  if (!ARITHMETIC_P (decomposed))
    return NULL_RTX;

  mode = GET_MODE (x);
  outer_code = GET_CODE (x);
  /* !N selects the other operand of X, the one that gets distributed.  */
  distributed = XEXP (x, !n);

  inner_code = GET_CODE (decomposed);
  inner_op0 = XEXP (decomposed, 0);
  inner_op1 = XEXP (decomposed, 1);

  /* Special case (and (xor B C) (not A)), which is equivalent to
     (xor (ior A B) (ior A C))  */
  if (outer_code == AND && inner_code == XOR && GET_CODE (distributed) == NOT)
    {
      distributed = XEXP (distributed, 0);
      outer_code = IOR;
    }

  if (n == 0)
    {
      /* Distribute the second term.  */
      new_op0 = simplify_gen_binary (outer_code, mode, inner_op0, distributed);
      new_op1 = simplify_gen_binary (outer_code, mode, inner_op1, distributed);
    }
  else
    {
      /* Distribute the first term.  */
      new_op0 = simplify_gen_binary (outer_code, mode, distributed, inner_op0);
      new_op1 = simplify_gen_binary (outer_code, mode, distributed, inner_op1);
    }

  tmp = apply_distributive_law (simplify_gen_binary (inner_code, mode,
						     new_op0, new_op1));
  /* Only accept the result if distribution actually changed the outermost
     code and produced a cheaper expression; otherwise report failure so
     the caller keeps the original form.  */
  if (GET_CODE (tmp) != outer_code
      && (set_src_cost (tmp, mode, optimize_this_for_speed_p)
	  < set_src_cost (x, mode, optimize_this_for_speed_p)))
    return tmp;

  return NULL_RTX;
}
10132 : :
10133 : : /* Simplify a logical `and' of VAROP with the constant CONSTOP, to be done
10134 : : in MODE. Return an equivalent form, if different from (and VAROP
10135 : : (const_int CONSTOP)). Otherwise, return NULL_RTX. */
10136 : :
static rtx
simplify_and_const_int_1 (scalar_int_mode mode, rtx varop,
			  unsigned HOST_WIDE_INT constop)
{
  unsigned HOST_WIDE_INT nonzero;
  unsigned HOST_WIDE_INT orig_constop;
  rtx orig_varop;
  int i;

  /* Remember the incoming operands so we can detect, at the end, whether
     anything was actually simplified.  */
  orig_varop = varop;
  orig_constop = constop;
  if (GET_CODE (varop) == CLOBBER)
    return NULL_RTX;

  /* Simplify VAROP knowing that we will be only looking at some of the
     bits in it.

     Note by passing in CONSTOP, we guarantee that the bits not set in
     CONSTOP are not significant and will never be examined.  We must
     ensure that is the case by explicitly masking out those bits
     before returning.  */
  varop = force_to_mode (varop, mode, constop, false);

  /* If VAROP is a CLOBBER, we will fail so return it.  */
  if (GET_CODE (varop) == CLOBBER)
    return varop;

  /* If VAROP is a CONST_INT, then we need to apply the mask in CONSTOP
     to VAROP and return the new constant.  */
  if (CONST_INT_P (varop))
    return gen_int_mode (INTVAL (varop) & constop, mode);

  /* See what bits may be nonzero in VAROP.  Unlike the general case of
     a call to nonzero_bits, here we don't care about bits outside
     MODE unless WORD_REGISTER_OPERATIONS is true.  */

  scalar_int_mode tmode = mode;
  if (WORD_REGISTER_OPERATIONS && GET_MODE_BITSIZE (mode) < BITS_PER_WORD)
    tmode = word_mode;
  nonzero = nonzero_bits (varop, tmode) & GET_MODE_MASK (tmode);

  /* Turn off all bits in the constant that are known to already be zero.
     Thus, if the AND isn't needed at all, we will have CONSTOP == NONZERO_BITS
     which is tested below.  */

  constop &= nonzero;

  /* If we don't have any bits left, return zero.  */
  if (constop == 0 && !side_effects_p (varop))
    return const0_rtx;

  /* If VAROP is a NEG of something known to be zero or 1 and CONSTOP is
     a power of two, we can replace this with an ASHIFT.  */
  if (GET_CODE (varop) == NEG && nonzero_bits (XEXP (varop, 0), tmode) == 1
      && (i = exact_log2 (constop)) >= 0)
    return simplify_shift_const (NULL_RTX, ASHIFT, mode, XEXP (varop, 0), i);

  /* If VAROP is an IOR or XOR, apply the AND to both branches of the IOR
     or XOR, then try to apply the distributive law.  This may eliminate
     operations if either branch can be simplified because of the AND.
     It may also make some cases more complex, but those cases probably
     won't match a pattern either with or without this.  */

  if (GET_CODE (varop) == IOR || GET_CODE (varop) == XOR)
    {
      scalar_int_mode varop_mode = as_a <scalar_int_mode> (GET_MODE (varop));
      return
	gen_lowpart
	  (mode,
	   apply_distributive_law
	     (simplify_gen_binary (GET_CODE (varop), varop_mode,
				   simplify_and_const_int (NULL_RTX, varop_mode,
							   XEXP (varop, 0),
							   constop),
				   simplify_and_const_int (NULL_RTX, varop_mode,
							   XEXP (varop, 1),
							   constop))));
    }

  /* If VAROP is PLUS, and the constant is a mask of low bits, distribute
     the AND and see if one of the operands simplifies to zero.  If so, we
     may eliminate it.  */

  /* pow2p_hwi (constop + 1) is true exactly when CONSTOP is a mask of
     contiguous low-order bits.  */
  if (GET_CODE (varop) == PLUS
      && pow2p_hwi (constop + 1))
    {
      rtx o0, o1;

      o0 = simplify_and_const_int (NULL_RTX, mode, XEXP (varop, 0), constop);
      o1 = simplify_and_const_int (NULL_RTX, mode, XEXP (varop, 1), constop);
      if (o0 == const0_rtx)
	return o1;
      if (o1 == const0_rtx)
	return o0;
    }

  /* Make a SUBREG if necessary.  If we can't make it, fail.  */
  varop = gen_lowpart (mode, varop);
  if (varop == NULL_RTX || GET_CODE (varop) == CLOBBER)
    return NULL_RTX;

  /* If we are only masking insignificant bits, return VAROP.  */
  if (constop == nonzero)
    return varop;

  /* Nothing changed relative to the original operands, so report that no
     simplification was found.  */
  if (varop == orig_varop && constop == orig_constop)
    return NULL_RTX;

  /* Otherwise, return an AND.  */
  return simplify_gen_binary (AND, mode, varop, gen_int_mode (constop, mode));
}
10248 : :
10249 : :
10250 : : /* We have X, a logical `and' of VAROP with the constant CONSTOP, to be done
10251 : : in MODE.
10252 : :
10253 : : Return an equivalent form, if different from X. Otherwise, return X. If
10254 : : X is zero, we are to always construct the equivalent form. */
10255 : :
10256 : : static rtx
10257 : 11990465 : simplify_and_const_int (rtx x, scalar_int_mode mode, rtx varop,
10258 : : unsigned HOST_WIDE_INT constop)
10259 : : {
10260 : 11990465 : rtx tem = simplify_and_const_int_1 (mode, varop, constop);
10261 : 11990465 : if (tem)
10262 : : return tem;
10263 : :
10264 : 5069918 : if (!x)
10265 : 1296022 : x = simplify_gen_binary (AND, GET_MODE (varop), varop,
10266 : 1296022 : gen_int_mode (constop, mode));
10267 : 5069918 : if (GET_MODE (x) != mode)
10268 : 0 : x = gen_lowpart (mode, x);
10269 : : return x;
10270 : : }
10271 : :
10272 : : /* Given a REG X of mode XMODE, compute which bits in X can be nonzero.
10273 : : We don't care about bits outside of those defined in MODE.
10274 : : We DO care about all the bits in MODE, even if XMODE is smaller than MODE.
10275 : :
10276 : : For most X this is simply GET_MODE_MASK (GET_MODE (MODE)), but if X is
10277 : : a shift, AND, or zero_extract, we can do better. */
10278 : :
10279 : : static rtx
10280 : 436795308 : reg_nonzero_bits_for_combine (const_rtx x, scalar_int_mode xmode,
10281 : : scalar_int_mode mode,
10282 : : unsigned HOST_WIDE_INT *nonzero)
10283 : : {
10284 : 436795308 : rtx tem;
10285 : 436795308 : reg_stat_type *rsp;
10286 : :
10287 : : /* If X is a register whose nonzero bits value is current, use it.
10288 : : Otherwise, if X is a register whose value we can find, use that
10289 : : value. Otherwise, use the previously-computed global nonzero bits
10290 : : for this register. */
10291 : :
10292 : 436795308 : rsp = ®_stat[REGNO (x)];
10293 : 436795308 : if (rsp->last_set_value != 0
10294 : 406596061 : && (rsp->last_set_mode == mode
10295 : 1278 : || (REGNO (x) >= FIRST_PSEUDO_REGISTER
10296 : 0 : && GET_MODE_CLASS (rsp->last_set_mode) == MODE_INT
10297 : 0 : && GET_MODE_CLASS (mode) == MODE_INT))
10298 : 843390091 : && ((rsp->last_set_label >= label_tick_ebb_start
10299 : 303207593 : && rsp->last_set_label < label_tick)
10300 : 385184088 : || (rsp->last_set_label == label_tick
10301 : 281796898 : && DF_INSN_LUID (rsp->last_set) < subst_low_luid)
10302 : 131014351 : || (REGNO (x) >= FIRST_PSEUDO_REGISTER
10303 : 130948137 : && REGNO (x) < reg_n_sets_max
10304 : 130948011 : && REG_N_SETS (REGNO (x)) == 1
10305 : 148295958 : && !REGNO_REG_SET_P
10306 : : (DF_LR_IN (ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb),
10307 : : REGNO (x)))))
10308 : : {
10309 : : /* Note that, even if the precision of last_set_mode is lower than that
10310 : : of mode, record_value_for_reg invoked nonzero_bits on the register
10311 : : with nonzero_bits_mode (because last_set_mode is necessarily integral
10312 : : and HWI_COMPUTABLE_MODE_P in this case) so bits in nonzero_bits_mode
10313 : : are all valid, hence in mode too since nonzero_bits_mode is defined
10314 : : to the largest HWI_COMPUTABLE_MODE_P mode. */
10315 : 349638967 : *nonzero &= rsp->last_set_nonzero_bits;
10316 : 349638967 : return NULL;
10317 : : }
10318 : :
10319 : 87156341 : tem = get_last_value (x);
10320 : 87156341 : if (tem)
10321 : : {
10322 : : if (SHORT_IMMEDIATES_SIGN_EXTEND)
10323 : : tem = sign_extend_short_imm (tem, xmode, GET_MODE_PRECISION (mode));
10324 : :
10325 : : return tem;
10326 : : }
10327 : :
10328 : 87156335 : if (nonzero_sign_valid && rsp->nonzero_bits)
10329 : : {
10330 : 53917463 : unsigned HOST_WIDE_INT mask = rsp->nonzero_bits;
10331 : :
10332 : 53917463 : if (GET_MODE_PRECISION (xmode) < GET_MODE_PRECISION (mode))
10333 : : /* We don't know anything about the upper bits. */
10334 : 0 : mask |= GET_MODE_MASK (mode) ^ GET_MODE_MASK (xmode);
10335 : :
10336 : 53917463 : *nonzero &= mask;
10337 : : }
10338 : :
10339 : : return NULL;
10340 : : }
10341 : :
10342 : : /* Given a reg X of mode XMODE, return the number of bits at the high-order
10343 : : end of X that are known to be equal to the sign bit. X will be used
10344 : : in mode MODE; the returned value will always be between 1 and the
10345 : : number of bits in MODE. */
10346 : :
10347 : : static rtx
10348 : 127183679 : reg_num_sign_bit_copies_for_combine (const_rtx x, scalar_int_mode xmode,
10349 : : scalar_int_mode mode,
10350 : : unsigned int *result)
10351 : : {
10352 : 127183679 : rtx tem;
10353 : 127183679 : reg_stat_type *rsp;
10354 : :
10355 : 127183679 : rsp = ®_stat[REGNO (x)];
10356 : 127183679 : if (rsp->last_set_value != 0
10357 : 116232027 : && rsp->last_set_mode == mode
10358 : 243415537 : && ((rsp->last_set_label >= label_tick_ebb_start
10359 : 87074332 : && rsp->last_set_label < label_tick)
10360 : 110662283 : || (rsp->last_set_label == label_tick
10361 : 81504757 : && DF_INSN_LUID (rsp->last_set) < subst_low_luid)
10362 : 36823038 : || (REGNO (x) >= FIRST_PSEUDO_REGISTER
10363 : 36810419 : && REGNO (x) < reg_n_sets_max
10364 : 36810337 : && REG_N_SETS (REGNO (x)) == 1
10365 : 41770130 : && !REGNO_REG_SET_P
10366 : : (DF_LR_IN (ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb),
10367 : : REGNO (x)))))
10368 : : {
10369 : 100270958 : *result = rsp->last_set_sign_bit_copies;
10370 : 100270958 : return NULL;
10371 : : }
10372 : :
10373 : 26912721 : tem = get_last_value (x);
10374 : 26912721 : if (tem != 0)
10375 : : return tem;
10376 : :
10377 : 17575956 : if (nonzero_sign_valid && rsp->sign_bit_copies != 0
10378 : 40370296 : && GET_MODE_PRECISION (xmode) == GET_MODE_PRECISION (mode))
10379 : 13457580 : *result = rsp->sign_bit_copies;
10380 : :
10381 : : return NULL;
10382 : : }
10383 : :
10384 : : /* Return the number of "extended" bits there are in X, when interpreted
10385 : : as a quantity in MODE whose signedness is indicated by UNSIGNEDP. For
10386 : : unsigned quantities, this is the number of high-order zero bits.
10387 : : For signed quantities, this is the number of copies of the sign bit
10388 : : minus 1. In both case, this function returns the number of "spare"
10389 : : bits. For example, if two quantities for which this function returns
10390 : : at least 1 are added, the addition is known not to overflow.
10391 : :
10392 : : This function will always return 0 unless called during combine, which
10393 : : implies that it must be called from a define_split. */
10394 : :
10395 : : unsigned int
10396 : 0 : extended_count (const_rtx x, machine_mode mode, bool unsignedp)
10397 : : {
10398 : 0 : if (nonzero_sign_valid == 0)
10399 : : return 0;
10400 : :
10401 : 0 : scalar_int_mode int_mode;
10402 : 0 : return (unsignedp
10403 : 0 : ? (is_a <scalar_int_mode> (mode, &int_mode)
10404 : 0 : && HWI_COMPUTABLE_MODE_P (int_mode)
10405 : 0 : ? (unsigned int) (GET_MODE_PRECISION (int_mode) - 1
10406 : 0 : - floor_log2 (nonzero_bits (x, int_mode)))
10407 : : : 0)
10408 : 0 : : num_sign_bit_copies (x, mode) - 1);
10409 : : }
10410 : :
10411 : : /* This function is called from `simplify_shift_const' to merge two
10412 : : outer operations. Specifically, we have already found that we need
10413 : : to perform operation *POP0 with constant *PCONST0 at the outermost
10414 : : position. We would now like to also perform OP1 with constant CONST1
10415 : : (with *POP0 being done last).
10416 : :
10417 : : Return true if we can do the operation and update *POP0 and *PCONST0 with
10418 : : the resulting operation. *PCOMP_P is set to true if we would need to
10419 : : complement the innermost operand, otherwise it is unchanged.
10420 : :
10421 : : MODE is the mode in which the operation will be done. No bits outside
10422 : : the width of this mode matter. It is assumed that the width of this mode
10423 : : is smaller than or equal to HOST_BITS_PER_WIDE_INT.
10424 : :
10425 : : If *POP0 or OP1 are UNKNOWN, it means no operation is required. Only NEG, PLUS,
10426 : : IOR, XOR, and AND are supported. We may set *POP0 to SET if the proper
10427 : : result is simply *PCONST0.
10428 : :
10429 : : If the resulting operation cannot be expressed as one operation, we
10430 : : return false and do not change *POP0, *PCONST0, and *PCOMP_P. */
10431 : :
static bool
merge_outer_ops (enum rtx_code *pop0, HOST_WIDE_INT *pconst0,
		 enum rtx_code op1, HOST_WIDE_INT const1,
		 machine_mode mode, bool *pcomp_p)
{
  enum rtx_code op0 = *pop0;
  HOST_WIDE_INT const0 = *pconst0;

  const0 &= GET_MODE_MASK (mode);
  const1 &= GET_MODE_MASK (mode);

  /* If OP0 is an AND, clear unimportant bits in CONST1.  */
  if (op0 == AND)
    const1 &= const0;

  /* If OP0 or OP1 is UNKNOWN, this is easy.  Similarly if they are the same or
     if OP0 is SET.  */

  if (op1 == UNKNOWN || op0 == SET)
    return true;

  else if (op0 == UNKNOWN)
    op0 = op1, const0 = const1;

  else if (op0 == op1)
    {
      /* Two identical operations fold into one by combining their
	 constants (or, for NEG, cancel entirely).  */
      switch (op0)
	{
	case AND:
	  const0 &= const1;
	  break;
	case IOR:
	  const0 |= const1;
	  break;
	case XOR:
	  const0 ^= const1;
	  break;
	case PLUS:
	  const0 += const1;
	  break;
	case NEG:
	  /* -(-x) == x, so no outer operation remains.  */
	  op0 = UNKNOWN;
	  break;
	default:
	  break;
	}
    }

  /* Otherwise, if either is a PLUS or NEG, we can't do anything.  */
  else if (op0 == PLUS || op1 == PLUS || op0 == NEG || op1 == NEG)
    return false;

  /* If the two constants aren't the same, we can't do anything.  The
     remaining six cases can all be done.  */
  else if (const0 != const1)
    return false;

  else
    /* Here OP0 and OP1 are distinct members of {AND, IOR, XOR} applied
       with the same constant b; each pairing reduces to one operation.  */
    switch (op0)
      {
      case IOR:
	if (op1 == AND)
	  /* (a & b) | b == b */
	  op0 = SET;
	else /* op1 == XOR */
	  /* (a ^ b) | b == a | b */
	  {;}
	break;

      case XOR:
	if (op1 == AND)
	  /* (a & b) ^ b == (~a) & b */
	  op0 = AND, *pcomp_p = true;
	else /* op1 == IOR */
	  /* (a | b) ^ b == a & ~b */
	  op0 = AND, const0 = ~const0;
	break;

      case AND:
	if (op1 == IOR)
	  /* (a | b) & b == b */
	  op0 = SET;
	else /* op1 == XOR */
	  /* (a ^ b) & b) == (~a) & b */
	  *pcomp_p = true;
	break;
      default:
	break;
      }

  /* Check for NO-OP cases.  */
  const0 &= GET_MODE_MASK (mode);
  if (const0 == 0
      && (op0 == IOR || op0 == XOR || op0 == PLUS))
    op0 = UNKNOWN;
  else if (const0 == 0 && op0 == AND)
    op0 = SET;
  else if ((unsigned HOST_WIDE_INT) const0 == GET_MODE_MASK (mode)
	   && op0 == AND)
    op0 = UNKNOWN;

  *pop0 = op0;

  /* ??? Slightly redundant with the above mask, but not entirely.
     Moving this above means we'd have to sign-extend the mode mask
     for the final test.  */
  if (op0 != UNKNOWN && op0 != NEG)
    *pconst0 = trunc_int_for_mode (const0, mode);

  return true;
}
10543 : :
10544 : : /* A helper to simplify_shift_const_1 to determine the mode we can perform
10545 : : the shift in. The original shift operation CODE is performed on OP in
10546 : : ORIG_MODE. Return the wider mode MODE if we can perform the operation
10547 : : in that mode. Return ORIG_MODE otherwise. We can also assume that the
10548 : : result of the shift is subject to operation OUTER_CODE with operand
10549 : : OUTER_CONST. */
10550 : :
static scalar_int_mode
try_widen_shift_mode (enum rtx_code code, rtx op, int count,
		      scalar_int_mode orig_mode, scalar_int_mode mode,
		      enum rtx_code outer_code, HOST_WIDE_INT outer_const)
{
  gcc_assert (GET_MODE_PRECISION (mode) > GET_MODE_PRECISION (orig_mode));

  /* In general we can't perform in wider mode for right shift and rotate.  */
  switch (code)
    {
    case ASHIFTRT:
      /* We can still widen if the bits brought in from the left are identical
	 to the sign bit of ORIG_MODE.  */
      if (num_sign_bit_copies (op, mode)
	  > (unsigned) (GET_MODE_PRECISION (mode)
			- GET_MODE_PRECISION (orig_mode)))
	return mode;
      return orig_mode;

    case LSHIFTRT:
      /* Similarly here but with zero bits.  */
      if (HWI_COMPUTABLE_MODE_P (mode)
	  && (nonzero_bits (op, mode) & ~GET_MODE_MASK (orig_mode)) == 0)
	return mode;

      /* We can also widen if the bits brought in will be masked off.  This
	 operation is performed in ORIG_MODE.  */
      if (outer_code == AND)
	{
	  /* CARE_BITS is the width of OUTER_CONST as a low-bit mask, or -1
	     if it is not such a mask.  */
	  int care_bits = low_bitmask_len (orig_mode, outer_const);

	  if (care_bits >= 0
	      && GET_MODE_PRECISION (orig_mode) - care_bits >= count)
	    return mode;
	}
      /* fall through */

    case ROTATE:
      return orig_mode;

    case ROTATERT:
      /* ROTATERT has been converted to ROTATE before we get here.  */
      gcc_unreachable ();

    default:
      /* All remaining codes (in particular ASHIFT) use the wider mode.  */
      return mode;
    }
}
10598 : :
10599 : : /* Simplify a shift of VAROP by ORIG_COUNT bits. CODE says what kind
10600 : : of shift. The result of the shift is RESULT_MODE. Return NULL_RTX
10601 : : if we cannot simplify it. Otherwise, return a simplified value.
10602 : :
10603 : : The shift is normally computed in the widest mode we find in VAROP, as
10604 : : long as it isn't a different number of words than RESULT_MODE. Exceptions
10605 : : are ASHIFTRT and ROTATE, which are always done in their original mode. */
10606 : :
10607 : : static rtx
10608 : 23765020 : simplify_shift_const_1 (enum rtx_code code, machine_mode result_mode,
10609 : : rtx varop, int orig_count)
10610 : : {
10611 : 23765020 : enum rtx_code orig_code = code;
10612 : 23765020 : rtx orig_varop = varop;
10613 : 23765020 : int count, log2;
10614 : 23765020 : machine_mode mode = result_mode;
10615 : 23765020 : machine_mode shift_mode;
10616 : 23765020 : scalar_int_mode tmode, inner_mode, int_mode, int_varop_mode, int_result_mode;
10617 : : /* We form (outer_op (code varop count) (outer_const)). */
10618 : 23765020 : enum rtx_code outer_op = UNKNOWN;
10619 : 23765020 : HOST_WIDE_INT outer_const = 0;
10620 : 23765020 : bool complement_p = false;
10621 : 23765020 : rtx new_rtx, x;
10622 : :
10623 : : /* Make sure and truncate the "natural" shift on the way in. We don't
10624 : : want to do this inside the loop as it makes it more difficult to
10625 : : combine shifts. */
10626 : 23765020 : if (SHIFT_COUNT_TRUNCATED)
10627 : : orig_count &= GET_MODE_UNIT_BITSIZE (mode) - 1;
10628 : :
10629 : : /* If we were given an invalid count, don't do anything except exactly
10630 : : what was requested. */
10631 : :
10632 : 47529940 : if (orig_count < 0 || orig_count >= (int) GET_MODE_UNIT_PRECISION (mode))
10633 : : return NULL_RTX;
10634 : :
10635 : : count = orig_count;
10636 : :
10637 : : /* Unless one of the branches of the `if' in this loop does a `continue',
10638 : : we will `break' the loop after the `if'. */
10639 : :
10640 : 27723256 : while (count != 0)
10641 : : {
10642 : : /* If we have an operand of (clobber (const_int 0)), fail. */
10643 : 23986823 : if (GET_CODE (varop) == CLOBBER)
10644 : 23765020 : return NULL_RTX;
10645 : :
10646 : : /* Convert ROTATERT to ROTATE. */
10647 : 23986823 : if (code == ROTATERT)
10648 : : {
10649 : 11029 : unsigned int bitsize = GET_MODE_UNIT_PRECISION (result_mode);
10650 : 11029 : code = ROTATE;
10651 : 11029 : count = bitsize - count;
10652 : : }
10653 : :
10654 : 23986823 : shift_mode = result_mode;
10655 : 23986823 : if (shift_mode != mode)
10656 : : {
10657 : : /* We only change the modes of scalar shifts. */
10658 : 149275 : int_mode = as_a <scalar_int_mode> (mode);
10659 : 149275 : int_result_mode = as_a <scalar_int_mode> (result_mode);
10660 : 149275 : shift_mode = try_widen_shift_mode (code, varop, count,
10661 : : int_result_mode, int_mode,
10662 : : outer_op, outer_const);
10663 : : }
10664 : :
10665 : 23986823 : scalar_int_mode shift_unit_mode;
10666 : 67956873 : if (!is_a <scalar_int_mode> (GET_MODE_INNER (shift_mode),
10667 : : &shift_unit_mode))
10668 : : return NULL_RTX;
10669 : :
10670 : : /* Handle cases where the count is greater than the size of the mode
10671 : : minus 1. For ASHIFT, use the size minus one as the count (this can
10672 : : occur when simplifying (lshiftrt (ashiftrt ..))). For rotates,
10673 : : take the count modulo the size. For other shifts, the result is
10674 : : zero.
10675 : :
10676 : : Since these shifts are being produced by the compiler by combining
10677 : : multiple operations, each of which are defined, we know what the
10678 : : result is supposed to be. */
10679 : :
10680 : 23986823 : if (count > (GET_MODE_PRECISION (shift_unit_mode) - 1))
10681 : : {
10682 : 8151 : if (code == ASHIFTRT)
10683 : 8145 : count = GET_MODE_PRECISION (shift_unit_mode) - 1;
10684 : 6 : else if (code == ROTATE || code == ROTATERT)
10685 : 6 : count %= GET_MODE_PRECISION (shift_unit_mode);
10686 : : else
10687 : : {
10688 : : /* We can't simply return zero because there may be an
10689 : : outer op. */
10690 : 0 : varop = const0_rtx;
10691 : 0 : count = 0;
10692 : 0 : break;
10693 : : }
10694 : : }
10695 : :
10696 : : /* If we discovered we had to complement VAROP, leave. Making a NOT
10697 : : here would cause an infinite loop. */
10698 : 23986823 : if (complement_p)
10699 : : break;
10700 : :
10701 : 23973365 : if (shift_mode == shift_unit_mode)
10702 : : {
10703 : : /* An arithmetic right shift of a quantity known to be -1 or 0
10704 : : is a no-op. */
10705 : 23377225 : if (code == ASHIFTRT
10706 : 23377225 : && (num_sign_bit_copies (varop, shift_unit_mode)
10707 : 4246694 : == GET_MODE_PRECISION (shift_unit_mode)))
10708 : : {
10709 : : count = 0;
10710 : : break;
10711 : : }
10712 : :
10713 : : /* If we are doing an arithmetic right shift and discarding all but
10714 : : the sign bit copies, this is equivalent to doing a shift by the
10715 : : bitsize minus one. Convert it into that shift because it will
10716 : : often allow other simplifications. */
10717 : :
10718 : 23377156 : if (code == ASHIFTRT
10719 : 23377156 : && (count + num_sign_bit_copies (varop, shift_unit_mode)
10720 : 4246625 : >= GET_MODE_PRECISION (shift_unit_mode)))
10721 : 255083 : count = GET_MODE_PRECISION (shift_unit_mode) - 1;
10722 : :
10723 : : /* We simplify the tests below and elsewhere by converting
10724 : : ASHIFTRT to LSHIFTRT if we know the sign bit is clear.
10725 : : `make_compound_operation' will convert it to an ASHIFTRT for
10726 : : those machines (such as VAX) that don't have an LSHIFTRT. */
10727 : 23377156 : if (code == ASHIFTRT
10728 : 4246625 : && HWI_COMPUTABLE_MODE_P (shift_unit_mode)
10729 : 27598566 : && val_signbit_known_clear_p (shift_unit_mode,
10730 : : nonzero_bits (varop,
10731 : : shift_unit_mode)))
10732 : : code = LSHIFTRT;
10733 : :
10734 : 23348017 : if (((code == LSHIFTRT
10735 : 5845845 : && HWI_COMPUTABLE_MODE_P (shift_unit_mode)
10736 : 5825377 : && !(nonzero_bits (varop, shift_unit_mode) >> count))
10737 : 23375408 : || (code == ASHIFT
10738 : 13283371 : && HWI_COMPUTABLE_MODE_P (shift_unit_mode)
10739 : 12839869 : && !((nonzero_bits (varop, shift_unit_mode) << count)
10740 : 12839869 : & GET_MODE_MASK (shift_unit_mode))))
10741 : 23351508 : && !side_effects_p (varop))
10742 : 3491 : varop = const0_rtx;
10743 : : }
10744 : :
10745 : 23973296 : switch (GET_CODE (varop))
10746 : : {
10747 : 479320 : case SIGN_EXTEND:
10748 : 479320 : case ZERO_EXTEND:
10749 : 479320 : case SIGN_EXTRACT:
10750 : 479320 : case ZERO_EXTRACT:
10751 : 479320 : new_rtx = expand_compound_operation (varop);
10752 : 479320 : if (new_rtx != varop)
10753 : : {
10754 : 68199 : varop = new_rtx;
10755 : 27791455 : continue;
10756 : : }
10757 : : break;
10758 : :
10759 : 257971 : case MEM:
10760 : : /* The following rules apply only to scalars. */
10761 : 257971 : if (shift_mode != shift_unit_mode)
10762 : : break;
10763 : 243901 : int_mode = as_a <scalar_int_mode> (mode);
10764 : :
10765 : : /* If we have (xshiftrt (mem ...) C) and C is MODE_WIDTH
10766 : : minus the width of a smaller mode, we can do this with a
10767 : : SIGN_EXTEND or ZERO_EXTEND from the narrower memory location. */
10768 : 248628 : if ((code == ASHIFTRT || code == LSHIFTRT)
10769 : 79727 : && ! mode_dependent_address_p (XEXP (varop, 0),
10770 : 79727 : MEM_ADDR_SPACE (varop))
10771 : 79727 : && ! MEM_VOLATILE_P (varop)
10772 : 322066 : && (int_mode_for_size (GET_MODE_BITSIZE (int_mode) - count, 1)
10773 : 239174 : .exists (&tmode)))
10774 : : {
10775 : 4727 : new_rtx = adjust_address_nv (varop, tmode,
10776 : : BYTES_BIG_ENDIAN ? 0
10777 : : : count / BITS_PER_UNIT);
10778 : :
10779 : 4727 : varop = gen_rtx_fmt_e (code == ASHIFTRT ? SIGN_EXTEND
10780 : : : ZERO_EXTEND, int_mode, new_rtx);
10781 : 4727 : count = 0;
10782 : 4727 : continue;
10783 : : }
10784 : : break;
10785 : :
10786 : 4839227 : case SUBREG:
10787 : : /* The following rules apply only to scalars. */
10788 : 4839227 : if (shift_mode != shift_unit_mode)
10789 : : break;
10790 : 4432073 : int_mode = as_a <scalar_int_mode> (mode);
10791 : 4432073 : int_varop_mode = as_a <scalar_int_mode> (GET_MODE (varop));
10792 : :
10793 : : /* If VAROP is a SUBREG, strip it as long as the inner operand has
10794 : : the same number of words as what we've seen so far. Then store
10795 : : the widest mode in MODE. */
10796 : 4432073 : if (subreg_lowpart_p (varop)
10797 : 28182573 : && is_int_mode (GET_MODE (SUBREG_REG (varop)), &inner_mode)
10798 : 8816396 : && GET_MODE_SIZE (inner_mode) > GET_MODE_SIZE (int_varop_mode)
10799 : 178837 : && (CEIL (GET_MODE_SIZE (inner_mode), UNITS_PER_WORD)
10800 : 162494 : == CEIL (GET_MODE_SIZE (int_mode), UNITS_PER_WORD))
10801 : 4577122 : && GET_MODE_CLASS (int_varop_mode) == MODE_INT)
10802 : : {
10803 : 145049 : varop = SUBREG_REG (varop);
10804 : 435147 : if (GET_MODE_SIZE (inner_mode) > GET_MODE_SIZE (int_mode))
10805 : 145049 : mode = inner_mode;
10806 : 145049 : continue;
10807 : : }
10808 : : break;
10809 : :
10810 : 420898 : case MULT:
10811 : : /* Some machines use MULT instead of ASHIFT because MULT
10812 : : is cheaper. But it is still better on those machines to
10813 : : merge two shifts into one. */
10814 : 420898 : if (CONST_INT_P (XEXP (varop, 1))
10815 : 420898 : && (log2 = exact_log2 (UINTVAL (XEXP (varop, 1)))) >= 0)
10816 : : {
10817 : 0 : rtx log2_rtx = gen_int_shift_amount (GET_MODE (varop), log2);
10818 : 0 : varop = simplify_gen_binary (ASHIFT, GET_MODE (varop),
10819 : : XEXP (varop, 0), log2_rtx);
10820 : 0 : continue;
10821 : 0 : }
10822 : : break;
10823 : :
10824 : 8822 : case UDIV:
10825 : : /* Similar, for when divides are cheaper. */
10826 : 8822 : if (CONST_INT_P (XEXP (varop, 1))
10827 : 8822 : && (log2 = exact_log2 (UINTVAL (XEXP (varop, 1)))) >= 0)
10828 : : {
10829 : 9 : rtx log2_rtx = gen_int_shift_amount (GET_MODE (varop), log2);
10830 : 9 : varop = simplify_gen_binary (LSHIFTRT, GET_MODE (varop),
10831 : : XEXP (varop, 0), log2_rtx);
10832 : 9 : continue;
10833 : 9 : }
10834 : : break;
10835 : :
10836 : 347794 : case ASHIFTRT:
10837 : : /* If we are extracting just the sign bit of an arithmetic
10838 : : right shift, that shift is not needed. However, the sign
10839 : : bit of a wider mode may be different from what would be
10840 : : interpreted as the sign bit in a narrower mode, so, if
10841 : : the result is narrower, don't discard the shift. */
10842 : 349671 : if (code == LSHIFTRT
10843 : 14723 : && count == (GET_MODE_UNIT_BITSIZE (result_mode) - 1)
10844 : 347794 : && (GET_MODE_UNIT_BITSIZE (result_mode)
10845 : 3780 : >= GET_MODE_UNIT_BITSIZE (GET_MODE (varop))))
10846 : : {
10847 : 1877 : varop = XEXP (varop, 0);
10848 : 1877 : continue;
10849 : : }
10850 : :
10851 : : /* fall through */
10852 : :
10853 : 5810051 : case LSHIFTRT:
10854 : 5810051 : case ASHIFT:
10855 : 5810051 : case ROTATE:
10856 : : /* The following rules apply only to scalars. */
10857 : 5810051 : if (shift_mode != shift_unit_mode)
10858 : : break;
10859 : 5802286 : int_mode = as_a <scalar_int_mode> (mode);
10860 : 5802286 : int_varop_mode = as_a <scalar_int_mode> (GET_MODE (varop));
10861 : 5802286 : int_result_mode = as_a <scalar_int_mode> (result_mode);
10862 : :
10863 : : /* Here we have two nested shifts. The result is usually the
10864 : : AND of a new shift with a mask. We compute the result below. */
10865 : 5802286 : if (CONST_INT_P (XEXP (varop, 1))
10866 : 5782155 : && INTVAL (XEXP (varop, 1)) >= 0
10867 : 5782152 : && INTVAL (XEXP (varop, 1)) < GET_MODE_PRECISION (int_varop_mode)
10868 : 5782152 : && HWI_COMPUTABLE_MODE_P (int_result_mode)
10869 : 11551257 : && HWI_COMPUTABLE_MODE_P (int_mode))
10870 : : {
10871 : 5748971 : enum rtx_code first_code = GET_CODE (varop);
10872 : 5748971 : unsigned int first_count = INTVAL (XEXP (varop, 1));
10873 : 5748971 : unsigned HOST_WIDE_INT mask;
10874 : 5748971 : rtx mask_rtx;
10875 : :
10876 : : /* We have one common special case. We can't do any merging if
10877 : : the inner code is an ASHIFTRT of a smaller mode. However, if
10878 : : we have (ashift:M1 (subreg:M1 (ashiftrt:M2 FOO C1) 0) C2)
10879 : : with C2 == GET_MODE_BITSIZE (M1) - GET_MODE_BITSIZE (M2),
10880 : : we can convert it to
10881 : : (ashiftrt:M1 (ashift:M1 (and:M1 (subreg:M1 FOO 0) C3) C2) C1).
10882 : : This simplifies certain SIGN_EXTEND operations. */
10883 : 5748971 : if (code == ASHIFT && first_code == ASHIFTRT
10884 : 5748971 : && count == (GET_MODE_PRECISION (int_result_mode)
10885 : 314845 : - GET_MODE_PRECISION (int_varop_mode)))
10886 : : {
10887 : : /* C3 has the low-order C1 bits zero. */
10888 : :
10889 : 0 : mask = GET_MODE_MASK (int_mode)
10890 : 0 : & ~((HOST_WIDE_INT_1U << first_count) - 1);
10891 : :
10892 : 0 : varop = simplify_and_const_int (NULL_RTX, int_result_mode,
10893 : : XEXP (varop, 0), mask);
10894 : 0 : varop = simplify_shift_const (NULL_RTX, ASHIFT,
10895 : : int_result_mode, varop, count);
10896 : 0 : count = first_count;
10897 : 0 : code = ASHIFTRT;
10898 : 0 : continue;
10899 : : }
10900 : :
10901 : : /* If this was (ashiftrt (ashift foo C1) C2) and FOO has more
10902 : : than C1 high-order bits equal to the sign bit, we can convert
10903 : : this to either an ASHIFT or an ASHIFTRT depending on the
10904 : : two counts.
10905 : :
10906 : : We cannot do this if VAROP's mode is not SHIFT_UNIT_MODE. */
10907 : :
10908 : 5750443 : if (code == ASHIFTRT && first_code == ASHIFT
10909 : 2854187 : && int_varop_mode == shift_unit_mode
10910 : 8596273 : && (num_sign_bit_copies (XEXP (varop, 0), shift_unit_mode)
10911 : : > first_count))
10912 : : {
10913 : 1472 : varop = XEXP (varop, 0);
10914 : 1472 : count -= first_count;
10915 : 1472 : if (count < 0)
10916 : : {
10917 : 0 : count = -count;
10918 : 0 : code = ASHIFT;
10919 : : }
10920 : :
10921 : 1472 : continue;
10922 : : }
10923 : :
10924 : : /* There are some cases we can't do. If CODE is ASHIFTRT,
10925 : : we can only do this if FIRST_CODE is also ASHIFTRT.
10926 : :
10927 : : We can't do the case when CODE is ROTATE and FIRST_CODE is
10928 : : ASHIFTRT.
10929 : :
10930 : : If the mode of this shift is not the mode of the outer shift,
10931 : : we can't do this if either shift is a right shift or ROTATE.
10932 : :
10933 : : Finally, we can't do any of these if the mode is too wide
10934 : : unless the codes are the same.
10935 : :
10936 : : Handle the case where the shift codes are the same
10937 : : first. */
10938 : :
10939 : 5747499 : if (code == first_code)
10940 : : {
10941 : 21185 : if (int_varop_mode != int_result_mode
10942 : 21185 : && (code == ASHIFTRT || code == LSHIFTRT
10943 : 69 : || code == ROTATE))
10944 : : break;
10945 : :
10946 : 21149 : count += first_count;
10947 : 21149 : varop = XEXP (varop, 0);
10948 : 21149 : continue;
10949 : : }
10950 : :
10951 : 5726314 : if (code == ASHIFTRT
10952 : 2873551 : || (code == ROTATE && first_code == ASHIFTRT)
10953 : 2873521 : || GET_MODE_PRECISION (int_mode) > HOST_BITS_PER_WIDE_INT
10954 : 8599835 : || (int_varop_mode != int_result_mode
10955 : 52344 : && (first_code == ASHIFTRT || first_code == LSHIFTRT
10956 : 52344 : || first_code == ROTATE
10957 : 8448 : || code == ROTATE)))
10958 : : break;
10959 : :
10960 : : /* To compute the mask to apply after the shift, shift the
10961 : : nonzero bits of the inner shift the same way the
10962 : : outer shift will. */
10963 : :
10964 : 2829625 : mask_rtx = gen_int_mode (nonzero_bits (varop, int_varop_mode),
10965 : : int_result_mode);
10966 : 2829625 : rtx count_rtx = gen_int_shift_amount (int_result_mode, count);
10967 : 2829625 : mask_rtx
10968 : 2829625 : = simplify_const_binary_operation (code, int_result_mode,
10969 : : mask_rtx, count_rtx);
10970 : :
10971 : : /* Give up if we can't compute an outer operation to use. */
10972 : 2829625 : if (mask_rtx == 0
10973 : 2829625 : || !CONST_INT_P (mask_rtx)
10974 : 5659250 : || ! merge_outer_ops (&outer_op, &outer_const, AND,
10975 : : INTVAL (mask_rtx),
10976 : : int_result_mode, &complement_p))
10977 : : break;
10978 : :
10979 : : /* If the shifts are in the same direction, we add the
10980 : : counts. Otherwise, we subtract them. */
10981 : 2802882 : if ((code == ASHIFTRT || code == LSHIFTRT)
10982 : 2802882 : == (first_code == ASHIFTRT || first_code == LSHIFTRT))
10983 : 11473 : count += first_count;
10984 : : else
10985 : 2791409 : count -= first_count;
10986 : :
10987 : : /* If COUNT is positive, the new shift is usually CODE,
10988 : : except for the two exceptions below, in which case it is
10989 : : FIRST_CODE. If the count is negative, FIRST_CODE should
10990 : : always be used */
10991 : 2802882 : if (count > 0
10992 : 687767 : && ((first_code == ROTATE && code == ASHIFT)
10993 : 686992 : || (first_code == ASHIFTRT && code == LSHIFTRT)))
10994 : : code = first_code;
10995 : 2791415 : else if (count < 0)
10996 : 290423 : code = first_code, count = -count;
10997 : :
10998 : 2802882 : varop = XEXP (varop, 0);
10999 : 2802882 : continue;
11000 : 2802882 : }
11001 : :
11002 : : /* If we have (A << B << C) for any shift, we can convert this to
11003 : : (A << C << B). This wins if A is a constant. Only try this if
11004 : : B is not a constant. */
11005 : :
11006 : 53315 : else if (GET_CODE (varop) == code
11007 : 5174 : && CONST_INT_P (XEXP (varop, 0))
11008 : 1013 : && !CONST_INT_P (XEXP (varop, 1)))
11009 : : {
11010 : : /* For ((unsigned) (cstULL >> count)) >> cst2 we have to make
11011 : : sure the result will be masked. See PR70222. */
11012 : 1013 : if (code == LSHIFTRT
11013 : 7 : && int_mode != int_result_mode
11014 : 1020 : && !merge_outer_ops (&outer_op, &outer_const, AND,
11015 : 7 : GET_MODE_MASK (int_result_mode)
11016 : 7 : >> orig_count, int_result_mode,
11017 : : &complement_p))
11018 : : break;
11019 : : /* For ((int) (cstLL >> count)) >> cst2 just give up. Queuing
11020 : : up outer sign extension (often left and right shift) is
11021 : : hardly more efficient than the original. See PR70429.
11022 : : Similarly punt for rotates with different modes.
11023 : : See PR97386. */
11024 : 1013 : if ((code == ASHIFTRT || code == ROTATE)
11025 : 1013 : && int_mode != int_result_mode)
11026 : : break;
11027 : :
11028 : 999 : rtx count_rtx = gen_int_shift_amount (int_result_mode, count);
11029 : 999 : rtx new_rtx = simplify_const_binary_operation (code, int_mode,
11030 : : XEXP (varop, 0),
11031 : : count_rtx);
11032 : 999 : varop = gen_rtx_fmt_ee (code, int_mode, new_rtx, XEXP (varop, 1));
11033 : 999 : count = 0;
11034 : 999 : continue;
11035 : 999 : }
11036 : : break;
11037 : :
11038 : 53725 : case NOT:
11039 : : /* The following rules apply only to scalars. */
11040 : 53725 : if (shift_mode != shift_unit_mode)
11041 : : break;
11042 : :
11043 : : /* Make this fit the case below. */
11044 : 53723 : varop = gen_rtx_XOR (mode, XEXP (varop, 0), constm1_rtx);
11045 : 53723 : continue;
11046 : :
11047 : 779132 : case IOR:
11048 : 779132 : case AND:
11049 : 779132 : case XOR:
11050 : : /* The following rules apply only to scalars. */
11051 : 779132 : if (shift_mode != shift_unit_mode)
11052 : : break;
11053 : 777432 : int_varop_mode = as_a <scalar_int_mode> (GET_MODE (varop));
11054 : 777432 : int_result_mode = as_a <scalar_int_mode> (result_mode);
11055 : :
11056 : : /* If we have (xshiftrt (ior (plus X (const_int -1)) X) C)
11057 : : with C the size of VAROP - 1 and the shift is logical if
11058 : : STORE_FLAG_VALUE is 1 and arithmetic if STORE_FLAG_VALUE is -1,
11059 : : we have an (le X 0) operation. If we have an arithmetic shift
11060 : : and STORE_FLAG_VALUE is 1 or we have a logical shift with
11061 : : STORE_FLAG_VALUE of -1, we have a (neg (le X 0)) operation. */
11062 : :
11063 : 236063 : if (GET_CODE (varop) == IOR && GET_CODE (XEXP (varop, 0)) == PLUS
11064 : 1962 : && XEXP (XEXP (varop, 0), 1) == constm1_rtx
11065 : : && (STORE_FLAG_VALUE == 1 || STORE_FLAG_VALUE == -1)
11066 : 221 : && (code == LSHIFTRT || code == ASHIFTRT)
11067 : 221 : && count == (GET_MODE_PRECISION (int_varop_mode) - 1)
11068 : 777653 : && rtx_equal_p (XEXP (XEXP (varop, 0), 0), XEXP (varop, 1)))
11069 : : {
11070 : 53 : count = 0;
11071 : 53 : varop = gen_rtx_LE (int_varop_mode, XEXP (varop, 1),
11072 : : const0_rtx);
11073 : :
11074 : 53 : if (STORE_FLAG_VALUE == 1 ? code == ASHIFTRT : code == LSHIFTRT)
11075 : 53 : varop = gen_rtx_NEG (int_varop_mode, varop);
11076 : :
11077 : 53 : continue;
11078 : : }
11079 : :
11080 : : /* If we have (shift (logical)), move the logical to the outside
11081 : : to allow it to possibly combine with another logical and the
11082 : : shift to combine with another shift. This also canonicalizes to
11083 : : what a ZERO_EXTRACT looks like. Also, some machines have
11084 : : (and (shift)) insns. */
11085 : :
11086 : 1211740 : if (CONST_INT_P (XEXP (varop, 1))
11087 : : /* We can't do this if we have (ashiftrt (xor)) and the
11088 : : constant has its sign bit set in shift_unit_mode with
11089 : : shift_unit_mode wider than result_mode. */
11090 : 435921 : && !(code == ASHIFTRT && GET_CODE (varop) == XOR
11091 : 7143 : && int_result_mode != shift_unit_mode
11092 : 0 : && trunc_int_for_mode (INTVAL (XEXP (varop, 1)),
11093 : : shift_unit_mode) < 0)
11094 : 435921 : && (new_rtx = simplify_const_binary_operation
11095 : 435921 : (code, int_result_mode,
11096 : 435921 : gen_int_mode (INTVAL (XEXP (varop, 1)), int_result_mode),
11097 : 435921 : gen_int_shift_amount (int_result_mode, count))) != 0
11098 : 435921 : && CONST_INT_P (new_rtx)
11099 : 1213300 : && merge_outer_ops (&outer_op, &outer_const, GET_CODE (varop),
11100 : : INTVAL (new_rtx), int_result_mode,
11101 : : &complement_p))
11102 : : {
11103 : 434361 : varop = XEXP (varop, 0);
11104 : 434361 : continue;
11105 : : }
11106 : :
11107 : : /* If we can't do that, try to simplify the shift in each arm of the
11108 : : logical expression, make a new logical expression, and apply
11109 : : the inverse distributive law. This also can't be done for
11110 : : (ashiftrt (xor)) where we've widened the shift and the constant
11111 : : changes the sign bit. */
11112 : 343018 : if (CONST_INT_P (XEXP (varop, 1))
11113 : 343018 : && !(code == ASHIFTRT && GET_CODE (varop) == XOR
11114 : 48 : && int_result_mode != shift_unit_mode
11115 : 0 : && trunc_int_for_mode (INTVAL (XEXP (varop, 1)),
11116 : : shift_unit_mode) < 0))
11117 : : {
11118 : 1560 : rtx lhs = simplify_shift_const (NULL_RTX, code, shift_unit_mode,
11119 : : XEXP (varop, 0), count);
11120 : 1560 : rtx rhs = simplify_shift_const (NULL_RTX, code, shift_unit_mode,
11121 : : XEXP (varop, 1), count);
11122 : :
11123 : 1560 : varop = simplify_gen_binary (GET_CODE (varop), shift_unit_mode,
11124 : : lhs, rhs);
11125 : 1560 : varop = apply_distributive_law (varop);
11126 : :
11127 : 1560 : count = 0;
11128 : 1560 : continue;
11129 : 1560 : }
11130 : : break;
11131 : :
11132 : 30197 : case EQ:
11133 : : /* The following rules apply only to scalars. */
11134 : 30197 : if (shift_mode != shift_unit_mode)
11135 : : break;
11136 : 30197 : int_result_mode = as_a <scalar_int_mode> (result_mode);
11137 : :
11138 : : /* Convert (lshiftrt (eq FOO 0) C) to (xor FOO 1) if STORE_FLAG_VALUE
11139 : : says that the sign bit can be tested, FOO has mode MODE, C is
11140 : : GET_MODE_PRECISION (MODE) - 1, and FOO has only its low-order bit
11141 : : that may be nonzero. */
11142 : 30197 : if (code == LSHIFTRT
11143 : : && XEXP (varop, 1) == const0_rtx
11144 : : && GET_MODE (XEXP (varop, 0)) == int_result_mode
11145 : : && count == (GET_MODE_PRECISION (int_result_mode) - 1)
11146 : : && HWI_COMPUTABLE_MODE_P (int_result_mode)
11147 : : && STORE_FLAG_VALUE == -1
11148 : : && nonzero_bits (XEXP (varop, 0), int_result_mode) == 1
11149 : : && merge_outer_ops (&outer_op, &outer_const, XOR, 1,
11150 : : int_result_mode, &complement_p))
11151 : : {
11152 : : varop = XEXP (varop, 0);
11153 : : count = 0;
11154 : : continue;
11155 : : }
11156 : : break;
11157 : :
11158 : 28325 : case NEG:
11159 : : /* The following rules apply only to scalars. */
11160 : 28325 : if (shift_mode != shift_unit_mode)
11161 : : break;
11162 : 28182 : int_result_mode = as_a <scalar_int_mode> (result_mode);
11163 : :
11164 : : /* (lshiftrt (neg A) C) where A is either 0 or 1 and C is one less
11165 : : than the number of bits in the mode is equivalent to A. */
11166 : 28187 : if (code == LSHIFTRT
11167 : 6122 : && count == (GET_MODE_PRECISION (int_result_mode) - 1)
11168 : 30802 : && nonzero_bits (XEXP (varop, 0), int_result_mode) == 1)
11169 : : {
11170 : 5 : varop = XEXP (varop, 0);
11171 : 5 : count = 0;
11172 : 5 : continue;
11173 : : }
11174 : :
11175 : : /* NEG commutes with ASHIFT since it is multiplication. Move the
11176 : : NEG outside to allow shifts to combine. */
11177 : 46926 : if (code == ASHIFT
11178 : 28177 : && merge_outer_ops (&outer_op, &outer_const, NEG, 0,
11179 : : int_result_mode, &complement_p))
11180 : : {
11181 : 18749 : varop = XEXP (varop, 0);
11182 : 18749 : continue;
11183 : : }
11184 : : break;
11185 : :
11186 : 1930074 : case PLUS:
11187 : : /* The following rules apply only to scalars. */
11188 : 1930074 : if (shift_mode != shift_unit_mode)
11189 : : break;
11190 : 1885448 : int_result_mode = as_a <scalar_int_mode> (result_mode);
11191 : :
11192 : : /* (lshiftrt (plus A -1) C) where A is either 0 or 1 and C
11193 : : is one less than the number of bits in the mode is
11194 : : equivalent to (xor A 1). */
11195 : 1885448 : if (code == LSHIFTRT
11196 : 385390 : && count == (GET_MODE_PRECISION (int_result_mode) - 1)
11197 : 30698 : && XEXP (varop, 1) == constm1_rtx
11198 : 13509 : && nonzero_bits (XEXP (varop, 0), int_result_mode) == 1
11199 : 1885448 : && merge_outer_ops (&outer_op, &outer_const, XOR, 1,
11200 : : int_result_mode, &complement_p))
11201 : : {
11202 : 0 : count = 0;
11203 : 0 : varop = XEXP (varop, 0);
11204 : 0 : continue;
11205 : : }
11206 : :
11207 : : /* If we have (xshiftrt (plus FOO BAR) C), and the only bits
11208 : : that might be nonzero in BAR are those being shifted out and those
11209 : : bits are known zero in FOO, we can replace the PLUS with FOO.
11210 : : Similarly in the other operand order. This code occurs when
11211 : : we are computing the size of a variable-size array. */
11212 : :
11213 : 1888595 : if ((code == ASHIFTRT || code == LSHIFTRT)
11214 : 544306 : && count < HOST_BITS_PER_WIDE_INT
11215 : 544222 : && nonzero_bits (XEXP (varop, 1), int_result_mode) >> count == 0
11216 : 2032890 : && (nonzero_bits (XEXP (varop, 1), int_result_mode)
11217 : 147442 : & nonzero_bits (XEXP (varop, 0), int_result_mode)) == 0)
11218 : : {
11219 : 3147 : varop = XEXP (varop, 0);
11220 : 3147 : continue;
11221 : : }
11222 : 1882332 : else if ((code == ASHIFTRT || code == LSHIFTRT)
11223 : 541159 : && count < HOST_BITS_PER_WIDE_INT
11224 : 541075 : && HWI_COMPUTABLE_MODE_P (int_result_mode)
11225 : 539846 : && (nonzero_bits (XEXP (varop, 0), int_result_mode)
11226 : 539846 : >> count) == 0
11227 : 1945844 : && (nonzero_bits (XEXP (varop, 0), int_result_mode)
11228 : 63543 : & nonzero_bits (XEXP (varop, 1), int_result_mode)) == 0)
11229 : : {
11230 : 31 : varop = XEXP (varop, 1);
11231 : 31 : continue;
11232 : : }
11233 : :
11234 : : /* (ashift (plus foo C) N) is (plus (ashift foo N) C'). */
11235 : 2273556 : if (code == ASHIFT
11236 : 1332457 : && CONST_INT_P (XEXP (varop, 1))
11237 : 391458 : && (new_rtx = simplify_const_binary_operation
11238 : 391458 : (ASHIFT, int_result_mode,
11239 : 391458 : gen_int_mode (INTVAL (XEXP (varop, 1)), int_result_mode),
11240 : 391458 : gen_int_shift_amount (int_result_mode, count))) != 0
11241 : 391458 : && CONST_INT_P (new_rtx)
11242 : 2273728 : && merge_outer_ops (&outer_op, &outer_const, PLUS,
11243 : : INTVAL (new_rtx), int_result_mode,
11244 : : &complement_p))
11245 : : {
11246 : 391286 : varop = XEXP (varop, 0);
11247 : 391286 : continue;
11248 : : }
11249 : :
11250 : : /* Check for 'PLUS signbit', which is the canonical form of 'XOR
11251 : : signbit', and attempt to change the PLUS to an XOR and move it to
11252 : : the outer operation as is done above in the AND/IOR/XOR case
11253 : : leg for shift(logical). See details in logical handling above
11254 : : for reasoning in doing so. */
11255 : 1499575 : if (code == LSHIFTRT
11256 : 382292 : && CONST_INT_P (XEXP (varop, 1))
11257 : 282176 : && mode_signbit_p (int_result_mode, XEXP (varop, 1))
11258 : 8591 : && (new_rtx = simplify_const_binary_operation
11259 : 1490984 : (code, int_result_mode,
11260 : 8591 : gen_int_mode (INTVAL (XEXP (varop, 1)), int_result_mode),
11261 : 8591 : gen_int_shift_amount (int_result_mode, count))) != 0
11262 : 8591 : && CONST_INT_P (new_rtx)
11263 : 1499575 : && merge_outer_ops (&outer_op, &outer_const, XOR,
11264 : : INTVAL (new_rtx), int_result_mode,
11265 : : &complement_p))
11266 : : {
11267 : 8591 : varop = XEXP (varop, 0);
11268 : 8591 : continue;
11269 : : }
11270 : :
11271 : : break;
11272 : :
11273 : 609674 : case MINUS:
11274 : : /* The following rules apply only to scalars. */
11275 : 609674 : if (shift_mode != shift_unit_mode)
11276 : : break;
11277 : 598354 : int_varop_mode = as_a <scalar_int_mode> (GET_MODE (varop));
11278 : :
11279 : : /* If we have (xshiftrt (minus (ashiftrt X C)) X) C)
11280 : : with C the size of VAROP - 1 and the shift is logical if
11281 : : STORE_FLAG_VALUE is 1 and arithmetic if STORE_FLAG_VALUE is -1,
11282 : : we have a (gt X 0) operation. If the shift is arithmetic with
11283 : : STORE_FLAG_VALUE of 1 or logical with STORE_FLAG_VALUE == -1,
11284 : : we have a (neg (gt X 0)) operation. */
11285 : :
11286 : 598354 : if ((STORE_FLAG_VALUE == 1 || STORE_FLAG_VALUE == -1)
11287 : 598354 : && GET_CODE (XEXP (varop, 0)) == ASHIFTRT
11288 : 13340 : && count == (GET_MODE_PRECISION (int_varop_mode) - 1)
11289 : 45 : && (code == LSHIFTRT || code == ASHIFTRT)
11290 : 14 : && CONST_INT_P (XEXP (XEXP (varop, 0), 1))
11291 : 14 : && INTVAL (XEXP (XEXP (varop, 0), 1)) == count
11292 : 598354 : && rtx_equal_p (XEXP (XEXP (varop, 0), 0), XEXP (varop, 1)))
11293 : : {
11294 : 0 : count = 0;
11295 : 0 : varop = gen_rtx_GT (int_varop_mode, XEXP (varop, 1),
11296 : : const0_rtx);
11297 : :
11298 : 0 : if (STORE_FLAG_VALUE == 1 ? code == ASHIFTRT : code == LSHIFTRT)
11299 : 0 : varop = gen_rtx_NEG (int_varop_mode, varop);
11300 : :
11301 : 0 : continue;
11302 : : }
11303 : : break;
11304 : :
11305 : 667 : case TRUNCATE:
11306 : : /* Change (lshiftrt (truncate (lshiftrt))) to (truncate (lshiftrt))
11307 : : if the truncate does not affect the value. */
11308 : 667 : if (code == LSHIFTRT
11309 : 509 : && GET_CODE (XEXP (varop, 0)) == LSHIFTRT
11310 : 509 : && CONST_INT_P (XEXP (XEXP (varop, 0), 1))
11311 : 667 : && (INTVAL (XEXP (XEXP (varop, 0), 1))
11312 : 509 : >= (GET_MODE_UNIT_PRECISION (GET_MODE (XEXP (varop, 0)))
11313 : 1018 : - GET_MODE_UNIT_PRECISION (GET_MODE (varop)))))
11314 : : {
11315 : 509 : rtx varop_inner = XEXP (varop, 0);
11316 : 509 : int new_count = count + INTVAL (XEXP (varop_inner, 1));
11317 : 509 : rtx new_count_rtx = gen_int_shift_amount (GET_MODE (varop_inner),
11318 : 509 : new_count);
11319 : 509 : varop_inner = gen_rtx_LSHIFTRT (GET_MODE (varop_inner),
11320 : : XEXP (varop_inner, 0),
11321 : : new_count_rtx);
11322 : 509 : varop = gen_rtx_TRUNCATE (GET_MODE (varop), varop_inner);
11323 : 509 : count = 0;
11324 : 509 : continue;
11325 : 509 : }
11326 : : break;
11327 : :
11328 : : default:
11329 : : break;
11330 : 53723 : }
11331 : :
11332 : : break;
11333 : : }
11334 : :
11335 : 23764878 : shift_mode = result_mode;
11336 : 23764878 : if (shift_mode != mode)
11337 : : {
11338 : : /* We only change the modes of scalar shifts. */
11339 : 145036 : int_mode = as_a <scalar_int_mode> (mode);
11340 : 145036 : int_result_mode = as_a <scalar_int_mode> (result_mode);
11341 : 145036 : shift_mode = try_widen_shift_mode (code, varop, count, int_result_mode,
11342 : : int_mode, outer_op, outer_const);
11343 : : }
11344 : :
11345 : : /* We have now finished analyzing the shift. The result should be
11346 : : a shift of type CODE with SHIFT_MODE shifting VAROP COUNT places. If
11347 : : OUTER_OP is non-UNKNOWN, it is an operation that needs to be applied
11348 : : to the result of the shift. OUTER_CONST is the relevant constant,
11349 : : but we must turn off all bits turned off in the shift. */
11350 : :
11351 : 23764878 : if (outer_op == UNKNOWN
11352 : 20156120 : && orig_code == code && orig_count == count
11353 : 20114857 : && varop == orig_varop
11354 : 19985867 : && shift_mode == GET_MODE (varop))
11355 : : return NULL_RTX;
11356 : :
11357 : : /* Make a SUBREG if necessary. If we can't make it, fail. */
11358 : 3781793 : varop = gen_lowpart (shift_mode, varop);
11359 : 3781793 : if (varop == NULL_RTX || GET_CODE (varop) == CLOBBER)
11360 : : return NULL_RTX;
11361 : :
11362 : : /* If we have an outer operation and we just made a shift, it is
11363 : : possible that we could have simplified the shift were it not
11364 : : for the outer operation. So try to do the simplification
11365 : : recursively. */
11366 : :
11367 : 3781793 : if (outer_op != UNKNOWN)
11368 : 3608758 : x = simplify_shift_const_1 (code, shift_mode, varop, count);
11369 : : else
11370 : : x = NULL_RTX;
11371 : :
11372 : 3608758 : if (x == NULL_RTX)
11373 : 3746139 : x = simplify_gen_binary (code, shift_mode, varop,
11374 : 3746139 : gen_int_shift_amount (shift_mode, count));
11375 : :
11376 : : /* If we were doing an LSHIFTRT in a wider mode than it was originally,
11377 : : turn off all the bits that the shift would have turned off. */
11378 : 3781793 : if (orig_code == LSHIFTRT && result_mode != shift_mode)
11379 : : /* We only change the modes of scalar shifts. */
11380 : 9169 : x = simplify_and_const_int (NULL_RTX, as_a <scalar_int_mode> (shift_mode),
11381 : 9169 : x, GET_MODE_MASK (result_mode) >> orig_count);
11382 : :
11383 : : /* Do the remainder of the processing in RESULT_MODE. */
11384 : 3781793 : x = gen_lowpart_or_truncate (result_mode, x);
11385 : :
11386 : : /* If COMPLEMENT_P is set, we have to complement X before doing the outer
11387 : : operation. */
11388 : 3781793 : if (complement_p)
11389 : 22758 : x = simplify_gen_unary (NOT, result_mode, x, result_mode);
11390 : :
11391 : 3781793 : if (outer_op != UNKNOWN)
11392 : : {
11393 : 3608758 : int_result_mode = as_a <scalar_int_mode> (result_mode);
11394 : :
11395 : 3608758 : if (GET_RTX_CLASS (outer_op) != RTX_UNARY
11396 : 3608758 : && GET_MODE_PRECISION (int_result_mode) < HOST_BITS_PER_WIDE_INT)
11397 : 1304468 : outer_const = trunc_int_for_mode (outer_const, int_result_mode);
11398 : :
11399 : 3608758 : if (outer_op == AND)
11400 : 3133467 : x = simplify_and_const_int (NULL_RTX, int_result_mode, x, outer_const);
11401 : 475291 : else if (outer_op == SET)
11402 : : {
11403 : : /* This means that we have determined that the result is
11404 : : equivalent to a constant. This should be rare. */
11405 : 0 : if (!side_effects_p (x))
11406 : 0 : x = GEN_INT (outer_const);
11407 : : }
11408 : 475291 : else if (GET_RTX_CLASS (outer_op) == RTX_UNARY)
11409 : 18749 : x = simplify_gen_unary (outer_op, int_result_mode, x, int_result_mode);
11410 : : else
11411 : 456542 : x = simplify_gen_binary (outer_op, int_result_mode, x,
11412 : : GEN_INT (outer_const));
11413 : : }
11414 : :
11415 : : return x;
11416 : : }
11417 : :
11418 : : /* Simplify a shift of VAROP by COUNT bits. CODE says what kind of shift.
11419 : : The result of the shift is RESULT_MODE. If we cannot simplify it,
11420 : : return X or, if it is NULL, synthesize the expression with
11421 : : simplify_gen_binary. Otherwise, return a simplified value.
11422 : :
11423 : : The shift is normally computed in the widest mode we find in VAROP, as
11424 : : long as it isn't a different number of words than RESULT_MODE. Exceptions
11425 : : are ASHIFTRT and ROTATE, which are always done in their original mode. */
11426 : :
11427 : : static rtx
11428 : 20156262 : simplify_shift_const (rtx x, enum rtx_code code, machine_mode result_mode,
11429 : : rtx varop, int count)
11430 : : {
11431 : 20156262 : rtx tem = simplify_shift_const_1 (code, result_mode, varop, count);
11432 : 20156262 : if (tem)
11433 : : return tem;
11434 : :
11435 : 16410123 : if (!x)
11436 : 4879307 : x = simplify_gen_binary (code, GET_MODE (varop), varop,
11437 : 4879307 : gen_int_shift_amount (GET_MODE (varop), count));
11438 : 16410123 : if (GET_MODE (x) != result_mode)
11439 : 0 : x = gen_lowpart (result_mode, x);
11440 : : return x;
11441 : : }
11442 : :
11443 : :
11444 : : /* A subroutine of recog_for_combine. See there for arguments and
11445 : : return value. */
11446 : :
11447 : : static int
11448 : 48681779 : recog_for_combine_1 (rtx *pnewpat, rtx_insn *insn, rtx *pnotes,
11449 : : unsigned old_nregs, unsigned new_nregs)
11450 : : {
11451 : 48681779 : rtx pat = *pnewpat;
11452 : 48681779 : rtx pat_without_clobbers;
11453 : 48681779 : int insn_code_number;
11454 : 48681779 : int num_clobbers_to_add = 0;
11455 : 48681779 : int i;
11456 : 48681779 : rtx notes = NULL_RTX;
11457 : 48681779 : rtx old_notes, old_pat;
11458 : 48681779 : int old_icode;
11459 : :
11460 : : /* If PAT is a PARALLEL, check to see if it contains the CLOBBER
11461 : : we use to indicate that something didn't match. If we find such a
11462 : : thing, force rejection. */
11463 : 48681779 : if (GET_CODE (pat) == PARALLEL)
11464 : 52444813 : for (i = XVECLEN (pat, 0) - 1; i >= 0; i--)
11465 : 36108409 : if (GET_CODE (XVECEXP (pat, 0, i)) == CLOBBER
11466 : 7194323 : && XEXP (XVECEXP (pat, 0, i), 0) == const0_rtx)
11467 : : return -1;
11468 : :
11469 : 48679727 : old_pat = PATTERN (insn);
11470 : 48679727 : old_notes = REG_NOTES (insn);
11471 : 48679727 : PATTERN (insn) = pat;
11472 : 48679727 : REG_NOTES (insn) = NULL_RTX;
11473 : :
11474 : 48679727 : insn_code_number = recog (pat, insn, &num_clobbers_to_add);
11475 : 48679727 : if (dump_file && (dump_flags & TDF_DETAILS))
11476 : : {
11477 : 277 : if (insn_code_number < 0)
11478 : 177 : fputs ("Failed to match this instruction:\n", dump_file);
11479 : : else
11480 : 100 : fputs ("Successfully matched this instruction:\n", dump_file);
11481 : 277 : print_rtl_single (dump_file, pat);
11482 : : }
11483 : :
11484 : : /* If it isn't, there is the possibility that we previously had an insn
11485 : : that clobbered some register as a side effect, but the combined
11486 : : insn doesn't need to do that. So try once more without the clobbers
11487 : : unless this represents an ASM insn. */
11488 : :
11489 : 38794852 : if (insn_code_number < 0 && ! check_asm_operands (pat)
11490 : 87473076 : && GET_CODE (pat) == PARALLEL)
11491 : : {
11492 : : int pos;
11493 : :
11494 : 50987155 : for (pos = 0, i = 0; i < XVECLEN (pat, 0); i++)
11495 : 35117686 : if (GET_CODE (XVECEXP (pat, 0, i)) != CLOBBER)
11496 : : {
11497 : 28321658 : if (i != pos)
11498 : 2237252 : SUBST (XVECEXP (pat, 0, pos), XVECEXP (pat, 0, i));
11499 : 28321658 : pos++;
11500 : : }
11501 : :
11502 : 15869469 : SUBST_INT (XVECLEN (pat, 0), pos);
11503 : :
11504 : 15869469 : if (pos == 1)
11505 : 4619936 : pat = XVECEXP (pat, 0, 0);
11506 : :
11507 : 15869469 : PATTERN (insn) = pat;
11508 : 15869469 : insn_code_number = recog (pat, insn, &num_clobbers_to_add);
11509 : 15869469 : if (dump_file && (dump_flags & TDF_DETAILS))
11510 : : {
11511 : 82 : if (insn_code_number < 0)
11512 : 81 : fputs ("Failed to match this instruction:\n", dump_file);
11513 : : else
11514 : 1 : fputs ("Successfully matched this instruction:\n", dump_file);
11515 : 82 : print_rtl_single (dump_file, pat);
11516 : : }
11517 : : }
11518 : :
11519 : 48679727 : pat_without_clobbers = pat;
11520 : :
11521 : 48679727 : PATTERN (insn) = old_pat;
11522 : 48679727 : REG_NOTES (insn) = old_notes;
11523 : :
11524 : : /* Recognize all noop sets, these will be killed by followup pass. */
11525 : 48679727 : if (insn_code_number < 0 && GET_CODE (pat) == SET && set_noop_p (pat))
11526 : 220935 : insn_code_number = NOOP_MOVE_INSN_CODE, num_clobbers_to_add = 0;
11527 : :
11528 : : /* If we had any clobbers to add, make a new pattern than contains
11529 : : them. Then check to make sure that all of them are dead. */
11530 : 48679727 : if (num_clobbers_to_add)
11531 : : {
11532 : 1578149 : rtx newpat = gen_rtx_PARALLEL (VOIDmode,
11533 : : rtvec_alloc (GET_CODE (pat) == PARALLEL
11534 : : ? (XVECLEN (pat, 0)
11535 : : + num_clobbers_to_add)
11536 : : : num_clobbers_to_add + 1));
11537 : :
11538 : 1578149 : if (GET_CODE (pat) == PARALLEL)
11539 : 1461 : for (i = 0; i < XVECLEN (pat, 0); i++)
11540 : 974 : XVECEXP (newpat, 0, i) = XVECEXP (pat, 0, i);
11541 : : else
11542 : 1577662 : XVECEXP (newpat, 0, 0) = pat;
11543 : :
11544 : 1578149 : add_clobbers (newpat, insn_code_number);
11545 : :
11546 : 3030634 : for (i = XVECLEN (newpat, 0) - num_clobbers_to_add;
11547 : 3030634 : i < XVECLEN (newpat, 0); i++)
11548 : : {
11549 : 1598069 : if (REG_P (XEXP (XVECEXP (newpat, 0, i), 0))
11550 : 1598069 : && ! reg_dead_at_p (XEXP (XVECEXP (newpat, 0, i), 0), insn))
11551 : : return -1;
11552 : 1452485 : if (GET_CODE (XEXP (XVECEXP (newpat, 0, i), 0)) != SCRATCH)
11553 : : {
11554 : 1406824 : gcc_assert (REG_P (XEXP (XVECEXP (newpat, 0, i), 0)));
11555 : 1406824 : notes = alloc_reg_note (REG_UNUSED,
11556 : : XEXP (XVECEXP (newpat, 0, i), 0), notes);
11557 : : }
11558 : : }
11559 : : pat = newpat;
11560 : : }
11561 : :
11562 : 48534143 : if (insn_code_number >= 0
11563 : 48534143 : && insn_code_number != NOOP_MOVE_INSN_CODE)
11564 : : {
11565 : : /* Create the reg dead notes if needed for the regs that were created via split. */
11566 : 10097072 : for (; old_nregs < new_nregs; old_nregs++)
11567 : 3200 : notes = alloc_reg_note (REG_DEAD, regno_reg_rtx[old_nregs], notes);
11568 : 10093872 : old_pat = PATTERN (insn);
11569 : 10093872 : old_notes = REG_NOTES (insn);
11570 : 10093872 : old_icode = INSN_CODE (insn);
11571 : 10093872 : PATTERN (insn) = pat;
11572 : 10093872 : REG_NOTES (insn) = notes;
11573 : 10093872 : INSN_CODE (insn) = insn_code_number;
11574 : :
11575 : : /* Allow targets to reject combined insn. */
11576 : 10093872 : if (!targetm.legitimate_combined_insn (insn))
11577 : : {
11578 : 3501 : if (dump_file && (dump_flags & TDF_DETAILS))
11579 : 0 : fputs ("Instruction not appropriate for target.",
11580 : : dump_file);
11581 : :
11582 : : /* Callers expect recog_for_combine to strip
11583 : : clobbers from the pattern on failure. */
11584 : : pat = pat_without_clobbers;
11585 : : notes = NULL_RTX;
11586 : :
11587 : : insn_code_number = -1;
11588 : : }
11589 : :
11590 : 10093872 : PATTERN (insn) = old_pat;
11591 : 10093872 : REG_NOTES (insn) = old_notes;
11592 : 10093872 : INSN_CODE (insn) = old_icode;
11593 : : }
11594 : :
11595 : 48534143 : *pnewpat = pat;
11596 : 48534143 : *pnotes = notes;
11597 : :
11598 : 48534143 : return insn_code_number;
11599 : : }
11600 : :
11601 : : /* Change every ZERO_EXTRACT and ZERO_EXTEND of a SUBREG that can be
11602 : : expressed as an AND and maybe an LSHIFTRT, to that formulation.
11603 : : Return whether anything was so changed. */
11604 : :
11605 : : static bool
11606 : 48850421 : change_zero_ext (rtx pat)
11607 : : {
11608 : 48850421 : bool changed = false;
11609 : 48850421 : rtx *src = &SET_SRC (pat);
11610 : :
 : : /* Walk every sub-rtx of the SET_SRC, replacing each qualifying
 : : ZERO_EXTRACT/ZERO_EXTEND with an equivalent (LSHIFTRT +) AND. */
11611 : 48850421 : subrtx_ptr_iterator::array_type array;
11612 : 338532121 : FOR_EACH_SUBRTX_PTR (iter, array, src, NONCONST)
11613 : : {
11614 : 289681700 : rtx x = **iter;
11615 : 289681700 : scalar_int_mode mode, inner_mode;
11616 : 289681700 : if (!is_a <scalar_int_mode> (GET_MODE (x), &mode))
11617 : 426157809 : continue;
11618 : 153205591 : int size;
11619 : :
 : : /* Case 1: a ZERO_EXTRACT with constant width and position whose
 : : operand is no wider than the result mode. Shift the field
 : : down to bit 0 (if needed); the masking AND is added below. */
11620 : 153205591 : if (GET_CODE (x) == ZERO_EXTRACT
11621 : 740160 : && CONST_INT_P (XEXP (x, 1))
11622 : 740138 : && CONST_INT_P (XEXP (x, 2))
11623 : 696354 : && is_a <scalar_int_mode> (GET_MODE (XEXP (x, 0)), &inner_mode)
11624 : 153901945 : && GET_MODE_PRECISION (inner_mode) <= GET_MODE_PRECISION (mode))
11625 : : {
11626 : 696346 : size = INTVAL (XEXP (x, 1));
11627 : :
11628 : 696346 : int start = INTVAL (XEXP (x, 2));
11629 : 696346 : if (BITS_BIG_ENDIAN)
11630 : : start = GET_MODE_PRECISION (inner_mode) - size - start;
11631 : :
11632 : 696346 : if (start != 0)
11633 : 596250 : x = gen_rtx_LSHIFTRT (inner_mode, XEXP (x, 0),
11634 : : gen_int_shift_amount (inner_mode, start));
11635 : : else
11636 : : x = XEXP (x, 0);
11637 : :
11638 : 696346 : if (mode != inner_mode)
11639 : : {
 : : /* Changing a hard register's mode needs target approval. */
11640 : 158 : if (REG_P (x) && HARD_REGISTER_P (x)
11641 : 210554 : && !can_change_dest_mode (x, 0, mode))
11642 : 0 : continue;
11643 : :
11644 : 210554 : x = gen_lowpart_SUBREG (mode, x);
11645 : : }
11646 : : }
 : : /* Case 2: (zero_extend (subreg X)) of a non-paradoxical lowpart
 : : SUBREG -- equivalent to X masked to the inner mode's width. */
11647 : 152509245 : else if (GET_CODE (x) == ZERO_EXTEND
11648 : 2307613 : && GET_CODE (XEXP (x, 0)) == SUBREG
11649 : 446495 : && SCALAR_INT_MODE_P (GET_MODE (SUBREG_REG (XEXP (x, 0))))
11650 : 445286 : && !paradoxical_subreg_p (XEXP (x, 0))
11651 : 152954531 : && subreg_lowpart_p (XEXP (x, 0)))
11652 : : {
11653 : 330591 : inner_mode = as_a <scalar_int_mode> (GET_MODE (XEXP (x, 0)));
11654 : 330591 : size = GET_MODE_PRECISION (inner_mode);
11655 : 330591 : x = SUBREG_REG (XEXP (x, 0));
11656 : 330591 : if (GET_MODE (x) != mode)
11657 : : {
11658 : 11449 : if (REG_P (x) && HARD_REGISTER_P (x)
11659 : 11841 : && !can_change_dest_mode (x, 0, mode))
11660 : 0 : continue;
11661 : :
11662 : 11841 : x = gen_lowpart_SUBREG (mode, x);
11663 : : }
11664 : : }
 : : /* Case 3: (zero_extend (reg)) of a hard register whose mode the
 : : target lets us change -- reference it directly in MODE. */
11665 : 304357235 : else if (GET_CODE (x) == ZERO_EXTEND
11666 : 1977022 : && REG_P (XEXP (x, 0))
11667 : 1075275 : && HARD_REGISTER_P (XEXP (x, 0))
11668 : 152178727 : && can_change_dest_mode (XEXP (x, 0), 0, mode))
11669 : : {
11670 : 73 : inner_mode = as_a <scalar_int_mode> (GET_MODE (XEXP (x, 0)));
11671 : 73 : size = GET_MODE_PRECISION (inner_mode);
11672 : 73 : x = gen_rtx_REG (mode, REGNO (XEXP (x, 0)));
11673 : : }
11674 : : else
11675 : 152178581 : continue;
11676 : :
 : : /* Skip the AND when X is an LSHIFTRT that already cleared exactly
 : : the bits above SIZE; the mask would be redundant. */
11677 : 1440371 : if (!(GET_CODE (x) == LSHIFTRT
11678 : 413361 : && CONST_INT_P (XEXP (x, 1))
11679 : 413361 : && size + INTVAL (XEXP (x, 1)) == GET_MODE_PRECISION (mode)))
11680 : : {
11681 : 873351 : wide_int mask = wi::mask (size, false, GET_MODE_PRECISION (mode));
11682 : 873351 : x = gen_rtx_AND (mode, x, immed_wide_int_const (mask, mode));
11683 : 873351 : }
11684 : :
11685 : 1027010 : SUBST (**iter, x);
11686 : 1027010 : changed = true;
11687 : : }
11688 : :
 : : /* The substitutions may have left commutative operations with
 : : non-canonical operand order; fix that up in a second pass. */
11689 : 48850421 : if (changed)
11690 : 9296368 : FOR_EACH_SUBRTX_PTR (iter, array, src, NONCONST)
11691 : 8283673 : maybe_swap_commutative_operands (**iter);
11692 : :
 : : /* A ZERO_EXTRACT as SET_DEST is a bit-field store; rewrite it as a
 : : read-modify-write of the whole register:
 : : reg = (reg & ~field_mask) | ((src << offset) & field_mask). */
11693 : 48850421 : rtx *dst = &SET_DEST (pat);
11694 : 48850421 : scalar_int_mode mode;
11695 : 48850421 : if (GET_CODE (*dst) == ZERO_EXTRACT
11696 : 9880 : && REG_P (XEXP (*dst, 0))
11697 : 359 : && is_a <scalar_int_mode> (GET_MODE (XEXP (*dst, 0)), &mode)
11698 : 359 : && CONST_INT_P (XEXP (*dst, 1))
11699 : 48850780 : && CONST_INT_P (XEXP (*dst, 2)))
11700 : : {
11701 : 244 : rtx reg = XEXP (*dst, 0);
11702 : 244 : int width = INTVAL (*dst ? XEXP (*dst, 1) : NULL_RTX);
11703 : 244 : int offset = INTVAL (XEXP (*dst, 2));
11704 : 244 : int reg_width = GET_MODE_PRECISION (mode);
11705 : 244 : if (BITS_BIG_ENDIAN)
11706 : : offset = reg_width - width - offset;
11707 : :
11708 : 244 : rtx x, y, z, w;
 : : /* MASK keeps the bits outside the field; MASK2 keeps the field. */
11709 : 244 : wide_int mask = wi::shifted_mask (offset, width, true, reg_width);
11710 : 244 : wide_int mask2 = wi::shifted_mask (offset, width, false, reg_width);
11711 : 244 : x = gen_rtx_AND (mode, reg, immed_wide_int_const (mask, mode));
11712 : 244 : if (offset)
11713 : 200 : y = gen_rtx_ASHIFT (mode, SET_SRC (pat), GEN_INT (offset));
11714 : : else
11715 : 44 : y = SET_SRC (pat);
11716 : 244 : z = gen_rtx_AND (mode, y, immed_wide_int_const (mask2, mode));
11717 : 244 : w = gen_rtx_IOR (mode, x, z);
11718 : 244 : SUBST (SET_DEST (pat), reg);
11719 : 244 : SUBST (SET_SRC (pat), w);
11720 : :
11721 : 244 : changed = true;
11722 : : }
11723 : :
11724 : 48850421 : return changed;
11725 : 48850421 : }
11726 : :
11727 : : /* Like recog, but we receive the address of a pointer to a new pattern.
11728 : : We try to match the rtx that the pointer points to.
11729 : : If that fails, we may try to modify or replace the pattern,
11730 : : storing the replacement into the same pointer object.
11731 : :
11732 : : Modifications include deletion or addition of CLOBBERs. If the
11733 : : instruction will still not match, we change ZERO_EXTEND and ZERO_EXTRACT
11734 : : to the equivalent AND and perhaps LSHIFTRT patterns, and try with that
11735 : : (and undo if that fails).
11736 : :
11737 : : PNOTES is a pointer to a location where any REG_UNUSED notes added for
11738 : : the CLOBBERs are placed.
11739 : : If OLD_NREGS != NEW_NREGS, then PNOTES also includes REG_DEAD notes added.
11740 : :
11741 : : The value is the final insn code from the pattern ultimately matched,
11742 : : or -1. */
11743 : :
11744 : : static int
11745 : 47438012 : recog_for_combine (rtx *pnewpat, rtx_insn *insn, rtx *pnotes,
11746 : : unsigned int old_nregs, unsigned int new_nregs)
11747 : : {
11748 : 47438012 : rtx pat = *pnewpat;
 : : /* First try to recognize the pattern as-is. */
11749 : 47438012 : int insn_code_number = recog_for_combine_1 (pnewpat, insn, pnotes,
11750 : : old_nregs, new_nregs);
11751 : 47438012 : if (insn_code_number >= 0 || check_asm_operands (pat))
11752 : 10164581 : return insn_code_number;
11753 : :
 : : /* Remember the undo-stack position so the speculative rewrites
 : : below can be reverted if recognition still fails. */
11754 : 37273431 : void *marker = get_undo_marker ();
11755 : 37273431 : bool changed = false;
11756 : :
11757 : 37273431 : if (GET_CODE (pat) == SET)
11758 : : {
11759 : : /* For an unrecognized single set of a constant, try placing it in
11760 : : the constant pool, if this function already uses one. */
11761 : 21991211 : rtx src = SET_SRC (pat);
11762 : 21991211 : if (CONSTANT_P (src)
11763 : 450658 : && !CONST_INT_P (src)
11764 : 402087 : && crtl->uses_const_pool
11765 : 353338 : && SET_DEST (pat) != pc_rtx)
11766 : : {
11767 : 353336 : machine_mode mode = GET_MODE (src);
 : : /* VOIDmode constants take their mode from the destination. */
11768 : 353336 : if (mode == VOIDmode)
11769 : 1183 : mode = GET_MODE (SET_DEST (pat));
11770 : 353336 : src = force_const_mem (mode, src);
11771 : 353336 : if (src)
11772 : : {
11773 : 353326 : SUBST (SET_SRC (pat), src);
11774 : 353326 : changed = true;
11775 : : }
11776 : : }
11777 : : else
 : : /* Otherwise rewrite ZERO_EXTEND/ZERO_EXTRACT as AND/LSHIFTRT. */
11778 : 21637875 : changed = change_zero_ext (pat);
11779 : : }
11780 : 15282220 : else if (GET_CODE (pat) == PARALLEL)
11781 : : {
 : : /* Apply the same zero-extension rewrite to each SET in the
 : : PARALLEL. */
11782 : : int i;
11783 : 42701272 : for (i = 0; i < XVECLEN (pat, 0); i++)
11784 : : {
11785 : 27438125 : rtx set = XVECEXP (pat, 0, i);
11786 : 27438125 : if (GET_CODE (set) == SET)
11787 : 27212546 : changed |= change_zero_ext (set);
11788 : : }
11789 : : }
11790 : :
 : : /* Retry recognition only if something was rewritten; undo all of
 : : the substitutions if it still does not match. */
11791 : 37254348 : if (changed)
11792 : : {
11793 : 1243767 : insn_code_number = recog_for_combine_1 (pnewpat, insn, pnotes,
11794 : : old_nregs, new_nregs);
11795 : :
11796 : 1243767 : if (insn_code_number < 0)
11797 : 1095539 : undo_to_marker (marker);
11798 : : }
11799 : :
11800 : : return insn_code_number;
11801 : : }
11802 : :
11803 : : /* Like gen_lowpart_general but for use by combine. In combine it
11804 : : is not possible to create any new pseudoregs. However, it is
11805 : : safe to create invalid memory addresses, because combine will
11806 : : try to recognize them and all they will do is make the combine
11807 : : attempt fail.
11808 : :
11809 : : If for some reason this cannot do its job, an rtx
11810 : : (clobber (const_int 0)) is returned.
11811 : : An insn containing that will not be recognized. */
11812 : :
11813 : : static rtx
11814 : 151796494 : gen_lowpart_for_combine (machine_mode omode, rtx x)
11815 : : {
11816 : 151796494 : machine_mode imode = GET_MODE (x);
11817 : 151796494 : rtx result;
11818 : :
 : : /* Nothing to do if X already has the requested mode. */
11819 : 151796494 : if (omode == imode)
11820 : : return x;
11821 : :
11822 : : /* We can only support MODE being wider than a word if X is a
11823 : : constant integer or has a mode the same size. */
11824 : 50207379 : if (maybe_gt (GET_MODE_SIZE (omode), UNITS_PER_WORD)
11825 : 23768172 : && ! (CONST_SCALAR_INT_P (x)
11826 : 9703324 : || known_eq (GET_MODE_SIZE (imode), GET_MODE_SIZE (omode))))
11827 : 2902196 : goto fail;
11828 : :
11829 : : /* X might be a paradoxical (subreg (mem)). In that case, gen_lowpart
11830 : : won't know what to do. So we will strip off the SUBREG here and
11831 : : process normally. */
11832 : 20865976 : if (GET_CODE (x) == SUBREG && MEM_P (SUBREG_REG (x)))
11833 : : {
11834 : 15004 : x = SUBREG_REG (x);
11835 : :
11836 : : /* For use in case we fall down into the address adjustments
11837 : : further below, we need to adjust the known mode and size of
11838 : : x; imode and isize, since we just adjusted x. */
11839 : 15004 : imode = GET_MODE (x);
11840 : :
11841 : 15004 : if (imode == omode)
11842 : : return x;
11843 : : }
11844 : :
 : : /* Try the generic lowpart extraction first. */
11845 : 20856801 : result = gen_lowpart_common (omode, x);
11846 : :
11847 : 20856801 : if (result)
11848 : : return result;
11849 : :
11850 : 8238724 : if (MEM_P (x))
11851 : : {
11852 : : /* Refuse to work on a volatile memory ref or one with a mode-dependent
11853 : : address. */
11854 : 2038331 : if (MEM_VOLATILE_P (x)
11855 : 4032365 : || mode_dependent_address_p (XEXP (x, 0), MEM_ADDR_SPACE (x)))
11856 : 44327 : goto fail;
11857 : :
11858 : : /* If we want to refer to something bigger than the original memref,
11859 : : generate a paradoxical subreg instead. That will force a reload
11860 : : of the original memref X. */
11861 : 1994004 : if (paradoxical_subreg_p (omode, imode)
11862 : 1994004 : && validate_subreg (omode, GET_MODE (x), x, 0))
11863 : 1778987 : return gen_rtx_SUBREG (omode, x, 0);
11864 : :
 : : /* Otherwise address the lowpart bytes of the memref directly. */
11865 : 215017 : poly_int64 offset = byte_lowpart_offset (omode, imode);
11866 : 215017 : return adjust_address_nv (x, omode, offset);
11867 : : }
11868 : :
11869 : : /* If X is a comparison operator, rewrite it in a new mode. This
11870 : : probably won't match, but may allow further simplifications. */
11871 : 6200393 : else if (COMPARISON_P (x)
11872 : 139530 : && SCALAR_INT_MODE_P (imode)
11873 : 52167 : && SCALAR_INT_MODE_P (omode))
11874 : 52156 : return gen_rtx_fmt_ee (GET_CODE (x), omode, XEXP (x, 0), XEXP (x, 1));
11875 : :
11876 : : /* If we couldn't simplify X any other way, just enclose it in a
11877 : : SUBREG. Normally, this SUBREG won't match, but some patterns may
11878 : : include an explicit SUBREG or we may simplify it further in combine. */
11879 : : else
11880 : : {
11881 : 6148237 : rtx res;
11882 : :
 : : /* A VOIDmode value (e.g. a constant) needs an integer mode of
 : : OMODE's width before lowpart_subreg can operate on it. */
11883 : 6148237 : if (imode == VOIDmode)
11884 : : {
11885 : 8 : imode = int_mode_for_mode (omode).require ();
11886 : 8 : x = gen_lowpart_common (imode, x);
11887 : 8 : if (x == NULL)
11888 : 0 : goto fail;
11889 : : }
11890 : 6148237 : res = lowpart_subreg (omode, x, imode);
11891 : 6148237 : if (res)
11892 : : return res;
11893 : : }
11894 : :
11895 : 19601 : fail:
11896 : 2966124 : return gen_rtx_CLOBBER (omode, const0_rtx);
11897 : : }
11898 : :
11899 : : /* Like gen_lowpart_for_combine but returns NULL_RTX
11900 : : for an error instead of CLOBBER.
11901 : : Note no_emit is not called directly from combine but rather from
11902 : : simplify_rtx and is expecting a NULL on failure rather than
11903 : : a CLOBBER. */
11904 : :
11905 : : static rtx
11906 : 751756 : gen_lowpart_for_combine_no_emit (machine_mode omode, rtx x)
11907 : : {
11908 : 751756 : rtx tem = gen_lowpart_for_combine (omode, x);
 : : /* gen_lowpart_for_combine signals failure with (clobber (const_int 0));
 : : translate that (or a null result) into the NULL_RTX this caller
 : : convention expects. */
11909 : 751756 : if (!tem || GET_CODE (tem) == CLOBBER)
11910 : 15366 : return NULL_RTX;
11911 : : return tem;
11912 : : }
11913 : :
11914 : :
11915 : : /* Try to simplify a comparison between OP0 and a constant OP1,
11916 : : where CODE is the comparison code that will be tested, into a
11917 : : (CODE OP0 const0_rtx) form.
11918 : :
11919 : : The result is a possibly different comparison code to use.
11920 : : *POP0 and *POP1 may be updated. */
11921 : :
11922 : : static enum rtx_code
11923 : 15547105 : simplify_compare_const (enum rtx_code code, machine_mode mode,
11924 : : rtx *pop0, rtx *pop1)
11925 : : {
11926 : 15547105 : scalar_int_mode int_mode;
11927 : 15547105 : rtx op0 = *pop0;
 : : /* NOTE(review): *POP1 is assumed to be a CONST_INT here (INTVAL
 : : would be invalid otherwise) -- guaranteed by the caller. */
11928 : 15547105 : HOST_WIDE_INT const_op = INTVAL (*pop1);
11929 : :
11930 : : /* Get the constant we are comparing against and turn off all bits
11931 : : not on in our mode. */
11932 : 15547105 : if (mode != VOIDmode)
11933 : 15252032 : const_op = trunc_int_for_mode (const_op, mode);
11934 : :
11935 : : /* If we are comparing against a constant power of two and the value
11936 : : being compared can only have that single bit nonzero (e.g., it was
11937 : : `and'ed with that bit), we can replace this with a comparison
11938 : : with zero. */
11939 : 15547105 : if (const_op
11940 : 4158387 : && (code == EQ || code == NE || code == GEU || code == LTU
11941 : : /* This optimization is incorrect for signed >= INT_MIN or
11942 : : < INT_MIN, those are always true or always false. */
11943 : 24551 : || ((code == GE || code == LT) && const_op > 0))
11944 : 2791662 : && is_a <scalar_int_mode> (mode, &int_mode)
11945 : 2791662 : && GET_MODE_PRECISION (int_mode) - 1 < HOST_BITS_PER_WIDE_INT
11946 : 2772503 : && pow2p_hwi (const_op & GET_MODE_MASK (int_mode))
11947 : 16473752 : && (nonzero_bits (op0, int_mode)
11948 : 926647 : == (unsigned HOST_WIDE_INT) (const_op & GET_MODE_MASK (int_mode))))
11949 : : {
11950 : 4526 : code = (code == EQ || code == GE || code == GEU ? NE : EQ);
11951 : : const_op = 0;
11952 : : }
11953 : :
11954 : : /* Similarly, if we are comparing a value known to be either -1 or
11955 : : 0 with -1, change it to the opposite comparison against zero. */
11956 : 15542579 : if (const_op == -1
11957 : 229514 : && (code == EQ || code == NE || code == GT || code == LE
11958 : : || code == GEU || code == LTU)
11959 : 15763317 : && is_a <scalar_int_mode> (mode, &int_mode)
11960 : 15769563 : && num_sign_bit_copies (op0, int_mode) == GET_MODE_PRECISION (int_mode))
11961 : : {
11962 : 12031 : code = (code == EQ || code == LE || code == GEU ? NE : EQ);
11963 : : const_op = 0;
11964 : : }
11965 : :
11966 : : /* Do some canonicalizations based on the comparison code. We prefer
11967 : : comparisons against zero and then prefer equality comparisons.
11968 : : If we can reduce the size of a constant, we will do that too.
 : : Note the deliberate fall-throughs below: e.g. LT with C > 0 is
 : : rewritten to LE and then handled by the LE case. */
11969 : 15536333 : switch (code)
11970 : : {
11971 : 268034 : case LT:
11972 : : /* < C is equivalent to <= (C - 1) */
11973 : 268034 : if (const_op > 0)
11974 : : {
11975 : 4886 : const_op -= 1;
11976 : 4886 : code = LE;
11977 : : /* ... fall through to LE case below. */
11978 : 400621 : gcc_fallthrough ();
11979 : : }
11980 : : else
11981 : : break;
11982 : :
11983 : 400621 : case LE:
11984 : : /* <= C is equivalent to < (C + 1); we do this for C < 0 */
11985 : 400621 : if (const_op < 0)
11986 : : {
11987 : 52 : const_op += 1;
11988 : 52 : code = LT;
11989 : : }
11990 : :
11991 : : /* If we are doing a <= 0 comparison on a value known to have
11992 : : a zero sign bit, we can replace this with == 0. */
11993 : 400569 : else if (const_op == 0
11994 : 274917 : && is_a <scalar_int_mode> (mode, &int_mode)
11995 : 274917 : && GET_MODE_PRECISION (int_mode) - 1 < HOST_BITS_PER_WIDE_INT
11996 : 675486 : && (nonzero_bits (op0, int_mode)
11997 : 274917 : & (HOST_WIDE_INT_1U << (GET_MODE_PRECISION (int_mode) - 1)))
11998 : 274917 : == 0)
11999 : : code = EQ;
12000 : : break;
12001 : :
12002 : 235955 : case GE:
12003 : : /* >= C is equivalent to > (C - 1). */
12004 : 235955 : if (const_op > 0)
12005 : : {
12006 : 2412 : const_op -= 1;
12007 : 2412 : code = GT;
12008 : : /* ... fall through to GT below. */
12009 : 246455 : gcc_fallthrough ();
12010 : : }
12011 : : else
12012 : : break;
12013 : :
12014 : 246455 : case GT:
12015 : : /* > C is equivalent to >= (C + 1); we do this for C < 0. */
12016 : 246455 : if (const_op < 0)
12017 : : {
12018 : 155 : const_op += 1;
12019 : 155 : code = GE;
12020 : : }
12021 : :
12022 : : /* If we are doing a > 0 comparison on a value known to have
12023 : : a zero sign bit, we can replace this with != 0. */
12024 : 246300 : else if (const_op == 0
12025 : 123047 : && is_a <scalar_int_mode> (mode, &int_mode)
12026 : 123047 : && GET_MODE_PRECISION (int_mode) - 1 < HOST_BITS_PER_WIDE_INT
12027 : 369347 : && (nonzero_bits (op0, int_mode)
12028 : 123047 : & (HOST_WIDE_INT_1U << (GET_MODE_PRECISION (int_mode) - 1)))
12029 : 123047 : == 0)
12030 : : code = NE;
12031 : : break;
12032 : :
12033 : 99956 : case LTU:
12034 : : /* < C is equivalent to <= (C - 1). */
12035 : 99956 : if (const_op > 0)
12036 : : {
12037 : 89865 : const_op -= 1;
12038 : 89865 : code = LEU;
12039 : : /* ... fall through ... */
12040 : 89865 : gcc_fallthrough ();
12041 : : }
12042 : : /* (unsigned) < 0x80000000 is equivalent to >= 0. */
12043 : 10091 : else if (is_a <scalar_int_mode> (mode, &int_mode)
12044 : 10091 : && GET_MODE_PRECISION (int_mode) - 1 < HOST_BITS_PER_WIDE_INT
12045 : 9321 : && (((unsigned HOST_WIDE_INT) const_op & GET_MODE_MASK (int_mode))
12046 : 9321 : == HOST_WIDE_INT_1U << (GET_MODE_PRECISION (int_mode) - 1)))
12047 : : {
12048 : : const_op = 0;
12049 : : code = GE;
12050 : : break;
12051 : : }
12052 : : else
12053 : : break;
12054 : :
12055 : 691960 : case LEU:
12056 : : /* unsigned <= 0 is equivalent to == 0 */
12057 : 691960 : if (const_op == 0)
12058 : : code = EQ;
12059 : : /* (unsigned) <= 0x7fffffff is equivalent to >= 0. */
12060 : 691624 : else if (is_a <scalar_int_mode> (mode, &int_mode)
12061 : 691624 : && GET_MODE_PRECISION (int_mode) - 1 < HOST_BITS_PER_WIDE_INT
12062 : 689683 : && ((unsigned HOST_WIDE_INT) const_op
12063 : : == ((HOST_WIDE_INT_1U
12064 : 689683 : << (GET_MODE_PRECISION (int_mode) - 1)) - 1)))
12065 : : {
12066 : : const_op = 0;
12067 : : code = GE;
12068 : : }
12069 : : break;
12070 : :
12071 : 31851 : case GEU:
12072 : : /* >= C is equivalent to > (C - 1). */
12073 : 31851 : if (const_op > 1)
12074 : : {
12075 : 22911 : const_op -= 1;
12076 : 22911 : code = GTU;
12077 : : /* ... fall through ... */
12078 : 22911 : gcc_fallthrough ();
12079 : : }
12080 : :
12081 : : /* (unsigned) >= 0x80000000 is equivalent to < 0. */
12082 : 8940 : else if (is_a <scalar_int_mode> (mode, &int_mode)
12083 : 8940 : && GET_MODE_PRECISION (int_mode) - 1 < HOST_BITS_PER_WIDE_INT
12084 : 7669 : && (((unsigned HOST_WIDE_INT) const_op & GET_MODE_MASK (int_mode))
12085 : 7669 : == HOST_WIDE_INT_1U << (GET_MODE_PRECISION (int_mode) - 1)))
12086 : : {
12087 : : const_op = 0;
12088 : : code = LT;
12089 : : break;
12090 : : }
12091 : : else
12092 : : break;
12093 : :
12094 : 528474 : case GTU:
12095 : : /* unsigned > 0 is equivalent to != 0 */
12096 : 528474 : if (const_op == 0)
12097 : : code = NE;
12098 : : /* (unsigned) > 0x7fffffff is equivalent to < 0. */
12099 : 528474 : else if (is_a <scalar_int_mode> (mode, &int_mode)
12100 : 528474 : && GET_MODE_PRECISION (int_mode) - 1 < HOST_BITS_PER_WIDE_INT
12101 : 527378 : && ((unsigned HOST_WIDE_INT) const_op
12102 : : == (HOST_WIDE_INT_1U
12103 : 527378 : << (GET_MODE_PRECISION (int_mode) - 1)) - 1))
12104 : : {
12105 : : const_op = 0;
12106 : : code = LT;
12107 : : }
12108 : : break;
12109 : :
12110 : : default:
12111 : : break;
12112 : : }
12113 : :
12114 : : /* Narrow non-symmetric comparison of memory and constant as e.g.
12115 : : x0...x7 <= 0x3fffffffffffffff into x0 <= 0x3f where x0 is the most
12116 : : significant byte. Likewise, transform x0...x7 >= 0x4000000000000000 into
12117 : : x0 >= 0x40. */
12118 : 14837440 : if ((code == LEU || code == LTU || code == GEU || code == GTU)
12119 : 1235777 : && is_a <scalar_int_mode> (GET_MODE (op0), &int_mode)
12120 : 1235756 : && HWI_COMPUTABLE_MODE_P (int_mode)
12121 : 1230678 : && MEM_P (op0)
12122 : 88588 : && !MEM_VOLATILE_P (op0)
12123 : : /* The optimization makes only sense for constants which are big enough
12124 : : so that we have a chance to chop off something at all. */
12125 : 87726 : && ((unsigned HOST_WIDE_INT) const_op & GET_MODE_MASK (int_mode)) > 0xff
12126 : : /* Ensure that we do not overflow during normalization. */
12127 : 26064 : && (code != GTU
12128 : 3705 : || ((unsigned HOST_WIDE_INT) const_op & GET_MODE_MASK (int_mode))
12129 : : < HOST_WIDE_INT_M1U)
12130 : 15573169 : && trunc_int_for_mode (const_op, int_mode) == const_op)
12131 : : {
12132 : 26064 : unsigned HOST_WIDE_INT n
12133 : 26064 : = (unsigned HOST_WIDE_INT) const_op & GET_MODE_MASK (int_mode);
12134 : 26064 : enum rtx_code adjusted_code;
12135 : :
12136 : : /* Normalize code to either LEU or GEU. */
12137 : 26064 : if (code == LTU)
12138 : : {
12139 : 323 : --n;
12140 : 323 : adjusted_code = LEU;
12141 : : }
12142 : 25741 : else if (code == GTU)
12143 : : {
12144 : 3705 : ++n;
12145 : 3705 : adjusted_code = GEU;
12146 : : }
12147 : : else
12148 : : adjusted_code = code;
12149 : :
 : : /* Find the narrowest mode whose dropped low bits of N are all
 : : ones (for LEU) or all zeros (for GEU); those bits cannot
 : : change the comparison's outcome and can be chopped off. */
12150 : 26064 : scalar_int_mode narrow_mode_iter;
12151 : 80877 : FOR_EACH_MODE_UNTIL (narrow_mode_iter, int_mode)
12152 : : {
12153 : 55454 : unsigned nbits = GET_MODE_PRECISION (int_mode)
12154 : 55454 : - GET_MODE_PRECISION (narrow_mode_iter);
12155 : 55454 : unsigned HOST_WIDE_INT mask = (HOST_WIDE_INT_1U << nbits) - 1;
12156 : 55454 : unsigned HOST_WIDE_INT lower_bits = n & mask;
12157 : 55454 : if ((adjusted_code == LEU && lower_bits == mask)
12158 : 55209 : || (adjusted_code == GEU && lower_bits == 0))
12159 : : {
12160 : 641 : n >>= nbits;
12161 : 641 : break;
12162 : : }
12163 : : }
12164 : :
 : : /* If the loop ran to completion, narrow_mode_iter == int_mode
 : : and no narrowing was possible. */
12165 : 26064 : if (narrow_mode_iter < int_mode)
12166 : : {
12167 : 641 : if (dump_file && (dump_flags & TDF_DETAILS))
12168 : : {
12169 : 12 : fprintf (
12170 : : dump_file, "narrow comparison from mode %s to %s: (MEM %s "
12171 : : HOST_WIDE_INT_PRINT_HEX ") to (MEM %s "
12172 : 12 : HOST_WIDE_INT_PRINT_HEX ").\n", GET_MODE_NAME (int_mode),
12173 : 12 : GET_MODE_NAME (narrow_mode_iter), GET_RTX_NAME (code),
12174 : 12 : (unsigned HOST_WIDE_INT) const_op & GET_MODE_MASK (int_mode),
12175 : 12 : GET_RTX_NAME (adjusted_code), n);
12176 : : }
 : : /* Address the most significant bytes of the memref. */
12177 : 641 : poly_int64 offset = (BYTES_BIG_ENDIAN
12178 : 641 : ? 0
12179 : 641 : : (GET_MODE_SIZE (int_mode)
12180 : 641 : - GET_MODE_SIZE (narrow_mode_iter)));
12181 : 641 : *pop0 = adjust_address_nv (op0, narrow_mode_iter, offset);
12182 : 641 : *pop1 = gen_int_mode (n, narrow_mode_iter);
12183 : 641 : return adjusted_code;
12184 : : }
12185 : : }
12186 : :
12187 : 15546464 : *pop1 = GEN_INT (const_op);
12188 : 15546464 : return code;
12189 : : }
12190 : :
12191 : : /* Simplify a comparison between *POP0 and *POP1 where CODE is the
12192 : : comparison code that will be tested.
12193 : :
12194 : : The result is a possibly different comparison code to use. *POP0 and
12195 : : *POP1 may be updated.
12196 : :
12197 : : It is possible that we might detect that a comparison is either always
12198 : : true or always false. However, we do not perform general constant
12199 : : folding in combine, so this knowledge isn't useful. Such tautologies
12200 : : should have been detected earlier. Hence we ignore all such cases. */
12201 : :
12202 : : static enum rtx_code
12203 : 23676227 : simplify_comparison (enum rtx_code code, rtx *pop0, rtx *pop1)
12204 : : {
12205 : 23676227 : rtx op0 = *pop0;
12206 : 23676227 : rtx op1 = *pop1;
12207 : 23676227 : rtx tem, tem1;
12208 : 23676227 : int i;
12209 : 23676227 : scalar_int_mode mode, inner_mode, tmode;
12210 : 23676227 : opt_scalar_int_mode tmode_iter;
12211 : :
12212 : : /* Try a few ways of applying the same transformation to both operands. */
12213 : 23676511 : while (1)
12214 : : {
12215 : : /* The test below this one won't handle SIGN_EXTENDs on these machines,
12216 : : so check specially. */
12217 : 23676511 : if (!WORD_REGISTER_OPERATIONS
12218 : 23676511 : && code != GTU && code != GEU && code != LTU && code != LEU
12219 : 20404466 : && GET_CODE (op0) == ASHIFTRT && GET_CODE (op1) == ASHIFTRT
12220 : 1962 : && GET_CODE (XEXP (op0, 0)) == ASHIFT
12221 : 1475 : && GET_CODE (XEXP (op1, 0)) == ASHIFT
12222 : 723 : && GET_CODE (XEXP (XEXP (op0, 0), 0)) == SUBREG
12223 : 723 : && GET_CODE (XEXP (XEXP (op1, 0), 0)) == SUBREG
12224 : 723 : && is_a <scalar_int_mode> (GET_MODE (op0), &mode)
12225 : : && (is_a <scalar_int_mode>
12226 : 723 : (GET_MODE (SUBREG_REG (XEXP (XEXP (op0, 0), 0))), &inner_mode))
12227 : 723 : && inner_mode == GET_MODE (SUBREG_REG (XEXP (XEXP (op1, 0), 0)))
12228 : 723 : && CONST_INT_P (XEXP (op0, 1))
12229 : 723 : && XEXP (op0, 1) == XEXP (op1, 1)
12230 : 90 : && XEXP (op0, 1) == XEXP (XEXP (op0, 0), 1)
12231 : 90 : && XEXP (op0, 1) == XEXP (XEXP (op1, 0), 1)
12232 : 90 : && (INTVAL (XEXP (op0, 1))
12233 : 90 : == (GET_MODE_PRECISION (mode)
12234 : 90 : - GET_MODE_PRECISION (inner_mode))))
12235 : : {
12236 : 90 : op0 = SUBREG_REG (XEXP (XEXP (op0, 0), 0));
12237 : 90 : op1 = SUBREG_REG (XEXP (XEXP (op1, 0), 0));
12238 : : }
12239 : :
12240 : : /* If both operands are the same constant shift, see if we can ignore the
12241 : : shift. We can if the shift is a rotate or if the bits shifted out of
12242 : : this shift are known to be zero for both inputs and if the type of
12243 : : comparison is compatible with the shift. */
12244 : 23676511 : if (GET_CODE (op0) == GET_CODE (op1)
12245 : 3597202 : && HWI_COMPUTABLE_MODE_P (GET_MODE (op0))
12246 : 3275242 : && ((GET_CODE (op0) == ROTATE && (code == NE || code == EQ))
12247 : 3275242 : || ((GET_CODE (op0) == LSHIFTRT || GET_CODE (op0) == ASHIFT)
12248 : 1064 : && (code != GT && code != LT && code != GE && code != LE))
12249 : 3274228 : || (GET_CODE (op0) == ASHIFTRT
12250 : 1884 : && (code != GTU && code != LTU
12251 : 1876 : && code != GEU && code != LEU)))
12252 : 2886 : && CONST_INT_P (XEXP (op0, 1))
12253 : 2851 : && INTVAL (XEXP (op0, 1)) >= 0
12254 : 2851 : && INTVAL (XEXP (op0, 1)) < HOST_BITS_PER_WIDE_INT
12255 : 23679362 : && XEXP (op0, 1) == XEXP (op1, 1))
12256 : : {
12257 : 1385 : machine_mode mode = GET_MODE (op0);
12258 : 1385 : unsigned HOST_WIDE_INT mask = GET_MODE_MASK (mode);
12259 : 1385 : int shift_count = INTVAL (XEXP (op0, 1));
12260 : :
12261 : 1385 : if (GET_CODE (op0) == LSHIFTRT || GET_CODE (op0) == ASHIFTRT)
12262 : 840 : mask &= (mask >> shift_count) << shift_count;
12263 : 545 : else if (GET_CODE (op0) == ASHIFT)
12264 : 545 : mask = (mask & (mask << shift_count)) >> shift_count;
12265 : :
12266 : 1385 : if ((nonzero_bits (XEXP (op0, 0), mode) & ~mask) == 0
12267 : 1385 : && (nonzero_bits (XEXP (op1, 0), mode) & ~mask) == 0)
12268 : 127 : op0 = XEXP (op0, 0), op1 = XEXP (op1, 0);
12269 : : else
12270 : : break;
12271 : : }
12272 : :
12273 : : /* If both operands are AND's of a paradoxical SUBREG by constant, the
12274 : : SUBREGs are of the same mode, and, in both cases, the AND would
12275 : : be redundant if the comparison was done in the narrower mode,
12276 : : do the comparison in the narrower mode (e.g., we are AND'ing with 1
12277 : : and the operand's possibly nonzero bits are 0xffffff01; in that case
12278 : : if we only care about QImode, we don't need the AND). This case
12279 : : occurs if the output mode of an scc insn is not SImode and
12280 : : STORE_FLAG_VALUE == 1 (e.g., the 386).
12281 : :
12282 : : Similarly, check for a case where the AND's are ZERO_EXTEND
12283 : : operations from some narrower mode even though a SUBREG is not
12284 : : present. */
12285 : :
12286 : 23675126 : else if (GET_CODE (op0) == AND && GET_CODE (op1) == AND
12287 : 2608 : && CONST_INT_P (XEXP (op0, 1))
12288 : 2513 : && CONST_INT_P (XEXP (op1, 1)))
12289 : : {
12290 : 2497 : rtx inner_op0 = XEXP (op0, 0);
12291 : 2497 : rtx inner_op1 = XEXP (op1, 0);
12292 : 2497 : HOST_WIDE_INT c0 = INTVAL (XEXP (op0, 1));
12293 : 2497 : HOST_WIDE_INT c1 = INTVAL (XEXP (op1, 1));
12294 : 2497 : bool changed = false;
12295 : :
12296 : 2497 : if (paradoxical_subreg_p (inner_op0)
12297 : 999 : && GET_CODE (inner_op1) == SUBREG
12298 : 461 : && HWI_COMPUTABLE_MODE_P (GET_MODE (SUBREG_REG (inner_op0)))
12299 : 461 : && (GET_MODE (SUBREG_REG (inner_op0))
12300 : 461 : == GET_MODE (SUBREG_REG (inner_op1)))
12301 : 196 : && ((~c0) & nonzero_bits (SUBREG_REG (inner_op0),
12302 : : GET_MODE (SUBREG_REG (inner_op0)))) == 0
12303 : 1706 : && ((~c1) & nonzero_bits (SUBREG_REG (inner_op1),
12304 : 126 : GET_MODE (SUBREG_REG (inner_op1)))) == 0)
12305 : : {
12306 : 110 : op0 = SUBREG_REG (inner_op0);
12307 : 110 : op1 = SUBREG_REG (inner_op1);
12308 : :
12309 : : /* The resulting comparison is always unsigned since we masked
12310 : : off the original sign bit. */
12311 : 110 : code = unsigned_condition (code);
12312 : :
12313 : 110 : changed = true;
12314 : : }
12315 : :
12316 : 2387 : else if (c0 == c1)
12317 : 5140 : FOR_EACH_MODE_UNTIL (tmode,
12318 : : as_a <scalar_int_mode> (GET_MODE (op0)))
12319 : 3126 : if ((unsigned HOST_WIDE_INT) c0 == GET_MODE_MASK (tmode))
12320 : : {
12321 : 35 : op0 = gen_lowpart_or_truncate (tmode, inner_op0);
12322 : 35 : op1 = gen_lowpart_or_truncate (tmode, inner_op1);
12323 : 35 : code = unsigned_condition (code);
12324 : 35 : changed = true;
12325 : 35 : break;
12326 : : }
12327 : :
12328 : 2159 : if (! changed)
12329 : : break;
12330 : : }
12331 : :
12332 : : /* If both operands are NOT, we can strip off the outer operation
12333 : : and adjust the comparison code for swapped operands; similarly for
12334 : : NEG, except that this must be an equality comparison. */
12335 : 23672629 : else if ((GET_CODE (op0) == NOT && GET_CODE (op1) == NOT)
12336 : 23672628 : || (GET_CODE (op0) == NEG && GET_CODE (op1) == NEG
12337 : 11 : && (code == EQ || code == NE)))
12338 : 12 : op0 = XEXP (op0, 0), op1 = XEXP (op1, 0), code = swap_condition (code);
12339 : :
12340 : : else
12341 : : break;
12342 : : }
12343 : :
12344 : : /* If the first operand is a constant, swap the operands and adjust the
12345 : : comparison code appropriately, but don't do this if the second operand
12346 : : is already a constant integer. */
12347 : 23676227 : if (swap_commutative_operands_p (op0, op1))
12348 : : {
12349 : 1546319 : std::swap (op0, op1);
12350 : 1546319 : code = swap_condition (code);
12351 : : }
12352 : :
12353 : : /* We now enter a loop during which we will try to simplify the comparison.
12354 : : For the most part, we only are concerned with comparisons with zero,
12355 : : but some things may really be comparisons with zero but not start
12356 : : out looking that way. */
12357 : :
12358 : 24903322 : while (CONST_INT_P (op1))
12359 : : {
12360 : 15966675 : machine_mode raw_mode = GET_MODE (op0);
12361 : 15966675 : scalar_int_mode int_mode;
12362 : 15966675 : int equality_comparison_p;
12363 : 15966675 : int sign_bit_comparison_p;
12364 : 15966675 : int unsigned_comparison_p;
12365 : 15966675 : HOST_WIDE_INT const_op;
12366 : :
12367 : : /* We only want to handle integral modes. This catches VOIDmode,
12368 : : CCmode, and the floating-point modes. An exception is that we
12369 : : can handle VOIDmode if OP0 is a COMPARE or a comparison
12370 : : operation. */
12371 : :
12372 : 15966675 : if (GET_MODE_CLASS (raw_mode) != MODE_INT
12373 : 1534860 : && ! (raw_mode == VOIDmode
12374 : 295113 : && (GET_CODE (op0) == COMPARE || COMPARISON_P (op0))))
12375 : : break;
12376 : :
12377 : : /* Try to simplify the compare to constant, possibly changing the
12378 : : comparison op, and/or changing op1 to zero. */
12379 : 14726888 : code = simplify_compare_const (code, raw_mode, &op0, &op1);
12380 : 14726888 : const_op = INTVAL (op1);
12381 : :
12382 : : /* Compute some predicates to simplify code below. */
12383 : :
12384 : 14726888 : equality_comparison_p = (code == EQ || code == NE);
12385 : 14726888 : sign_bit_comparison_p = ((code == LT || code == GE) && const_op == 0);
12386 : 14726888 : unsigned_comparison_p = (code == LTU || code == LEU || code == GTU
12387 : 14726888 : || code == GEU);
12388 : :
12389 : : /* If this is a sign bit comparison and we can do arithmetic in
12390 : : MODE, say that we will only be needing the sign bit of OP0. */
12391 : 14726888 : if (sign_bit_comparison_p
12392 : 453328 : && is_a <scalar_int_mode> (raw_mode, &int_mode)
12393 : 15180216 : && HWI_COMPUTABLE_MODE_P (int_mode))
12394 : 452936 : op0 = force_to_mode (op0, int_mode,
12395 : : HOST_WIDE_INT_1U
12396 : 452936 : << (GET_MODE_PRECISION (int_mode) - 1), false);
12397 : :
12398 : 14726888 : if (COMPARISON_P (op0))
12399 : : {
12400 : : /* We can't do anything if OP0 is a condition code value, rather
12401 : : than an actual data value. */
12402 : 682213 : if (const_op != 0
12403 : 682213 : || GET_MODE_CLASS (GET_MODE (XEXP (op0, 0))) == MODE_CC)
12404 : : break;
12405 : :
12406 : : /* Get the two operands being compared. */
12407 : 105691 : if (GET_CODE (XEXP (op0, 0)) == COMPARE)
12408 : 0 : tem = XEXP (XEXP (op0, 0), 0), tem1 = XEXP (XEXP (op0, 0), 1);
12409 : : else
12410 : 105691 : tem = XEXP (op0, 0), tem1 = XEXP (op0, 1);
12411 : :
12412 : : /* Check for the cases where we simply want the result of the
12413 : : earlier test or the opposite of that result. */
12414 : 105691 : if (code == NE || code == EQ
12415 : 105691 : || (val_signbit_known_set_p (raw_mode, STORE_FLAG_VALUE)
12416 : 0 : && (code == LT || code == GE)))
12417 : : {
12418 : 105691 : enum rtx_code new_code;
12419 : 105691 : if (code == LT || code == NE)
12420 : 105691 : new_code = GET_CODE (op0);
12421 : : else
12422 : 0 : new_code = reversed_comparison_code (op0, NULL);
12423 : :
12424 : 105691 : if (new_code != UNKNOWN)
12425 : : {
12426 : 105691 : code = new_code;
12427 : 105691 : op0 = tem;
12428 : 105691 : op1 = tem1;
12429 : 25009013 : continue;
12430 : : }
12431 : : }
12432 : : break;
12433 : : }
12434 : :
12435 : 14044675 : if (raw_mode == VOIDmode)
12436 : : break;
12437 : 14044675 : scalar_int_mode mode = as_a <scalar_int_mode> (raw_mode);
12438 : :
12439 : : /* Now try cases based on the opcode of OP0. If none of the cases
12440 : : does a "continue", we exit this loop immediately after the
12441 : : switch. */
12442 : :
12443 : 14044675 : unsigned int mode_width = GET_MODE_PRECISION (mode);
12444 : 14044675 : unsigned HOST_WIDE_INT mask = GET_MODE_MASK (mode);
12445 : 14044675 : switch (GET_CODE (op0))
12446 : : {
12447 : 375733 : case ZERO_EXTRACT:
12448 : : /* If we are extracting a single bit from a variable position in
12449 : : a constant that has only a single bit set and are comparing it
12450 : : with zero, we can convert this into an equality comparison
12451 : : between the position and the location of the single bit. */
12452 : : /* Except we can't if SHIFT_COUNT_TRUNCATED is set, since we might
12453 : : have already reduced the shift count modulo the word size. */
12454 : 375733 : if (!SHIFT_COUNT_TRUNCATED
12455 : 375733 : && CONST_INT_P (XEXP (op0, 0))
12456 : 9492 : && XEXP (op0, 1) == const1_rtx
12457 : 9474 : && equality_comparison_p && const_op == 0
12458 : 385207 : && (i = exact_log2 (UINTVAL (XEXP (op0, 0)))) >= 0)
12459 : : {
12460 : 0 : if (BITS_BIG_ENDIAN)
12461 : : i = BITS_PER_WORD - 1 - i;
12462 : :
12463 : 0 : op0 = XEXP (op0, 2);
12464 : 0 : op1 = GEN_INT (i);
12465 : 0 : const_op = i;
12466 : :
12467 : : /* Result is nonzero iff shift count is equal to I. */
12468 : 0 : code = reverse_condition (code);
12469 : 0 : continue;
12470 : : }
12471 : :
12472 : : /* fall through */
12473 : :
12474 : 375737 : case SIGN_EXTRACT:
12475 : 375737 : tem = expand_compound_operation (op0);
12476 : 375737 : if (tem != op0)
12477 : : {
12478 : 340683 : op0 = tem;
12479 : 340683 : continue;
12480 : : }
12481 : : break;
12482 : :
12483 : 27386 : case NOT:
12484 : : /* If testing for equality, we can take the NOT of the constant. */
12485 : 38203 : if (equality_comparison_p
12486 : 27386 : && (tem = simplify_unary_operation (NOT, mode, op1, mode)) != 0)
12487 : : {
12488 : 10817 : op0 = XEXP (op0, 0);
12489 : 10817 : op1 = tem;
12490 : 10817 : continue;
12491 : : }
12492 : :
12493 : : /* If just looking at the sign bit, reverse the sense of the
12494 : : comparison. */
12495 : 16569 : if (sign_bit_comparison_p)
12496 : : {
12497 : 16209 : op0 = XEXP (op0, 0);
12498 : 16209 : code = (code == GE ? LT : GE);
12499 : 16209 : continue;
12500 : : }
12501 : : break;
12502 : :
12503 : 320762 : case NEG:
12504 : : /* If testing for equality, we can take the NEG of the constant. */
12505 : 638066 : if (equality_comparison_p
12506 : 320762 : && (tem = simplify_unary_operation (NEG, mode, op1, mode)) != 0)
12507 : : {
12508 : 317304 : op0 = XEXP (op0, 0);
12509 : 317304 : op1 = tem;
12510 : 317304 : continue;
12511 : : }
12512 : :
12513 : : /* The remaining cases only apply to comparisons with zero. */
12514 : 3458 : if (const_op != 0)
12515 : : break;
12516 : :
12517 : : /* When X is ABS or is known positive,
12518 : : (neg X) is < 0 if and only if X != 0. */
12519 : :
12520 : 2946 : if (sign_bit_comparison_p
12521 : 2908 : && (GET_CODE (XEXP (op0, 0)) == ABS
12522 : 2907 : || (mode_width <= HOST_BITS_PER_WIDE_INT
12523 : 2907 : && (nonzero_bits (XEXP (op0, 0), mode)
12524 : 2907 : & (HOST_WIDE_INT_1U << (mode_width - 1)))
12525 : 2907 : == 0)))
12526 : : {
12527 : 38 : op0 = XEXP (op0, 0);
12528 : 38 : code = (code == LT ? NE : EQ);
12529 : 38 : continue;
12530 : : }
12531 : :
12532 : : /* If we have NEG of something whose two high-order bits are the
12533 : : same, we know that "(-a) < 0" is equivalent to "a > 0". */
12534 : 2870 : if (num_sign_bit_copies (op0, mode) >= 2)
12535 : : {
12536 : 22 : op0 = XEXP (op0, 0);
12537 : 22 : code = swap_condition (code);
12538 : 22 : continue;
12539 : : }
12540 : : break;
12541 : :
12542 : 146 : case ROTATE:
12543 : : /* If we are testing equality and our count is a constant, we
12544 : : can perform the inverse operation on our RHS. */
12545 : 146 : if (equality_comparison_p && CONST_INT_P (XEXP (op0, 1))
12546 : 146 : && (tem = simplify_binary_operation (ROTATERT, mode,
12547 : : op1, XEXP (op0, 1))) != 0)
12548 : : {
12549 : 0 : op0 = XEXP (op0, 0);
12550 : 0 : op1 = tem;
12551 : 0 : continue;
12552 : : }
12553 : :
12554 : : /* If we are doing a < 0 or >= 0 comparison, it means we are testing
12555 : : a particular bit. Convert it to an AND of a constant of that
12556 : : bit. This will be converted into a ZERO_EXTRACT. */
12557 : 146 : if (const_op == 0 && sign_bit_comparison_p
12558 : 0 : && CONST_INT_P (XEXP (op0, 1))
12559 : 0 : && mode_width <= HOST_BITS_PER_WIDE_INT
12560 : 0 : && UINTVAL (XEXP (op0, 1)) < mode_width)
12561 : : {
12562 : 0 : op0 = simplify_and_const_int (NULL_RTX, mode, XEXP (op0, 0),
12563 : : (HOST_WIDE_INT_1U
12564 : : << (mode_width - 1
12565 : 0 : - INTVAL (XEXP (op0, 1)))));
12566 : 0 : code = (code == LT ? NE : EQ);
12567 : 0 : continue;
12568 : : }
12569 : :
12570 : : /* Fall through. */
12571 : :
12572 : 2372 : case ABS:
12573 : : /* ABS is ignorable inside an equality comparison with zero. */
12574 : 2372 : if (const_op == 0 && equality_comparison_p)
12575 : : {
12576 : 1 : op0 = XEXP (op0, 0);
12577 : 1 : continue;
12578 : : }
12579 : : break;
12580 : :
12581 : 1666 : case SIGN_EXTEND:
12582 : : /* Can simplify (compare (zero/sign_extend FOO) CONST) to
12583 : : (compare FOO CONST) if CONST fits in FOO's mode and we
12584 : : are either testing inequality or have an unsigned
12585 : : comparison with ZERO_EXTEND or a signed comparison with
12586 : : SIGN_EXTEND. But don't do it if we don't have a compare
12587 : : insn of the given mode, since we'd have to revert it
12588 : : later on, and then we wouldn't know whether to sign- or
12589 : : zero-extend. */
12590 : 1666 : if (is_int_mode (GET_MODE (XEXP (op0, 0)), &mode)
12591 : 1666 : && ! unsigned_comparison_p
12592 : 928 : && HWI_COMPUTABLE_MODE_P (mode)
12593 : 928 : && trunc_int_for_mode (const_op, mode) == const_op
12594 : 928 : && have_insn_for (COMPARE, mode))
12595 : : {
12596 : 928 : op0 = XEXP (op0, 0);
12597 : 928 : continue;
12598 : : }
12599 : : break;
12600 : :
12601 : 389601 : case SUBREG:
12602 : : /* Check for the case where we are comparing A - C1 with C2, that is
12603 : :
12604 : : (subreg:MODE (plus (A) (-C1))) op (C2)
12605 : :
12606 : : with C1 a constant, and try to lift the SUBREG, i.e. to do the
12607 : : comparison in the wider mode. One of the following two conditions
12608 : : must be true in order for this to be valid:
12609 : :
12610 : : 1. The mode extension results in the same bit pattern being added
12611 : : on both sides and the comparison is equality or unsigned. As
12612 : : C2 has been truncated to fit in MODE, the pattern can only be
12613 : : all 0s or all 1s.
12614 : :
12615 : : 2. The mode extension results in the sign bit being copied on
12616 : : each side.
12617 : :
12618 : : The difficulty here is that we have predicates for A but not for
12619 : : (A - C1) so we need to check that C1 is within proper bounds so
12620 : : as to perturbate A as little as possible. */
12621 : :
12622 : 389601 : if (mode_width <= HOST_BITS_PER_WIDE_INT
12623 : 389497 : && subreg_lowpart_p (op0)
12624 : 358440 : && is_a <scalar_int_mode> (GET_MODE (SUBREG_REG (op0)),
12625 : : &inner_mode)
12626 : 356898 : && GET_MODE_PRECISION (inner_mode) > mode_width
12627 : 356898 : && GET_CODE (SUBREG_REG (op0)) == PLUS
12628 : 389601 : && CONST_INT_P (XEXP (SUBREG_REG (op0), 1)))
12629 : : {
12630 : 0 : rtx a = XEXP (SUBREG_REG (op0), 0);
12631 : 0 : HOST_WIDE_INT c1 = -INTVAL (XEXP (SUBREG_REG (op0), 1));
12632 : :
12633 : 0 : if ((c1 > 0
12634 : 0 : && (unsigned HOST_WIDE_INT) c1
12635 : 0 : < HOST_WIDE_INT_1U << (mode_width - 1)
12636 : 0 : && (equality_comparison_p || unsigned_comparison_p)
12637 : : /* (A - C1) zero-extends if it is positive and sign-extends
12638 : : if it is negative, C2 both zero- and sign-extends. */
12639 : 0 : && (((nonzero_bits (a, inner_mode)
12640 : 0 : & ~GET_MODE_MASK (mode)) == 0
12641 : 0 : && const_op >= 0)
12642 : : /* (A - C1) sign-extends if it is positive and 1-extends
12643 : : if it is negative, C2 both sign- and 1-extends. */
12644 : 0 : || (num_sign_bit_copies (a, inner_mode)
12645 : 0 : > (unsigned int) (GET_MODE_PRECISION (inner_mode)
12646 : 0 : - mode_width)
12647 : 0 : && const_op < 0)))
12648 : 0 : || ((unsigned HOST_WIDE_INT) c1
12649 : 0 : < HOST_WIDE_INT_1U << (mode_width - 2)
12650 : : /* (A - C1) always sign-extends, like C2. */
12651 : 0 : && num_sign_bit_copies (a, inner_mode)
12652 : 0 : > (unsigned int) (GET_MODE_PRECISION (inner_mode)
12653 : 0 : - (mode_width - 1))))
12654 : : {
12655 : 0 : op0 = SUBREG_REG (op0);
12656 : 0 : continue;
12657 : : }
12658 : : }
12659 : :
12660 : : /* If the inner mode is narrower and we are extracting the low part,
12661 : : we can treat the SUBREG as if it were a ZERO_EXTEND ... */
12662 : 389601 : if (paradoxical_subreg_p (op0))
12663 : : {
12664 : : if (WORD_REGISTER_OPERATIONS
12665 : : && is_a <scalar_int_mode> (GET_MODE (SUBREG_REG (op0)),
12666 : : &inner_mode)
12667 : : && GET_MODE_PRECISION (inner_mode) < BITS_PER_WORD
12668 : : /* On WORD_REGISTER_OPERATIONS targets the bits
12669 : : beyond sub_mode aren't considered undefined,
12670 : : so optimize only if it is a MEM load when MEM loads
12671 : : zero extend, because then the upper bits are all zero. */
12672 : : && !(MEM_P (SUBREG_REG (op0))
12673 : : && load_extend_op (inner_mode) == ZERO_EXTEND))
12674 : : break;
12675 : : /* FALLTHROUGH to case ZERO_EXTEND */
12676 : : }
12677 : 389601 : else if (subreg_lowpart_p (op0)
12678 : 358544 : && GET_MODE_CLASS (mode) == MODE_INT
12679 : 358544 : && is_int_mode (GET_MODE (SUBREG_REG (op0)), &inner_mode)
12680 : 356898 : && (code == NE || code == EQ)
12681 : 270310 : && GET_MODE_PRECISION (inner_mode) <= HOST_BITS_PER_WIDE_INT
12682 : 264480 : && !paradoxical_subreg_p (op0)
12683 : 654081 : && (nonzero_bits (SUBREG_REG (op0), inner_mode)
12684 : 264480 : & ~GET_MODE_MASK (mode)) == 0)
12685 : : {
12686 : : /* Remove outer subregs that don't do anything. */
12687 : 105854 : tem = gen_lowpart (inner_mode, op1);
12688 : :
12689 : 105854 : if ((nonzero_bits (tem, inner_mode)
12690 : 105854 : & ~GET_MODE_MASK (mode)) == 0)
12691 : : {
12692 : 105235 : op0 = SUBREG_REG (op0);
12693 : 105235 : op1 = tem;
12694 : 105235 : continue;
12695 : : }
12696 : : break;
12697 : : }
12698 : : else
12699 : : break;
12700 : :
12701 : : /* FALLTHROUGH */
12702 : :
12703 : 40496 : case ZERO_EXTEND:
12704 : 40496 : if (is_int_mode (GET_MODE (XEXP (op0, 0)), &mode)
12705 : 40496 : && (unsigned_comparison_p || equality_comparison_p)
12706 : 40454 : && HWI_COMPUTABLE_MODE_P (mode)
12707 : 40454 : && (unsigned HOST_WIDE_INT) const_op <= GET_MODE_MASK (mode)
12708 : 40454 : && const_op >= 0
12709 : 40445 : && have_insn_for (COMPARE, mode))
12710 : : {
12711 : 40445 : op0 = XEXP (op0, 0);
12712 : 40445 : continue;
12713 : : }
12714 : : break;
12715 : :
12716 : 455486 : case PLUS:
12717 : : /* (eq (plus X A) B) -> (eq X (minus B A)). We can only do
12718 : : this for equality comparisons due to pathological cases involving
12719 : : overflows. */
12720 : 502969 : if (equality_comparison_p
12721 : 455486 : && (tem = simplify_binary_operation (MINUS, mode,
12722 : : op1, XEXP (op0, 1))) != 0)
12723 : : {
12724 : 47483 : op0 = XEXP (op0, 0);
12725 : 47483 : op1 = tem;
12726 : 47483 : continue;
12727 : : }
12728 : :
12729 : : /* (plus (abs X) (const_int -1)) is < 0 if and only if X == 0. */
12730 : 408003 : if (const_op == 0 && XEXP (op0, 1) == constm1_rtx
12731 : 12550 : && GET_CODE (XEXP (op0, 0)) == ABS && sign_bit_comparison_p)
12732 : : {
12733 : 0 : op0 = XEXP (XEXP (op0, 0), 0);
12734 : 0 : code = (code == LT ? EQ : NE);
12735 : 0 : continue;
12736 : : }
12737 : : break;
12738 : :
12739 : 180709 : case MINUS:
12740 : : /* We used to optimize signed comparisons against zero, but that
12741 : : was incorrect. Unsigned comparisons against zero (GTU, LEU)
12742 : : arrive here as equality comparisons, or (GEU, LTU) are
12743 : : optimized away. No need to special-case them. */
12744 : :
12745 : : /* (eq (minus A B) C) -> (eq A (plus B C)) or
12746 : : (eq B (minus A C)), whichever simplifies. We can only do
12747 : : this for equality comparisons due to pathological cases involving
12748 : : overflows. */
12749 : 215676 : if (equality_comparison_p
12750 : 180709 : && (tem = simplify_binary_operation (PLUS, mode,
12751 : : XEXP (op0, 1), op1)) != 0)
12752 : : {
12753 : 34967 : op0 = XEXP (op0, 0);
12754 : 34967 : op1 = tem;
12755 : 34967 : continue;
12756 : : }
12757 : :
12758 : 178792 : if (equality_comparison_p
12759 : 145742 : && (tem = simplify_binary_operation (MINUS, mode,
12760 : : XEXP (op0, 0), op1)) != 0)
12761 : : {
12762 : 33050 : op0 = XEXP (op0, 1);
12763 : 33050 : op1 = tem;
12764 : 33050 : continue;
12765 : : }
12766 : :
12767 : : /* The sign bit of (minus (ashiftrt X C) X), where C is the number
12768 : : of bits in X minus 1, is one iff X > 0. */
12769 : 16440 : if (sign_bit_comparison_p && GET_CODE (XEXP (op0, 0)) == ASHIFTRT
12770 : 462 : && CONST_INT_P (XEXP (XEXP (op0, 0), 1))
12771 : 462 : && UINTVAL (XEXP (XEXP (op0, 0), 1)) == mode_width - 1
12772 : 112716 : && rtx_equal_p (XEXP (XEXP (op0, 0), 0), XEXP (op0, 1)))
12773 : : {
12774 : 0 : op0 = XEXP (op0, 1);
12775 : 0 : code = (code == GE ? LE : GT);
12776 : 0 : continue;
12777 : : }
12778 : : break;
12779 : :
12780 : 8499 : case XOR:
12781 : : /* (eq (xor A B) C) -> (eq A (xor B C)). This is a simplification
12782 : : if C is zero or B is a constant. */
12783 : 8646 : if (equality_comparison_p
12784 : 8499 : && (tem = simplify_binary_operation (XOR, mode,
12785 : : XEXP (op0, 1), op1)) != 0)
12786 : : {
12787 : 147 : op0 = XEXP (op0, 0);
12788 : 147 : op1 = tem;
12789 : 147 : continue;
12790 : : }
12791 : : break;
12792 : :
12793 : :
12794 : 387162 : case IOR:
12795 : : /* The sign bit of (ior (plus X (const_int -1)) X) is nonzero
12796 : : iff X <= 0. */
12797 : 7228 : if (sign_bit_comparison_p && GET_CODE (XEXP (op0, 0)) == PLUS
12798 : 1270 : && XEXP (XEXP (op0, 0), 1) == constm1_rtx
12799 : 387210 : && rtx_equal_p (XEXP (XEXP (op0, 0), 0), XEXP (op0, 1)))
12800 : : {
12801 : 48 : op0 = XEXP (op0, 1);
12802 : 48 : code = (code == GE ? GT : LE);
12803 : 48 : continue;
12804 : : }
12805 : : break;
12806 : :
12807 : 1753021 : case AND:
12808 : : /* Convert (and (xshift 1 X) Y) to (and (lshiftrt Y X) 1). This
12809 : : will be converted to a ZERO_EXTRACT later. */
12810 : 1753021 : if (const_op == 0 && equality_comparison_p
12811 : 1638364 : && GET_CODE (XEXP (op0, 0)) == ASHIFT
12812 : 60273 : && XEXP (XEXP (op0, 0), 0) == const1_rtx)
12813 : : {
12814 : 6793 : op0 = gen_rtx_LSHIFTRT (mode, XEXP (op0, 1),
12815 : : XEXP (XEXP (op0, 0), 1));
12816 : 6793 : op0 = simplify_and_const_int (NULL_RTX, mode, op0, 1);
12817 : 6793 : continue;
12818 : : }
12819 : :
12820 : : /* If we are comparing (and (lshiftrt X C1) C2) for equality with
12821 : : zero and X is a comparison and C1 and C2 describe only bits set
12822 : : in STORE_FLAG_VALUE, we can compare with X. */
12823 : 1746228 : if (const_op == 0 && equality_comparison_p
12824 : 1631571 : && mode_width <= HOST_BITS_PER_WIDE_INT
12825 : 1627678 : && CONST_INT_P (XEXP (op0, 1))
12826 : 1264628 : && GET_CODE (XEXP (op0, 0)) == LSHIFTRT
12827 : 530019 : && CONST_INT_P (XEXP (XEXP (op0, 0), 1))
12828 : 516481 : && INTVAL (XEXP (XEXP (op0, 0), 1)) >= 0
12829 : 516481 : && INTVAL (XEXP (XEXP (op0, 0), 1)) < HOST_BITS_PER_WIDE_INT)
12830 : : {
12831 : 516481 : mask = ((INTVAL (XEXP (op0, 1)) & GET_MODE_MASK (mode))
12832 : 516481 : << INTVAL (XEXP (XEXP (op0, 0), 1)));
12833 : 516481 : if ((~STORE_FLAG_VALUE & mask) == 0
12834 : 516481 : && (COMPARISON_P (XEXP (XEXP (op0, 0), 0))
12835 : 0 : || ((tem = get_last_value (XEXP (XEXP (op0, 0), 0))) != 0
12836 : 0 : && COMPARISON_P (tem))))
12837 : : {
12838 : 0 : op0 = XEXP (XEXP (op0, 0), 0);
12839 : 0 : continue;
12840 : : }
12841 : : }
12842 : :
12843 : : /* If we are doing an equality comparison of an AND of a bit equal
12844 : : to the sign bit, replace this with a LT or GE comparison of
12845 : : the underlying value. */
12846 : 1746751 : if (equality_comparison_p
12847 : : && const_op == 0
12848 : 1631571 : && CONST_INT_P (XEXP (op0, 1))
12849 : 1264943 : && mode_width <= HOST_BITS_PER_WIDE_INT
12850 : 1746228 : && ((INTVAL (XEXP (op0, 1)) & GET_MODE_MASK (mode))
12851 : 1264628 : == HOST_WIDE_INT_1U << (mode_width - 1)))
12852 : : {
12853 : 523 : op0 = XEXP (op0, 0);
12854 : 523 : code = (code == EQ ? GE : LT);
12855 : 523 : continue;
12856 : : }
12857 : :
12858 : : /* If this AND operation is really a ZERO_EXTEND from a narrower
12859 : : mode, the constant fits within that mode, and this is either an
12860 : : equality or unsigned comparison, try to do this comparison in
12861 : : the narrower mode.
12862 : :
12863 : : Note that in:
12864 : :
12865 : : (ne:DI (and:DI (reg:DI 4) (const_int 0xffffffff)) (const_int 0))
12866 : : -> (ne:DI (reg:SI 4) (const_int 0))
12867 : :
12868 : : unless TARGET_TRULY_NOOP_TRUNCATION allows it or the register is
12869 : : known to hold a value of the required mode the
12870 : : transformation is invalid. */
12871 : 1761699 : if ((equality_comparison_p || unsigned_comparison_p)
12872 : 1730162 : && CONST_INT_P (XEXP (op0, 1))
12873 : 4004409 : && (i = exact_log2 ((UINTVAL (XEXP (op0, 1))
12874 : 1358889 : & GET_MODE_MASK (mode))
12875 : : + 1)) >= 0
12876 : 915809 : && const_op >> i == 0
12877 : 4375682 : && int_mode_for_size (i, 1).exists (&tmode))
12878 : : {
12879 : 15994 : op0 = gen_lowpart_or_truncate (tmode, XEXP (op0, 0));
12880 : 15994 : continue;
12881 : : }
12882 : :
12883 : : /* If this is (and:M1 (subreg:M1 X:M2 0) (const_int C1)) where C1
12884 : : fits in both M1 and M2 and the SUBREG is either paradoxical
12885 : : or represents the low part, permute the SUBREG and the AND
12886 : : and try again. */
12887 : 1729711 : if (GET_CODE (XEXP (op0, 0)) == SUBREG
12888 : 111581 : && CONST_INT_P (XEXP (op0, 1)))
12889 : : {
12890 : 106252 : unsigned HOST_WIDE_INT c1 = INTVAL (XEXP (op0, 1));
12891 : : /* Require an integral mode, to avoid creating something like
12892 : : (AND:SF ...). */
12893 : 149554 : if ((is_a <scalar_int_mode>
12894 : 106252 : (GET_MODE (SUBREG_REG (XEXP (op0, 0))), &tmode))
12895 : : /* It is unsafe to commute the AND into the SUBREG if the
12896 : : SUBREG is paradoxical and WORD_REGISTER_OPERATIONS is
12897 : : not defined. As originally written the upper bits
12898 : : have a defined value due to the AND operation.
12899 : : However, if we commute the AND inside the SUBREG then
12900 : : they no longer have defined values and the meaning of
12901 : : the code has been changed.
12902 : : Also C1 should not change value in the smaller mode,
12903 : : see PR67028 (a positive C1 can become negative in the
12904 : : smaller mode, so that the AND does no longer mask the
12905 : : upper bits). */
12906 : 106219 : && ((WORD_REGISTER_OPERATIONS
12907 : : && mode_width > GET_MODE_PRECISION (tmode)
12908 : : && mode_width <= BITS_PER_WORD
12909 : : && trunc_int_for_mode (c1, tmode) == (HOST_WIDE_INT) c1)
12910 : 106219 : || (mode_width <= GET_MODE_PRECISION (tmode)
12911 : 44996 : && subreg_lowpart_p (XEXP (op0, 0))))
12912 : 44970 : && mode_width <= HOST_BITS_PER_WIDE_INT
12913 : 44970 : && HWI_COMPUTABLE_MODE_P (tmode)
12914 : 44853 : && (c1 & ~mask) == 0
12915 : 43302 : && (c1 & ~GET_MODE_MASK (tmode)) == 0
12916 : 43302 : && c1 != mask
12917 : 43302 : && c1 != GET_MODE_MASK (tmode))
12918 : : {
12919 : 43302 : op0 = simplify_gen_binary (AND, tmode,
12920 : 43302 : SUBREG_REG (XEXP (op0, 0)),
12921 : 43302 : gen_int_mode (c1, tmode));
12922 : 43302 : op0 = gen_lowpart (mode, op0);
12923 : 43302 : continue;
12924 : : }
12925 : : }
12926 : :
12927 : : /* Convert (ne (and (not X) 1) 0) to (eq (and X 1) 0). */
12928 : 1686409 : if (const_op == 0 && equality_comparison_p
12929 : 1579506 : && XEXP (op0, 1) == const1_rtx
12930 : 668225 : && GET_CODE (XEXP (op0, 0)) == NOT)
12931 : : {
12932 : 4541 : op0 = simplify_and_const_int (NULL_RTX, mode,
12933 : : XEXP (XEXP (op0, 0), 0), 1);
12934 : 4541 : code = (code == NE ? EQ : NE);
12935 : 4541 : continue;
12936 : : }
12937 : :
12938 : : /* Convert (ne (and (lshiftrt (not X)) 1) 0) to
12939 : : (eq (and (lshiftrt X) 1) 0).
12940 : : Also handle the case where (not X) is expressed using xor. */
12941 : 1681868 : if (const_op == 0 && equality_comparison_p
12942 : 1574965 : && XEXP (op0, 1) == const1_rtx
12943 : 663684 : && GET_CODE (XEXP (op0, 0)) == LSHIFTRT)
12944 : : {
12945 : 515142 : rtx shift_op = XEXP (XEXP (op0, 0), 0);
12946 : 515142 : rtx shift_count = XEXP (XEXP (op0, 0), 1);
12947 : :
12948 : 517836 : if (GET_CODE (shift_op) == NOT
12949 : 515142 : || (GET_CODE (shift_op) == XOR
12950 : 4551 : && CONST_INT_P (XEXP (shift_op, 1))
12951 : 2694 : && CONST_INT_P (shift_count)
12952 : 2694 : && HWI_COMPUTABLE_MODE_P (mode)
12953 : 2694 : && (UINTVAL (XEXP (shift_op, 1))
12954 : : == HOST_WIDE_INT_1U
12955 : 2694 : << INTVAL (shift_count))))
12956 : : {
12957 : 2694 : op0
12958 : 2694 : = gen_rtx_LSHIFTRT (mode, XEXP (shift_op, 0), shift_count);
12959 : 2694 : op0 = simplify_and_const_int (NULL_RTX, mode, op0, 1);
12960 : 2694 : code = (code == NE ? EQ : NE);
12961 : 2694 : continue;
12962 : : }
12963 : : }
12964 : : break;
12965 : :
12966 : 49047 : case ASHIFT:
12967 : : /* If we have (compare (ashift FOO N) (const_int C)) and
12968 : : the high order N bits of FOO (N+1 if an inequality comparison)
12969 : : are known to be zero, we can do this by comparing FOO with C
12970 : : shifted right N bits so long as the low-order N bits of C are
12971 : : zero. */
12972 : 49047 : if (CONST_INT_P (XEXP (op0, 1))
12973 : 45497 : && INTVAL (XEXP (op0, 1)) >= 0
12974 : 45497 : && ((INTVAL (XEXP (op0, 1)) + ! equality_comparison_p)
12975 : : < HOST_BITS_PER_WIDE_INT)
12976 : 45497 : && (((unsigned HOST_WIDE_INT) const_op
12977 : 45497 : & ((HOST_WIDE_INT_1U << INTVAL (XEXP (op0, 1)))
12978 : : - 1)) == 0)
12979 : 34284 : && mode_width <= HOST_BITS_PER_WIDE_INT
12980 : 83301 : && (nonzero_bits (XEXP (op0, 0), mode)
12981 : 34254 : & ~(mask >> (INTVAL (XEXP (op0, 1))
12982 : 34254 : + ! equality_comparison_p))) == 0)
12983 : : {
12984 : : /* We must perform a logical shift, not an arithmetic one,
12985 : : as we want the top N bits of C to be zero. */
12986 : 408 : unsigned HOST_WIDE_INT temp = const_op & GET_MODE_MASK (mode);
12987 : :
12988 : 408 : temp >>= INTVAL (XEXP (op0, 1));
12989 : 408 : op1 = gen_int_mode (temp, mode);
12990 : 408 : op0 = XEXP (op0, 0);
12991 : 408 : continue;
12992 : 408 : }
12993 : :
12994 : : /* If we are doing a sign bit comparison, it means we are testing
12995 : : a particular bit. Convert it to the appropriate AND. */
12996 : 48639 : if (sign_bit_comparison_p && CONST_INT_P (XEXP (op0, 1))
12997 : 1611 : && mode_width <= HOST_BITS_PER_WIDE_INT)
12998 : : {
12999 : 3222 : op0 = simplify_and_const_int (NULL_RTX, mode, XEXP (op0, 0),
13000 : : (HOST_WIDE_INT_1U
13001 : : << (mode_width - 1
13002 : 1611 : - INTVAL (XEXP (op0, 1)))));
13003 : 1611 : code = (code == LT ? NE : EQ);
13004 : 1611 : continue;
13005 : : }
13006 : :
13007 : : /* If this an equality comparison with zero and we are shifting
13008 : : the low bit to the sign bit, we can convert this to an AND of the
13009 : : low-order bit. */
13010 : 47028 : if (const_op == 0 && equality_comparison_p
13011 : 10108 : && CONST_INT_P (XEXP (op0, 1))
13012 : 7725 : && UINTVAL (XEXP (op0, 1)) == mode_width - 1)
13013 : : {
13014 : 105 : op0 = simplify_and_const_int (NULL_RTX, mode, XEXP (op0, 0), 1);
13015 : 105 : continue;
13016 : : }
13017 : : break;
13018 : :
13019 : 45165 : case ASHIFTRT:
13020 : : /* If this is an equality comparison with zero, we can do this
13021 : : as a logical shift, which might be much simpler. */
13022 : 45165 : if (equality_comparison_p && const_op == 0
13023 : 24146 : && CONST_INT_P (XEXP (op0, 1)))
13024 : : {
13025 : 47164 : op0 = simplify_shift_const (NULL_RTX, LSHIFTRT, mode,
13026 : : XEXP (op0, 0),
13027 : 23582 : INTVAL (XEXP (op0, 1)));
13028 : 23582 : continue;
13029 : : }
13030 : :
13031 : : /* If OP0 is a sign extension and CODE is not an unsigned comparison,
13032 : : do the comparison in a narrower mode. */
13033 : 26683 : if (! unsigned_comparison_p
13034 : 19046 : && CONST_INT_P (XEXP (op0, 1))
13035 : 18446 : && GET_CODE (XEXP (op0, 0)) == ASHIFT
13036 : 5806 : && XEXP (op0, 1) == XEXP (XEXP (op0, 0), 1)
13037 : 5588 : && (int_mode_for_size (mode_width - INTVAL (XEXP (op0, 1)), 1)
13038 : 21583 : .exists (&tmode))
13039 : 21583 : && (((unsigned HOST_WIDE_INT) const_op
13040 : 5100 : + (GET_MODE_MASK (tmode) >> 1) + 1)
13041 : 5100 : <= GET_MODE_MASK (tmode)))
13042 : : {
13043 : 5100 : op0 = gen_lowpart (tmode, XEXP (XEXP (op0, 0), 0));
13044 : 5100 : continue;
13045 : : }
13046 : :
13047 : : /* Likewise if OP0 is a PLUS of a sign extension with a
13048 : : constant, which is usually represented with the PLUS
13049 : : between the shifts. */
13050 : 16483 : if (! unsigned_comparison_p
13051 : 13946 : && CONST_INT_P (XEXP (op0, 1))
13052 : 13346 : && GET_CODE (XEXP (op0, 0)) == PLUS
13053 : 54 : && CONST_INT_P (XEXP (XEXP (op0, 0), 1))
13054 : 22 : && GET_CODE (XEXP (XEXP (op0, 0), 0)) == ASHIFT
13055 : 2 : && XEXP (op0, 1) == XEXP (XEXP (XEXP (op0, 0), 0), 1)
13056 : 0 : && (int_mode_for_size (mode_width - INTVAL (XEXP (op0, 1)), 1)
13057 : 16483 : .exists (&tmode))
13058 : 16483 : && (((unsigned HOST_WIDE_INT) const_op
13059 : 0 : + (GET_MODE_MASK (tmode) >> 1) + 1)
13060 : 0 : <= GET_MODE_MASK (tmode)))
13061 : : {
13062 : 0 : rtx inner = XEXP (XEXP (XEXP (op0, 0), 0), 0);
13063 : 0 : rtx add_const = XEXP (XEXP (op0, 0), 1);
13064 : 0 : rtx new_const = simplify_gen_binary (ASHIFTRT, mode,
13065 : : add_const, XEXP (op0, 1));
13066 : :
13067 : 0 : op0 = simplify_gen_binary (PLUS, tmode,
13068 : 0 : gen_lowpart (tmode, inner),
13069 : : new_const);
13070 : 0 : continue;
13071 : 0 : }
13072 : :
13073 : : /* FALLTHROUGH */
13074 : 133123 : case LSHIFTRT:
13075 : : /* If we have (compare (xshiftrt FOO N) (const_int C)) and
13076 : : the low order N bits of FOO are known to be zero, we can do this
13077 : : by comparing FOO with C shifted left N bits so long as no
13078 : : overflow occurs. Even if the low order N bits of FOO aren't known
13079 : : to be zero, if the comparison is >= or < we can use the same
13080 : : optimization and for > or <= by setting all the low
13081 : : order N bits in the comparison constant. */
13082 : 133123 : if (CONST_INT_P (XEXP (op0, 1))
13083 : 128671 : && INTVAL (XEXP (op0, 1)) > 0
13084 : 128671 : && INTVAL (XEXP (op0, 1)) < HOST_BITS_PER_WIDE_INT
13085 : 128311 : && mode_width <= HOST_BITS_PER_WIDE_INT
13086 : 133123 : && (((unsigned HOST_WIDE_INT) const_op
13087 : 255100 : + (GET_CODE (op0) != LSHIFTRT
13088 : 127550 : ? ((GET_MODE_MASK (mode) >> INTVAL (XEXP (op0, 1)) >> 1)
13089 : : + 1)
13090 : : : 0))
13091 : 127550 : <= GET_MODE_MASK (mode) >> INTVAL (XEXP (op0, 1))))
13092 : : {
13093 : 127400 : unsigned HOST_WIDE_INT low_bits
13094 : 127400 : = (nonzero_bits (XEXP (op0, 0), mode)
13095 : 127400 : & ((HOST_WIDE_INT_1U
13096 : 127400 : << INTVAL (XEXP (op0, 1))) - 1));
13097 : 127400 : if (low_bits == 0 || !equality_comparison_p)
13098 : : {
13099 : : /* If the shift was logical, then we must make the condition
13100 : : unsigned. */
13101 : 22052 : if (GET_CODE (op0) == LSHIFTRT)
13102 : 17980 : code = unsigned_condition (code);
13103 : :
13104 : 22052 : const_op = (unsigned HOST_WIDE_INT) const_op
13105 : 22052 : << INTVAL (XEXP (op0, 1));
13106 : 22052 : if (low_bits != 0
13107 : 3799 : && (code == GT || code == GTU
13108 : 926 : || code == LE || code == LEU))
13109 : 3731 : const_op
13110 : 3731 : |= ((HOST_WIDE_INT_1 << INTVAL (XEXP (op0, 1))) - 1);
13111 : 22052 : op1 = GEN_INT (const_op);
13112 : 22052 : op0 = XEXP (op0, 0);
13113 : 22052 : continue;
13114 : : }
13115 : : }
13116 : :
13117 : : /* If we are using this shift to extract just the sign bit, we
13118 : : can replace this with an LT or GE comparison. */
13119 : 111071 : if (const_op == 0
13120 : 94470 : && (equality_comparison_p || sign_bit_comparison_p)
13121 : 94434 : && CONST_INT_P (XEXP (op0, 1))
13122 : 90197 : && UINTVAL (XEXP (op0, 1)) == mode_width - 1)
13123 : : {
13124 : 47322 : op0 = XEXP (op0, 0);
13125 : 47322 : code = (code == NE || code == GT ? LT : GE);
13126 : 47322 : continue;
13127 : : }
13128 : : break;
13129 : :
13130 : : default:
13131 : : break;
13132 : : }
13133 : :
13134 : : break;
13135 : : }
13136 : :
13137 : : /* Now make any compound operations involved in this comparison. Then,
13138 : : check for an outermost SUBREG on OP0 that is not doing anything or is
13139 : : paradoxical. The latter transformation must only be performed when
13140 : : it is known that the "extra" bits will be the same in op0 and op1 or
13141 : : that they don't matter. There are three cases to consider:
13142 : :
13143 : : 1. SUBREG_REG (op0) is a register. In this case the bits are don't
13144 : : care bits and we can assume they have any convenient value. So
13145 : : making the transformation is safe.
13146 : :
13147 : : 2. SUBREG_REG (op0) is a memory and LOAD_EXTEND_OP is UNKNOWN.
13148 : : In this case the upper bits of op0 are undefined. We should not make
13149 : : the simplification in that case as we do not know the contents of
13150 : : those bits.
13151 : :
13152 : : 3. SUBREG_REG (op0) is a memory and LOAD_EXTEND_OP is not UNKNOWN.
13153 : : In that case we know those bits are zeros or ones. We must also be
13154 : : sure that they are the same as the upper bits of op1.
13155 : :
13156 : : We can never remove a SUBREG for a non-equality comparison because
13157 : : the sign bit is in a different place in the underlying object. */
13158 : :
13159 : 23676227 : rtx_code op0_mco_code = SET;
13160 : 23676227 : if (op1 == const0_rtx)
13161 : 10965687 : op0_mco_code = code == NE || code == EQ ? EQ : COMPARE;
13162 : :
13163 : 23676227 : op0 = make_compound_operation (op0, op0_mco_code);
13164 : 23676227 : op1 = make_compound_operation (op1, SET);
13165 : :
13166 : 436510 : if (GET_CODE (op0) == SUBREG && subreg_lowpart_p (op0)
13167 : 404036 : && is_int_mode (GET_MODE (op0), &mode)
13168 : 379337 : && is_int_mode (GET_MODE (SUBREG_REG (op0)), &inner_mode)
13169 : 24052354 : && (code == NE || code == EQ))
13170 : : {
13171 : 212334 : if (paradoxical_subreg_p (op0))
13172 : : {
13173 : : /* For paradoxical subregs, allow case 1 as above. Case 3 isn't
13174 : : implemented. */
13175 : 0 : if (REG_P (SUBREG_REG (op0)))
13176 : : {
13177 : 0 : op0 = SUBREG_REG (op0);
13178 : 0 : op1 = gen_lowpart (inner_mode, op1);
13179 : : }
13180 : : }
13181 : 212334 : else if (GET_MODE_PRECISION (inner_mode) <= HOST_BITS_PER_WIDE_INT
13182 : 212334 : && (nonzero_bits (SUBREG_REG (op0), inner_mode)
13183 : 205445 : & ~GET_MODE_MASK (mode)) == 0)
13184 : : {
13185 : 32344 : tem = gen_lowpart (inner_mode, op1);
13186 : :
13187 : 32344 : if ((nonzero_bits (tem, inner_mode) & ~GET_MODE_MASK (mode)) == 0)
13188 : 23744 : op0 = SUBREG_REG (op0), op1 = tem;
13189 : : }
13190 : : }
13191 : :
13192 : : /* We now do the opposite procedure: Some machines don't have compare
13193 : : insns in all modes. If OP0's mode is an integer mode smaller than a
13194 : : word and we can't do a compare in that mode, see if there is a larger
13195 : : mode for which we can do the compare. There are a number of cases in
13196 : : which we can use the wider mode. */
13197 : :
13198 : 23676227 : if (is_int_mode (GET_MODE (op0), &mode)
13199 : 24463589 : && GET_MODE_SIZE (mode) < UNITS_PER_WORD
13200 : 8598289 : && ! have_insn_for (COMPARE, mode))
13201 : 0 : FOR_EACH_WIDER_MODE (tmode_iter, mode)
13202 : : {
13203 : 0 : tmode = tmode_iter.require ();
13204 : 0 : if (!HWI_COMPUTABLE_MODE_P (tmode))
13205 : : break;
13206 : 0 : if (have_insn_for (COMPARE, tmode))
13207 : : {
13208 : 0 : int zero_extended;
13209 : :
13210 : : /* If this is a test for negative, we can make an explicit
13211 : : test of the sign bit. Test this first so we can use
13212 : : a paradoxical subreg to extend OP0. */
13213 : :
13214 : 0 : if (op1 == const0_rtx && (code == LT || code == GE)
13215 : 0 : && HWI_COMPUTABLE_MODE_P (mode))
13216 : : {
13217 : 0 : unsigned HOST_WIDE_INT sign
13218 : 0 : = HOST_WIDE_INT_1U << (GET_MODE_BITSIZE (mode) - 1);
13219 : 0 : op0 = simplify_gen_binary (AND, tmode,
13220 : 0 : gen_lowpart (tmode, op0),
13221 : 0 : gen_int_mode (sign, tmode));
13222 : 0 : code = (code == LT) ? NE : EQ;
13223 : : break;
13224 : : }
13225 : :
13226 : : /* If the only nonzero bits in OP0 and OP1 are those in the
13227 : : narrower mode and this is an equality or unsigned comparison,
13228 : : we can use the wider mode. Similarly for sign-extended
13229 : : values, in which case it is true for all comparisons. */
13230 : 0 : zero_extended = ((code == EQ || code == NE
13231 : 0 : || code == GEU || code == GTU
13232 : 0 : || code == LEU || code == LTU)
13233 : 0 : && (nonzero_bits (op0, tmode)
13234 : 0 : & ~GET_MODE_MASK (mode)) == 0
13235 : 0 : && ((CONST_INT_P (op1)
13236 : 0 : || (nonzero_bits (op1, tmode)
13237 : 0 : & ~GET_MODE_MASK (mode)) == 0)));
13238 : :
13239 : 0 : if (zero_extended
13240 : 0 : || ((num_sign_bit_copies (op0, tmode)
13241 : 0 : > (unsigned int) (GET_MODE_PRECISION (tmode)
13242 : 0 : - GET_MODE_PRECISION (mode)))
13243 : 0 : && (num_sign_bit_copies (op1, tmode)
13244 : 0 : > (unsigned int) (GET_MODE_PRECISION (tmode)
13245 : 0 : - GET_MODE_PRECISION (mode)))))
13246 : : {
13247 : : /* If OP0 is an AND and we don't have an AND in MODE either,
13248 : : make a new AND in the proper mode. */
13249 : 0 : if (GET_CODE (op0) == AND
13250 : 0 : && !have_insn_for (AND, mode))
13251 : 0 : op0 = simplify_gen_binary (AND, tmode,
13252 : 0 : gen_lowpart (tmode,
13253 : : XEXP (op0, 0)),
13254 : 0 : gen_lowpart (tmode,
13255 : : XEXP (op0, 1)));
13256 : : else
13257 : : {
13258 : 0 : if (zero_extended)
13259 : : {
13260 : 0 : op0 = simplify_gen_unary (ZERO_EXTEND, tmode,
13261 : : op0, mode);
13262 : 0 : op1 = simplify_gen_unary (ZERO_EXTEND, tmode,
13263 : : op1, mode);
13264 : : }
13265 : : else
13266 : : {
13267 : 0 : op0 = simplify_gen_unary (SIGN_EXTEND, tmode,
13268 : : op0, mode);
13269 : 0 : op1 = simplify_gen_unary (SIGN_EXTEND, tmode,
13270 : : op1, mode);
13271 : : }
13272 : : break;
13273 : : }
13274 : : }
13275 : : }
13276 : : }
13277 : :
13278 : : /* We may have changed the comparison operands. Re-canonicalize. */
13279 : 23676227 : if (swap_commutative_operands_p (op0, op1))
13280 : : {
13281 : 86333 : std::swap (op0, op1);
13282 : 86333 : code = swap_condition (code);
13283 : : }
13284 : :
13285 : : /* If this machine only supports a subset of valid comparisons, see if we
13286 : : can convert an unsupported one into a supported one. */
13287 : 23676227 : target_canonicalize_comparison (&code, &op0, &op1, 0);
13288 : :
13289 : 23676227 : *pop0 = op0;
13290 : 23676227 : *pop1 = op1;
13291 : :
13292 : 23676227 : return code;
13293 : : }
13294 : :
13295 : : /* Utility function for record_value_for_reg. Count number of
13296 : : rtxs in X. */
13297 : : static int
13298 : 1938 : count_rtxs (rtx x)
13299 : : {
13300 : 1938 : enum rtx_code code = GET_CODE (x);
13301 : 1938 : const char *fmt;
13302 : 1938 : int i, j, ret = 1;
13303 : :
13304 : 1938 : if (GET_RTX_CLASS (code) == RTX_BIN_ARITH
13305 : 1938 : || GET_RTX_CLASS (code) == RTX_COMM_ARITH)
13306 : : {
13307 : 78 : rtx x0 = XEXP (x, 0);
13308 : 78 : rtx x1 = XEXP (x, 1);
13309 : :
13310 : 78 : if (x0 == x1)
13311 : 0 : return 1 + 2 * count_rtxs (x0);
13312 : :
13313 : 78 : if ((GET_RTX_CLASS (GET_CODE (x1)) == RTX_BIN_ARITH
13314 : 78 : || GET_RTX_CLASS (GET_CODE (x1)) == RTX_COMM_ARITH)
13315 : 4 : && (x0 == XEXP (x1, 0) || x0 == XEXP (x1, 1)))
13316 : 0 : return 2 + 2 * count_rtxs (x0)
13317 : 0 : + count_rtxs (x == XEXP (x1, 0)
13318 : 0 : ? XEXP (x1, 1) : XEXP (x1, 0));
13319 : :
13320 : 78 : if ((GET_RTX_CLASS (GET_CODE (x0)) == RTX_BIN_ARITH
13321 : 78 : || GET_RTX_CLASS (GET_CODE (x0)) == RTX_COMM_ARITH)
13322 : 8 : && (x1 == XEXP (x0, 0) || x1 == XEXP (x0, 1)))
13323 : 0 : return 2 + 2 * count_rtxs (x1)
13324 : 0 : + count_rtxs (x == XEXP (x0, 0)
13325 : 0 : ? XEXP (x0, 1) : XEXP (x0, 0));
13326 : : }
13327 : :
13328 : 1938 : fmt = GET_RTX_FORMAT (code);
13329 : 4666 : for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
13330 : 2728 : if (fmt[i] == 'e')
13331 : 1109 : ret += count_rtxs (XEXP (x, i));
13332 : 1619 : else if (fmt[i] == 'E')
13333 : 280 : for (j = 0; j < XVECLEN (x, i); j++)
13334 : 220 : ret += count_rtxs (XVECEXP (x, i, j));
13335 : :
13336 : : return ret;
13337 : : }
13338 : :
13339 : : /* Utility function for following routine. Called when X is part of a value
13340 : : being stored into last_set_value. Sets last_set_table_tick
13341 : : for each register mentioned. Similar to mention_regs in cse.cc */
13342 : :
13343 : : static void
13344 : 242694005 : update_table_tick (rtx x)
13345 : : {
13346 : 243329239 : enum rtx_code code = GET_CODE (x);
13347 : 243329239 : const char *fmt = GET_RTX_FORMAT (code);
13348 : 243329239 : int i, j;
13349 : :
13350 : 243329239 : if (code == REG)
13351 : : {
13352 : 81947216 : unsigned int regno = REGNO (x);
13353 : 81947216 : unsigned int endregno = END_REGNO (x);
13354 : 81947216 : unsigned int r;
13355 : :
13356 : 164007192 : for (r = regno; r < endregno; r++)
13357 : : {
13358 : 82059976 : reg_stat_type *rsp = ®_stat[r];
13359 : 82059976 : rsp->last_set_table_tick = label_tick;
13360 : : }
13361 : :
13362 : : return;
13363 : : }
13364 : :
13365 : 416161536 : for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
13366 : 255536013 : if (fmt[i] == 'e')
13367 : : {
13368 : : /* Check for identical subexpressions. If x contains
13369 : : identical subexpression we only have to traverse one of
13370 : : them. */
13371 : 150592941 : if (i == 0 && ARITHMETIC_P (x))
13372 : : {
13373 : : /* Note that at this point x1 has already been
13374 : : processed. */
13375 : 58673494 : rtx x0 = XEXP (x, 0);
13376 : 58673494 : rtx x1 = XEXP (x, 1);
13377 : :
13378 : : /* If x0 and x1 are identical then there is no need to
13379 : : process x0. */
13380 : 58673494 : if (x0 == x1)
13381 : : break;
13382 : :
13383 : : /* If x0 is identical to a subexpression of x1 then while
13384 : : processing x1, x0 has already been processed. Thus we
13385 : : are done with x. */
13386 : 58552371 : if (ARITHMETIC_P (x1)
13387 : 406079 : && (x0 == XEXP (x1, 0) || x0 == XEXP (x1, 1)))
13388 : : break;
13389 : :
13390 : : /* If x1 is identical to a subexpression of x0 then we
13391 : : still have to process the rest of x0. */
13392 : 58552228 : if (ARITHMETIC_P (x0)
13393 : 16076079 : && (x1 == XEXP (x0, 0) || x1 == XEXP (x0, 1)))
13394 : : {
13395 : 635234 : update_table_tick (XEXP (x0, x1 == XEXP (x0, 0) ? 1 : 0));
13396 : 635234 : break;
13397 : : }
13398 : : }
13399 : :
13400 : 149836441 : update_table_tick (XEXP (x, i));
13401 : : }
13402 : 104943072 : else if (fmt[i] == 'E')
13403 : 9612552 : for (j = 0; j < XVECLEN (x, i); j++)
13404 : 7047975 : update_table_tick (XVECEXP (x, i, j));
13405 : : }
13406 : :
13407 : : /* Record that REG is set to VALUE in insn INSN. If VALUE is zero, we
13408 : : are saying that the register is clobbered and we no longer know its
13409 : : value. If INSN is zero, don't update reg_stat[].last_set; this is
13410 : : only permitted with VALUE also zero and is used to invalidate the
13411 : : register. */
13412 : :
13413 : : static void
13414 : 113237313 : record_value_for_reg (rtx reg, rtx_insn *insn, rtx value)
13415 : : {
13416 : 113237313 : unsigned int regno = REGNO (reg);
13417 : 113237313 : unsigned int endregno = END_REGNO (reg);
13418 : 113237313 : unsigned int i;
13419 : 113237313 : reg_stat_type *rsp;
13420 : :
13421 : : /* If VALUE contains REG and we have a previous value for REG, substitute
13422 : : the previous value. */
13423 : 113237313 : if (value && insn && reg_overlap_mentioned_p (reg, value))
13424 : : {
13425 : 6127733 : rtx tem;
13426 : :
13427 : : /* Set things up so get_last_value is allowed to see anything set up to
13428 : : our insn. */
13429 : 6127733 : subst_low_luid = DF_INSN_LUID (insn);
13430 : 6127733 : tem = get_last_value (reg);
13431 : :
13432 : : /* If TEM is simply a binary operation with two CLOBBERs as operands,
13433 : : it isn't going to be useful and will take a lot of time to process,
13434 : : so just use the CLOBBER. */
13435 : :
13436 : 6127733 : if (tem)
13437 : : {
13438 : 2450985 : if (ARITHMETIC_P (tem)
13439 : 2229497 : && GET_CODE (XEXP (tem, 0)) == CLOBBER
13440 : 1061061 : && GET_CODE (XEXP (tem, 1)) == CLOBBER)
13441 : : tem = XEXP (tem, 0);
13442 : 2449773 : else if (count_occurrences (value, reg, 1) >= 2)
13443 : : {
13444 : : /* If there are two or more occurrences of REG in VALUE,
13445 : : prevent the value from growing too much. */
13446 : 609 : if (count_rtxs (tem) > param_max_last_value_rtl)
13447 : 0 : tem = gen_rtx_CLOBBER (GET_MODE (tem), const0_rtx);
13448 : : }
13449 : :
13450 : 2450985 : value = replace_rtx (copy_rtx (value), reg, tem);
13451 : : }
13452 : : }
13453 : :
13454 : : /* For each register modified, show we don't know its value, that
13455 : : we don't know about its bitwise content, that its value has been
13456 : : updated, and that we don't know the location of the death of the
13457 : : register. */
13458 : 226825259 : for (i = regno; i < endregno; i++)
13459 : : {
13460 : 113587946 : rsp = ®_stat[i];
13461 : :
13462 : 113587946 : if (insn)
13463 : 103456665 : rsp->last_set = insn;
13464 : :
13465 : 113587946 : rsp->last_set_value = 0;
13466 : 113587946 : rsp->last_set_mode = VOIDmode;
13467 : 113587946 : rsp->last_set_nonzero_bits = 0;
13468 : 113587946 : rsp->last_set_sign_bit_copies = 0;
13469 : 113587946 : rsp->last_death = 0;
13470 : 113587946 : rsp->truncated_to_mode = VOIDmode;
13471 : : }
13472 : :
13473 : : /* Mark registers that are being referenced in this value. */
13474 : 113237313 : if (value)
13475 : 85809589 : update_table_tick (value);
13476 : :
13477 : : /* Now update the status of each register being set.
13478 : : If someone is using this register in this block, set this register
13479 : : to invalid since we will get confused between the two lives in this
13480 : : basic block. This makes using this register always invalid. In cse, we
13481 : : scan the table to invalidate all entries using this register, but this
13482 : : is too much work for us. */
13483 : :
13484 : 226825259 : for (i = regno; i < endregno; i++)
13485 : : {
13486 : 113587946 : rsp = ®_stat[i];
13487 : 113587946 : rsp->last_set_label = label_tick;
13488 : 113587946 : if (!insn
13489 : 103456665 : || (value && rsp->last_set_table_tick >= label_tick_ebb_start))
13490 : 20470324 : rsp->last_set_invalid = true;
13491 : : else
13492 : 93117622 : rsp->last_set_invalid = false;
13493 : : }
13494 : :
13495 : : /* The value being assigned might refer to X (like in "x++;"). In that
13496 : : case, we must replace it with (clobber (const_int 0)) to prevent
13497 : : infinite loops. */
13498 : 113237313 : rsp = ®_stat[regno];
13499 : 113237313 : if (value && !get_last_value_validate (&value, insn, label_tick, false))
13500 : : {
13501 : 11100908 : value = copy_rtx (value);
13502 : 11100908 : if (!get_last_value_validate (&value, insn, label_tick, true))
13503 : 0 : value = 0;
13504 : : }
13505 : :
13506 : : /* For the main register being modified, update the value, the mode, the
13507 : : nonzero bits, and the number of sign bit copies. */
13508 : :
13509 : 113237313 : rsp->last_set_value = value;
13510 : :
13511 : 113237313 : if (value)
13512 : : {
13513 : 85809589 : machine_mode mode = GET_MODE (reg);
13514 : 85809589 : subst_low_luid = DF_INSN_LUID (insn);
13515 : 85809589 : rsp->last_set_mode = mode;
13516 : 85809589 : if (GET_MODE_CLASS (mode) == MODE_INT
13517 : 85809589 : && HWI_COMPUTABLE_MODE_P (mode))
13518 : 64470684 : mode = nonzero_bits_mode;
13519 : 85809589 : rsp->last_set_nonzero_bits = nonzero_bits (value, mode);
13520 : 85809589 : rsp->last_set_sign_bit_copies
13521 : 85809589 : = num_sign_bit_copies (value, GET_MODE (reg));
13522 : : }
13523 : 113237313 : }
13524 : :
13525 : : /* Called via note_stores from record_dead_and_set_regs to handle one
13526 : : SET or CLOBBER in an insn. DATA is the instruction in which the
13527 : : set is occurring. */
13528 : :
13529 : : static void
13530 : 135374963 : record_dead_and_set_regs_1 (rtx dest, const_rtx setter, void *data)
13531 : : {
13532 : 135374963 : rtx_insn *record_dead_insn = (rtx_insn *) data;
13533 : :
13534 : 135374963 : if (GET_CODE (dest) == SUBREG)
13535 : 5 : dest = SUBREG_REG (dest);
13536 : :
13537 : 135374963 : if (!record_dead_insn)
13538 : : {
13539 : 5005678 : if (REG_P (dest))
13540 : 5005678 : record_value_for_reg (dest, NULL, NULL_RTX);
13541 : 5005678 : return;
13542 : : }
13543 : :
13544 : 130369285 : if (REG_P (dest))
13545 : : {
13546 : : /* If we are setting the whole register, we know its value. */
13547 : 103287842 : if (GET_CODE (setter) == SET && dest == SET_DEST (setter))
13548 : 85660226 : record_value_for_reg (dest, record_dead_insn, SET_SRC (setter));
13549 : : /* We can handle a SUBREG if it's the low part, but we must be
13550 : : careful with paradoxical SUBREGs on RISC architectures because
13551 : : we cannot strip e.g. an extension around a load and record the
13552 : : naked load since the RTL middle-end considers that the upper bits
13553 : : are defined according to LOAD_EXTEND_OP. */
13554 : 17627616 : else if (GET_CODE (setter) == SET
13555 : 579100 : && GET_CODE (SET_DEST (setter)) == SUBREG
13556 : 567597 : && SUBREG_REG (SET_DEST (setter)) == dest
13557 : 919151 : && known_le (GET_MODE_PRECISION (GET_MODE (dest)),
13558 : : BITS_PER_WORD)
13559 : 17732043 : && subreg_lowpart_p (SET_DEST (setter)))
13560 : : {
13561 : 104427 : if (WORD_REGISTER_OPERATIONS
13562 : : && word_register_operation_p (SET_SRC (setter))
13563 : : && paradoxical_subreg_p (SET_DEST (setter)))
13564 : : record_value_for_reg (dest, record_dead_insn, SET_SRC (setter));
13565 : 104427 : else if (!partial_subreg_p (SET_DEST (setter)))
13566 : 92429 : record_value_for_reg (dest, record_dead_insn,
13567 : 92429 : gen_lowpart (GET_MODE (dest),
13568 : 92429 : SET_SRC (setter)));
13569 : : else
13570 : : {
13571 : 11998 : record_value_for_reg (dest, record_dead_insn,
13572 : 11998 : gen_lowpart (GET_MODE (dest),
13573 : 11998 : SET_SRC (setter)));
13574 : :
13575 : 11998 : unsigned HOST_WIDE_INT mask;
13576 : 11998 : reg_stat_type *rsp = ®_stat[REGNO (dest)];
13577 : 11998 : mask = GET_MODE_MASK (GET_MODE (SET_DEST (setter)));
13578 : 11998 : rsp->last_set_nonzero_bits |= ~mask;
13579 : 11998 : rsp->last_set_sign_bit_copies = 1;
13580 : : }
13581 : : }
13582 : : /* Otherwise show that we don't know the value. */
13583 : : else
13584 : 17523189 : record_value_for_reg (dest, record_dead_insn, NULL_RTX);
13585 : : }
13586 : 27081443 : else if (MEM_P (dest)
13587 : : /* Ignore pushes, they clobber nothing. */
13588 : 27081443 : && ! push_operand (dest, GET_MODE (dest)))
13589 : 13875012 : mem_last_set = DF_INSN_LUID (record_dead_insn);
13590 : : }
13591 : :
13592 : : /* Update the records of when each REG was most recently set or killed
13593 : : for the things done by INSN. This is the last thing done in processing
13594 : : INSN in the combiner loop.
13595 : :
13596 : : We update reg_stat[], in particular fields last_set, last_set_value,
13597 : : last_set_mode, last_set_nonzero_bits, last_set_sign_bit_copies,
13598 : : last_death, and also the similar information mem_last_set (which insn
13599 : : most recently modified memory) and last_call_luid (which insn was the
13600 : : most recent subroutine call). */
13601 : :
13602 : : static void
13603 : 172439329 : record_dead_and_set_regs (rtx_insn *insn)
13604 : : {
13605 : 172439329 : rtx link;
13606 : 172439329 : unsigned int i;
13607 : :
13608 : 308043566 : for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
13609 : : {
13610 : 135604237 : if (REG_NOTE_KIND (link) == REG_DEAD
13611 : 77579560 : && REG_P (XEXP (link, 0)))
13612 : : {
13613 : 77579560 : unsigned int regno = REGNO (XEXP (link, 0));
13614 : 77579560 : unsigned int endregno = END_REGNO (XEXP (link, 0));
13615 : :
13616 : 155355051 : for (i = regno; i < endregno; i++)
13617 : : {
13618 : 77775491 : reg_stat_type *rsp;
13619 : :
13620 : 77775491 : rsp = ®_stat[i];
13621 : 77775491 : rsp->last_death = insn;
13622 : : }
13623 : : }
13624 : 58024677 : else if (REG_NOTE_KIND (link) == REG_INC)
13625 : 0 : record_value_for_reg (XEXP (link, 0), insn, NULL_RTX);
13626 : : }
13627 : :
13628 : 172439329 : if (CALL_P (insn))
13629 : : {
13630 : 9289285 : HARD_REG_SET callee_clobbers
13631 : 9289285 : = insn_callee_abi (insn).full_and_partial_reg_clobbers ();
13632 : 9289285 : hard_reg_set_iterator hrsi;
13633 : 766918290 : EXECUTE_IF_SET_IN_HARD_REG_SET (callee_clobbers, 0, i, hrsi)
13634 : : {
13635 : 757629005 : reg_stat_type *rsp;
13636 : :
13637 : : /* ??? We could try to preserve some information from the last
13638 : : set of register I if the call doesn't actually clobber
13639 : : (reg:last_set_mode I), which might be true for ABIs with
13640 : : partial clobbers. However, it would be difficult to
13641 : : update last_set_nonzero_bits and last_sign_bit_copies
13642 : : to account for the part of I that actually was clobbered.
13643 : : It wouldn't help much anyway, since we rarely see this
13644 : : situation before RA. */
13645 : 757629005 : rsp = ®_stat[i];
13646 : 757629005 : rsp->last_set_invalid = true;
13647 : 757629005 : rsp->last_set = insn;
13648 : 757629005 : rsp->last_set_value = 0;
13649 : 757629005 : rsp->last_set_mode = VOIDmode;
13650 : 757629005 : rsp->last_set_nonzero_bits = 0;
13651 : 757629005 : rsp->last_set_sign_bit_copies = 0;
13652 : 757629005 : rsp->last_death = 0;
13653 : 757629005 : rsp->truncated_to_mode = VOIDmode;
13654 : : }
13655 : :
13656 : 9289285 : last_call_luid = mem_last_set = DF_INSN_LUID (insn);
13657 : :
13658 : : /* We can't combine into a call pattern. Remember, though, that
13659 : : the return value register is set at this LUID. We could
13660 : : still replace a register with the return value from the
13661 : : wrong subroutine call! */
13662 : 9289285 : note_stores (insn, record_dead_and_set_regs_1, NULL_RTX);
13663 : : }
13664 : : else
13665 : 163150044 : note_stores (insn, record_dead_and_set_regs_1, insn);
13666 : 172439329 : }
13667 : :
13668 : : /* If a SUBREG has the promoted bit set, it is in fact a property of the
13669 : : register present in the SUBREG, so for each such SUBREG go back and
13670 : : adjust nonzero and sign bit information of the registers that are
13671 : : known to have some zero/sign bits set.
13672 : :
13673 : : This is needed because when combine blows the SUBREGs away, the
13674 : : information on zero/sign bits is lost and further combines can be
13675 : : missed because of that. */
13676 : :
13677 : : static void
13678 : 5827 : record_promoted_value (rtx_insn *insn, rtx subreg)
13679 : : {
13680 : 5827 : struct insn_link *links;
13681 : 5827 : rtx set;
13682 : 5827 : unsigned int regno = REGNO (SUBREG_REG (subreg));
13683 : 5827 : machine_mode mode = GET_MODE (subreg);
13684 : :
13685 : 5827 : if (!HWI_COMPUTABLE_MODE_P (mode))
13686 : : return;
13687 : :
13688 : 6437 : for (links = LOG_LINKS (insn); links;)
13689 : : {
13690 : 5777 : reg_stat_type *rsp;
13691 : :
13692 : 5777 : insn = links->insn;
13693 : 5777 : set = single_set (insn);
13694 : :
13695 : 5777 : if (! set || !REG_P (SET_DEST (set))
13696 : 5777 : || REGNO (SET_DEST (set)) != regno
13697 : 11080 : || GET_MODE (SET_DEST (set)) != GET_MODE (SUBREG_REG (subreg)))
13698 : : {
13699 : 474 : links = links->next;
13700 : 474 : continue;
13701 : : }
13702 : :
13703 : 5303 : rsp = ®_stat[regno];
13704 : 5303 : if (rsp->last_set == insn)
13705 : : {
13706 : 5303 : if (SUBREG_PROMOTED_UNSIGNED_P (subreg))
13707 : 5303 : rsp->last_set_nonzero_bits &= GET_MODE_MASK (mode);
13708 : : }
13709 : :
13710 : 5303 : if (REG_P (SET_SRC (set)))
13711 : : {
13712 : 136 : regno = REGNO (SET_SRC (set));
13713 : 136 : links = LOG_LINKS (insn);
13714 : : }
13715 : : else
13716 : : break;
13717 : : }
13718 : : }
13719 : :
13720 : : /* Check if X, a register, is known to contain a value already
13721 : : truncated to MODE. In this case we can use a subreg to refer to
13722 : : the truncated value even though in the generic case we would need
13723 : : an explicit truncation. */
13724 : :
13725 : : static bool
13726 : 0 : reg_truncated_to_mode (machine_mode mode, const_rtx x)
13727 : : {
13728 : 0 : reg_stat_type *rsp = ®_stat[REGNO (x)];
13729 : 0 : machine_mode truncated = rsp->truncated_to_mode;
13730 : :
13731 : 0 : if (truncated == 0
13732 : 0 : || rsp->truncation_label < label_tick_ebb_start)
13733 : : return false;
13734 : 0 : if (!partial_subreg_p (mode, truncated))
13735 : : return true;
13736 : 0 : if (TRULY_NOOP_TRUNCATION_MODES_P (mode, truncated))
13737 : : return true;
13738 : : return false;
13739 : : }
13740 : :
13741 : : /* If X is a hard reg or a subreg record the mode that the register is
13742 : : accessed in. For non-TARGET_TRULY_NOOP_TRUNCATION targets we might be
13743 : : able to turn a truncate into a subreg using this information. Return true
13744 : : if traversing X is complete. */
13745 : :
13746 : : static bool
13747 : 198196501 : record_truncated_value (rtx x)
13748 : : {
13749 : 198196501 : machine_mode truncated_mode;
13750 : 198196501 : reg_stat_type *rsp;
13751 : :
13752 : 198196501 : if (GET_CODE (x) == SUBREG && REG_P (SUBREG_REG (x)))
13753 : : {
13754 : 1770358 : machine_mode original_mode = GET_MODE (SUBREG_REG (x));
13755 : 1770358 : truncated_mode = GET_MODE (x);
13756 : :
13757 : 1770358 : if (!partial_subreg_p (truncated_mode, original_mode))
13758 : : return true;
13759 : :
13760 : 1056995 : truncated_mode = GET_MODE (x);
13761 : 1056995 : if (TRULY_NOOP_TRUNCATION_MODES_P (truncated_mode, original_mode))
13762 : : return true;
13763 : :
13764 : 0 : x = SUBREG_REG (x);
13765 : 0 : }
13766 : : /* ??? For hard-regs we now record everything. We might be able to
13767 : : optimize this using last_set_mode. */
13768 : 196426143 : else if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
13769 : 20833330 : truncated_mode = GET_MODE (x);
13770 : : else
13771 : : return false;
13772 : :
13773 : 20833330 : rsp = ®_stat[REGNO (x)];
13774 : 20833330 : if (rsp->truncated_to_mode == 0
13775 : 9639311 : || rsp->truncation_label < label_tick_ebb_start
13776 : 29229157 : || partial_subreg_p (truncated_mode, rsp->truncated_to_mode))
13777 : : {
13778 : 12438096 : rsp->truncated_to_mode = truncated_mode;
13779 : 12438096 : rsp->truncation_label = label_tick;
13780 : : }
13781 : :
13782 : : return true;
13783 : : }
13784 : :
13785 : : /* Callback for note_uses. Find hardregs and subregs of pseudos and
13786 : : the modes they are used in. This can help turning TRUNCATEs into
13787 : : SUBREGs. */
13788 : :
13789 : : static void
13790 : 75645647 : record_truncated_values (rtx *loc, void *data ATTRIBUTE_UNUSED)
13791 : : {
13792 : 75645647 : subrtx_var_iterator::array_type array;
13793 : 273842148 : FOR_EACH_SUBRTX_VAR (iter, array, *loc, NONCONST)
13794 : 198196501 : if (record_truncated_value (*iter))
13795 : 22603688 : iter.skip_subrtxes ();
13796 : 75645647 : }
13797 : :
13798 : : /* Scan X for promoted SUBREGs. For each one found,
13799 : : note what it implies to the registers used in it. */
13800 : :
13801 : : static void
13802 : 360235704 : check_promoted_subreg (rtx_insn *insn, rtx x)
13803 : : {
13804 : 360235704 : if (GET_CODE (x) == SUBREG
13805 : 2131382 : && SUBREG_PROMOTED_VAR_P (x)
13806 : 360241531 : && REG_P (SUBREG_REG (x)))
13807 : 5827 : record_promoted_value (insn, x);
13808 : : else
13809 : : {
13810 : 360229877 : const char *format = GET_RTX_FORMAT (GET_CODE (x));
13811 : 360229877 : int i, j;
13812 : :
13813 : 866913760 : for (i = 0; i < GET_RTX_LENGTH (GET_CODE (x)); i++)
13814 : 506683883 : switch (format[i])
13815 : : {
13816 : 269738613 : case 'e':
13817 : 269738613 : check_promoted_subreg (insn, XEXP (x, i));
13818 : 269738613 : break;
13819 : 11684562 : case 'V':
13820 : 11684562 : case 'E':
13821 : 11684562 : if (XVEC (x, i) != 0)
13822 : 36138646 : for (j = 0; j < XVECLEN (x, i); j++)
13823 : 24454084 : check_promoted_subreg (insn, XVECEXP (x, i, j));
13824 : : break;
13825 : : }
13826 : : }
13827 : 360235704 : }
13828 : :
/* Verify that all the registers and memory references mentioned in *LOC are
   still valid.  *LOC was part of a value set in INSN when label_tick was
   equal to TICK.  Return false if some are not.  If REPLACE is true, replace
   the invalid references with (clobber (const_int 0)) and return true.  This
   replacement is useful because we often can get useful information about
   the form of a value (e.g., if it was produced by a shift that always
   produces -1 or 0) even though we don't know exactly what registers it
   was produced from.  */

static bool
get_last_value_validate (rtx *loc, rtx_insn *insn, int tick, bool replace)
{
  rtx x = *loc;
  const char *fmt = GET_RTX_FORMAT (GET_CODE (x));
  int len = GET_RTX_LENGTH (GET_CODE (x));
  int i, j;

  if (REG_P (x))
    {
      unsigned int regno = REGNO (x);
      unsigned int endregno = END_REGNO (x);
      unsigned int j;

      /* Check every hard register covered by X (a multi-word hard reg
	 spans [regno, endregno)); any invalid piece invalidates X.  */
      for (j = regno; j < endregno; j++)
	{
	  reg_stat_type *rsp = &reg_stat[j];
	  if (rsp->last_set_invalid
	      /* If this is a pseudo-register that was only set once and not
		 live at the beginning of the function, it is always valid.  */
	      || (! (regno >= FIRST_PSEUDO_REGISTER
		     && regno < reg_n_sets_max
		     && REG_N_SETS (regno) == 1
		     && (!REGNO_REG_SET_P
			 (DF_LR_IN (ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb),
			  regno)))
		  && rsp->last_set_label > tick))
	    {
	      /* Invalid: either clobber it out (and report success at
		 patching) or report failure, per REPLACE.  */
	      if (replace)
		*loc = gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);
	      return replace;
	    }
	}

      return true;
    }
  /* If this is a memory reference, make sure that there were no stores after
     it that might have clobbered the value.  We don't have alias info, so we
     assume any store invalidates it.  Moreover, we only have local UIDs, so
     we also assume that there were stores in the intervening basic blocks.  */
  else if (MEM_P (x) && !MEM_READONLY_P (x)
	   && (tick != label_tick || DF_INSN_LUID (insn) <= mem_last_set))
    {
      if (replace)
	*loc = gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);
      return replace;
    }

  /* Recurse into all rtx ('e') and rtvec ('E') operands.  */
  for (i = 0; i < len; i++)
    {
      if (fmt[i] == 'e')
	{
	  /* Check for identical subexpressions.  If x contains
	     identical subexpression we only have to traverse one of
	     them.  */
	  if (i == 1 && ARITHMETIC_P (x))
	    {
	      /* Note that at this point x0 has already been checked
		 and found valid.  */
	      rtx x0 = XEXP (x, 0);
	      rtx x1 = XEXP (x, 1);

	      /* If x0 and x1 are identical then x is also valid.  */
	      if (x0 == x1)
		return true;

	      /* If x1 is identical to a subexpression of x0 then
		 while checking x0, x1 has already been checked.  Thus
		 it is valid and so as x.  */
	      if (ARITHMETIC_P (x0)
		  && (x1 == XEXP (x0, 0) || x1 == XEXP (x0, 1)))
		return true;

	      /* If x0 is identical to a subexpression of x1 then x is
		 valid iff the rest of x1 is valid.  */
	      if (ARITHMETIC_P (x1)
		  && (x0 == XEXP (x1, 0) || x0 == XEXP (x1, 1)))
		return
		  get_last_value_validate (&XEXP (x1,
						  x0 == XEXP (x1, 0) ? 1 : 0),
					   insn, tick, replace);
	    }

	  if (!get_last_value_validate (&XEXP (x, i), insn, tick, replace))
	    return false;
	}
      else if (fmt[i] == 'E')
	for (j = 0; j < XVECLEN (x, i); j++)
	  if (!get_last_value_validate (&XVECEXP (x, i, j),
					insn, tick, replace))
	    return false;
    }

  /* If we haven't found a reason for it to be invalid, it is valid.  */
  return true;
}
13934 : :
/* Get the last value assigned to X, if known.  Some registers
   in the value may be replaced with (clobber (const_int 0)) if their value
   is no longer known reliably.  Returns 0 when no usable value exists.  */

static rtx
get_last_value (const_rtx x)
{
  unsigned int regno;
  rtx value;
  reg_stat_type *rsp;

  /* If this is a non-paradoxical SUBREG, get the value of its operand and
     then convert it to the desired mode.  If this is a paradoxical SUBREG,
     we cannot predict what values the "extra" bits might have.  */
  if (GET_CODE (x) == SUBREG
      && subreg_lowpart_p (x)
      && !paradoxical_subreg_p (x)
      && (value = get_last_value (SUBREG_REG (x))) != 0)
    return gen_lowpart (GET_MODE (x), value);

  /* Only REGs (and the SUBREG-of-REG case above) have tracked values.  */
  if (!REG_P (x))
    return 0;

  regno = REGNO (x);
  rsp = &reg_stat[regno];
  value = rsp->last_set_value;

  /* If we don't have a value, or if it isn't for this basic block and
     it's either a hard register, set more than once, or it's a live
     at the beginning of the function, return 0.

     Because if it's not live at the beginning of the function then the reg
     is always set before being used (is never used without being set).
     And, if it's set only once, and it's always set before use, then all
     uses must have the same last value, even if it's not from this basic
     block.  */

  if (value == 0
      || (rsp->last_set_label < label_tick_ebb_start
	  && (regno < FIRST_PSEUDO_REGISTER
	      || regno >= reg_n_sets_max
	      || REG_N_SETS (regno) != 1
	      || REGNO_REG_SET_P
		   (DF_LR_IN (ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb), regno))))
    return 0;

  /* If the value was set in a later insn than the ones we are processing,
     we can't use it even if the register was only set once.  */
  if (rsp->last_set_label == label_tick
      && DF_INSN_LUID (rsp->last_set) >= subst_low_luid)
    return 0;

  /* If fewer bits were set than what we are asked for now, we cannot use
     the value.  */
  if (maybe_lt (GET_MODE_PRECISION (rsp->last_set_mode),
		GET_MODE_PRECISION (GET_MODE (x))))
    return 0;

  /* If the value has all its registers valid, return it.  */
  if (get_last_value_validate (&value, rsp->last_set,
			       rsp->last_set_label, false))
    return value;

  /* Otherwise, make a copy and replace any invalid register with
     (clobber (const_int 0)).  If that fails for some reason, return 0.  */

  value = copy_rtx (value);
  if (get_last_value_validate (&value, rsp->last_set,
			       rsp->last_set_label, true))
    return value;

  return 0;
}
14008 : :
/* Variables used for communication between reg_dead_at_p and its
   note_stores callback reg_dead_at_p_1.  (Four variables, not three
   as an older comment claimed.)  */

/* Hard-register range [reg_dead_regno, reg_dead_endregno) being queried.  */
static unsigned int reg_dead_regno, reg_dead_endregno;
/* Set to 1 if the range is CLOBBERed (dead), -1 if it is SET (live),
   0 while undetermined.  */
static int reg_dead_flag;
/* The REG rtx the query is about; stored here by reg_dead_at_p but not
   read in this file's visible code — presumably for use elsewhere or
   debugging; verify before removing.  */
rtx reg_dead_reg;
14015 : :
14016 : : /* Function called via note_stores from reg_dead_at_p.
14017 : :
14018 : : If DEST is within [reg_dead_regno, reg_dead_endregno), set
14019 : : reg_dead_flag to 1 if X is a CLOBBER and to -1 it is a SET. */
14020 : :
14021 : : static void
14022 : 617137 : reg_dead_at_p_1 (rtx dest, const_rtx x, void *data ATTRIBUTE_UNUSED)
14023 : : {
14024 : 617137 : unsigned int regno, endregno;
14025 : :
14026 : 617137 : if (!REG_P (dest))
14027 : : return;
14028 : :
14029 : 566038 : regno = REGNO (dest);
14030 : 566038 : endregno = END_REGNO (dest);
14031 : 566038 : if (reg_dead_endregno > regno && reg_dead_regno < endregno)
14032 : 291168 : reg_dead_flag = (GET_CODE (x) == CLOBBER) ? 1 : -1;
14033 : : }
14034 : :
14035 : : /* Return true if REG is known to be dead at INSN.
14036 : :
14037 : : We scan backwards from INSN. If we hit a REG_DEAD note or a CLOBBER
14038 : : referencing REG, it is dead. If we hit a SET referencing REG, it is
14039 : : live. Otherwise, see if it is live or dead at the start of the basic
14040 : : block we are in. Hard regs marked as being live in NEWPAT_USED_REGS
14041 : : must be assumed to be always live. */
14042 : :
14043 : : static bool
14044 : 1552408 : reg_dead_at_p (rtx reg, rtx_insn *insn)
14045 : : {
14046 : 1552408 : basic_block block;
14047 : 1552408 : unsigned int i;
14048 : :
14049 : : /* Set variables for reg_dead_at_p_1. */
14050 : 1552408 : reg_dead_regno = REGNO (reg);
14051 : 1552408 : reg_dead_endregno = END_REGNO (reg);
14052 : 1552408 : reg_dead_reg = reg;
14053 : :
14054 : 1552408 : reg_dead_flag = 0;
14055 : :
14056 : : /* Check that reg isn't mentioned in NEWPAT_USED_REGS. For fixed registers
14057 : : we allow the machine description to decide whether use-and-clobber
14058 : : patterns are OK. */
14059 : 1552408 : if (reg_dead_regno < FIRST_PSEUDO_REGISTER)
14060 : : {
14061 : 3104816 : for (i = reg_dead_regno; i < reg_dead_endregno; i++)
14062 : 1552408 : if (!fixed_regs[i] && TEST_HARD_REG_BIT (newpat_used_regs, i))
14063 : : return false;
14064 : : }
14065 : :
14066 : : /* Scan backwards until we find a REG_DEAD note, SET, CLOBBER, or
14067 : : beginning of basic block. */
14068 : 1552408 : block = BLOCK_FOR_INSN (insn);
14069 : 745591 : for (;;)
14070 : : {
14071 : 2297999 : if (INSN_P (insn))
14072 : : {
14073 : 2145569 : if (find_regno_note (insn, REG_UNUSED, reg_dead_regno))
14074 : : return true;
14075 : :
14076 : 796607 : note_stores (insn, reg_dead_at_p_1, NULL);
14077 : 796607 : if (reg_dead_flag)
14078 : 145584 : return reg_dead_flag == 1 ? 1 : 0;
14079 : :
14080 : 651023 : if (find_regno_note (insn, REG_DEAD, reg_dead_regno))
14081 : : return true;
14082 : : }
14083 : :
14084 : 775033 : if (insn == BB_HEAD (block))
14085 : : break;
14086 : :
14087 : 745591 : insn = PREV_INSN (insn);
14088 : : }
14089 : :
14090 : : /* Look at live-in sets for the basic block that we were in. */
14091 : 58884 : for (i = reg_dead_regno; i < reg_dead_endregno; i++)
14092 : 29442 : if (REGNO_REG_SET_P (df_get_live_in (block), i))
14093 : : return false;
14094 : :
14095 : : return true;
14096 : : }
14097 : :
/* Note hard registers in X that are used, accumulating them into the
   global NEWPAT_USED_REGS set.  Certain always-live registers (stack,
   frame and arg pointers) are deliberately excluded.  */

static void
mark_used_regs_combine (rtx x)
{
  RTX_CODE code = GET_CODE (x);
  unsigned int regno;
  int i;

  switch (code)
    {
    /* Leaf codes with no registers inside: nothing to record.  */
    case LABEL_REF:
    case SYMBOL_REF:
    case CONST:
    CASE_CONST_ANY:
    case PC:
    case ADDR_VEC:
    case ADDR_DIFF_VEC:
    case ASM_INPUT:
      return;

    case CLOBBER:
      /* If we are clobbering a MEM, mark any hard registers inside the
	 address as used.  */
      if (MEM_P (XEXP (x, 0)))
	mark_used_regs_combine (XEXP (XEXP (x, 0), 0));
      return;

    case REG:
      regno = REGNO (x);
      /* A hard reg in a wide mode may really be multiple registers.
	 If so, mark all of them just like the first.  */
      if (regno < FIRST_PSEUDO_REGISTER)
	{
	  /* None of this applies to the stack, frame or arg pointers.  */
	  if (regno == STACK_POINTER_REGNUM
	      || (!HARD_FRAME_POINTER_IS_FRAME_POINTER
		  && regno == HARD_FRAME_POINTER_REGNUM)
	      || (FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
		  && regno == ARG_POINTER_REGNUM && fixed_regs[regno])
	      || regno == FRAME_POINTER_REGNUM)
	    return;

	  /* Adds every hard reg covered by X's mode starting at REGNO.  */
	  add_to_hard_reg_set (&newpat_used_regs, GET_MODE (x), regno);
	}
      return;

    case SET:
      {
	/* If setting a MEM, or a SUBREG of a MEM, then note any hard regs in
	   the address.  */
	rtx testreg = SET_DEST (x);

	/* Strip wrappers to reach the real destination; a register
	   destination itself is written, not used, so it is skipped.  */
	while (GET_CODE (testreg) == SUBREG
	       || GET_CODE (testreg) == ZERO_EXTRACT
	       || GET_CODE (testreg) == STRICT_LOW_PART)
	  testreg = XEXP (testreg, 0);

	if (MEM_P (testreg))
	  mark_used_regs_combine (XEXP (testreg, 0));

	mark_used_regs_combine (SET_SRC (x));
      }
      return;

    default:
      break;
    }

  /* Recursively scan the operands of this expression.  */

  {
    const char *fmt = GET_RTX_FORMAT (code);

    for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
      {
	if (fmt[i] == 'e')
	  mark_used_regs_combine (XEXP (x, i));
	else if (fmt[i] == 'E')
	  {
	    int j;

	    for (j = 0; j < XVECLEN (x, i); j++)
	      mark_used_regs_combine (XVECEXP (x, i, j));
	  }
      }
  }
}
14186 : :
14187 : : /* Remove register number REGNO from the dead registers list of INSN.
14188 : :
14189 : : Return the note used to record the death, if there was one. */
14190 : :
14191 : : rtx
14192 : 3126170 : remove_death (unsigned int regno, rtx_insn *insn)
14193 : : {
14194 : 3126170 : rtx note = find_regno_note (insn, REG_DEAD, regno);
14195 : :
14196 : 3126170 : if (note)
14197 : 465665 : remove_note (insn, note);
14198 : :
14199 : 3126170 : return note;
14200 : : }
14201 : :
/* For each register (hardware or pseudo) used within expression X, if its
   death is in an instruction with luid between FROM_LUID (inclusive) and
   TO_INSN (exclusive), put a REG_DEAD note for that register in the
   list headed by PNOTES.

   That said, don't move registers killed by maybe_kill_insn.

   This is done when X is being merged by combination into TO_INSN.  These
   notes will then be distributed as needed.  */

static void
move_deaths (rtx x, rtx maybe_kill_insn, int from_luid, rtx_insn *to_insn,
	     rtx *pnotes)
{
  const char *fmt;
  int len, i;
  enum rtx_code code = GET_CODE (x);

  if (code == REG)
    {
      unsigned int regno = REGNO (x);
      rtx_insn *where_dead = reg_stat[regno].last_death;

      /* If we do not know where the register died, it may still die between
	 FROM_LUID and TO_INSN.  If so, find it.  This is PR83304.  */
      if (!where_dead || DF_INSN_LUID (where_dead) >= DF_INSN_LUID (to_insn))
	{
	  /* Walk backwards from TO_INSN within its basic block, stopping
	     at the first insn that sets or kills REGNO.  */
	  rtx_insn *insn = prev_real_nondebug_insn (to_insn);
	  while (insn
		 && BLOCK_FOR_INSN (insn) == BLOCK_FOR_INSN (to_insn)
		 && DF_INSN_LUID (insn) >= from_luid)
	    {
	      if (dead_or_set_regno_p (insn, regno))
		{
		  /* Only a REG_DEAD note counts as a death; a plain SET
		     ends the search without recording one.  */
		  if (find_regno_note (insn, REG_DEAD, regno))
		    where_dead = insn;
		  break;
		}

	      insn = prev_real_nondebug_insn (insn);
	    }
	}

      /* Don't move the register if it gets killed in between from and to.  */
      if (maybe_kill_insn && reg_set_p (x, maybe_kill_insn)
	  && ! reg_referenced_p (x, maybe_kill_insn))
	return;

      /* Move the death only if it falls in [FROM_LUID, TO_INSN) within
	 TO_INSN's basic block.  */
      if (where_dead
	  && BLOCK_FOR_INSN (where_dead) == BLOCK_FOR_INSN (to_insn)
	  && DF_INSN_LUID (where_dead) >= from_luid
	  && DF_INSN_LUID (where_dead) < DF_INSN_LUID (to_insn))
	{
	  rtx note = remove_death (regno, where_dead);

	  /* It is possible for the call above to return 0.  This can occur
	     when last_death points to I2 or I1 that we combined with.
	     In that case make a new note.

	     We must also check for the case where X is a hard register
	     and NOTE is a death note for a range of hard registers
	     including X.  In that case, we must put REG_DEAD notes for
	     the remaining registers in place of NOTE.  */

	  if (note != 0 && regno < FIRST_PSEUDO_REGISTER
	      && partial_subreg_p (GET_MODE (x), GET_MODE (XEXP (note, 0))))
	    {
	      unsigned int deadregno = REGNO (XEXP (note, 0));
	      unsigned int deadend = END_REGNO (XEXP (note, 0));
	      unsigned int ourend = END_REGNO (x);
	      unsigned int i;

	      /* Re-add deaths for the hard regs the wide note covered
		 but X does not.  */
	      for (i = deadregno; i < deadend; i++)
		if (i < regno || i >= ourend)
		  add_reg_note (where_dead, REG_DEAD, regno_reg_rtx[i]);
	    }

	  /* If we didn't find any note, or if we found a REG_DEAD note that
	     covers only part of the given reg, and we have a multi-reg hard
	     register, then to be safe we must check for REG_DEAD notes
	     for each register other than the first.  They could have
	     their own REG_DEAD notes lying around.  */
	  else if ((note == 0
		    || (note != 0
			&& partial_subreg_p (GET_MODE (XEXP (note, 0)),
					     GET_MODE (x))))
		   && regno < FIRST_PSEUDO_REGISTER
		   && REG_NREGS (x) > 1)
	    {
	      unsigned int ourend = END_REGNO (x);
	      unsigned int i, offset;
	      rtx oldnotes = 0;

	      if (note)
		offset = hard_regno_nregs (regno, GET_MODE (XEXP (note, 0)));
	      else
		offset = 1;

	      /* Recurse per remaining hard reg; OLDNOTES is discarded,
		 only the side effect of moving the deaths matters.  */
	      for (i = regno + offset; i < ourend; i++)
		move_deaths (regno_reg_rtx[i],
			     maybe_kill_insn, from_luid, to_insn, &oldnotes);
	    }

	  if (note != 0 && GET_MODE (XEXP (note, 0)) == GET_MODE (x))
	    {
	      /* Reuse the existing note: splice it onto *PNOTES.  */
	      XEXP (note, 1) = *pnotes;
	      *pnotes = note;
	    }
	  else
	    *pnotes = alloc_reg_note (REG_DEAD, x, *pnotes);
	}

      return;
    }

  else if (GET_CODE (x) == SET)
    {
      rtx dest = SET_DEST (x);

      move_deaths (SET_SRC (x), maybe_kill_insn, from_luid, to_insn, pnotes);

      /* In the case of a ZERO_EXTRACT, a STRICT_LOW_PART, or a SUBREG
	 that accesses one word of a multi-word item, some
	 piece of every register in the expression is used by
	 this insn, so remove any old death.  */
      /* ??? So why do we test for equality of the sizes?  */

      if (GET_CODE (dest) == ZERO_EXTRACT
	  || GET_CODE (dest) == STRICT_LOW_PART
	  || (GET_CODE (dest) == SUBREG
	      && !read_modify_subreg_p (dest)))
	{
	  move_deaths (dest, maybe_kill_insn, from_luid, to_insn, pnotes);
	  return;
	}

      /* If this is some other SUBREG, we know it replaces the entire
	 value, so use that as the destination.  */
      if (GET_CODE (dest) == SUBREG)
	dest = SUBREG_REG (dest);

      /* If this is a MEM, adjust deaths of anything used in the address.
	 For a REG (the only other possibility), the entire value is
	 being replaced so the old value is not used in this insn.  */

      if (MEM_P (dest))
	move_deaths (XEXP (dest, 0), maybe_kill_insn, from_luid,
		     to_insn, pnotes);
      return;
    }

  else if (GET_CODE (x) == CLOBBER)
    return;

  /* Generic recursion over all rtx ('e') and rtvec ('E') operands.  */
  len = GET_RTX_LENGTH (code);
  fmt = GET_RTX_FORMAT (code);

  for (i = 0; i < len; i++)
    {
      if (fmt[i] == 'E')
	{
	  int j;
	  for (j = XVECLEN (x, i) - 1; j >= 0; j--)
	    move_deaths (XVECEXP (x, i, j), maybe_kill_insn, from_luid,
			 to_insn, pnotes);
	}
      else if (fmt[i] == 'e')
	move_deaths (XEXP (x, i), maybe_kill_insn, from_luid, to_insn, pnotes);
    }
}
14372 : :
14373 : : /* Return true if X is the target of a bit-field assignment in BODY, the
14374 : : pattern of an insn. X must be a REG. */
14375 : :
14376 : : static bool
14377 : 4735696 : reg_bitfield_target_p (rtx x, rtx body)
14378 : : {
14379 : 4735696 : int i;
14380 : :
14381 : 4735696 : if (GET_CODE (body) == SET)
14382 : : {
14383 : 3467764 : rtx dest = SET_DEST (body);
14384 : 3467764 : rtx target;
14385 : 3467764 : unsigned int regno, tregno, endregno, endtregno;
14386 : :
14387 : 3467764 : if (GET_CODE (dest) == ZERO_EXTRACT)
14388 : 434 : target = XEXP (dest, 0);
14389 : 3467330 : else if (GET_CODE (dest) == STRICT_LOW_PART)
14390 : 1720 : target = SUBREG_REG (XEXP (dest, 0));
14391 : : else
14392 : : return false;
14393 : :
14394 : 2154 : if (GET_CODE (target) == SUBREG)
14395 : 227 : target = SUBREG_REG (target);
14396 : :
14397 : 2154 : if (!REG_P (target))
14398 : : return false;
14399 : :
14400 : 2075 : tregno = REGNO (target), regno = REGNO (x);
14401 : 2075 : if (tregno >= FIRST_PSEUDO_REGISTER || regno >= FIRST_PSEUDO_REGISTER)
14402 : 2065 : return target == x;
14403 : :
14404 : 10 : endtregno = end_hard_regno (GET_MODE (target), tregno);
14405 : 10 : endregno = end_hard_regno (GET_MODE (x), regno);
14406 : :
14407 : 10 : return endregno > tregno && regno < endtregno;
14408 : : }
14409 : :
14410 : 1267932 : else if (GET_CODE (body) == PARALLEL)
14411 : 1898247 : for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
14412 : 1276133 : if (reg_bitfield_target_p (x, XVECEXP (body, 0, i)))
14413 : : return true;
14414 : :
14415 : : return false;
14416 : : }
14417 : :
14418 : : /* Given a chain of REG_NOTES originally from FROM_INSN, try to place them
14419 : : as appropriate. I3 and I2 are the insns resulting from the combination
14420 : : insns including FROM (I2 may be zero).
14421 : :
14422 : : ELIM_I2 and ELIM_I1 are either zero or registers that we know will
14423 : : not need REG_DEAD notes because they are being substituted for. This
14424 : : saves searching in the most common cases.
14425 : :
14426 : : Each note in the list is either ignored or placed on some insns, depending
14427 : : on the type of note. */
14428 : :
14429 : : static void
14430 : 9879608 : distribute_notes (rtx notes, rtx_insn *from_insn, rtx_insn *i3, rtx_insn *i2,
14431 : : rtx elim_i2, rtx elim_i1, rtx elim_i0)
14432 : : {
14433 : 9879608 : rtx note, next_note;
14434 : 9879608 : rtx tem_note;
14435 : 9879608 : rtx_insn *tem_insn;
14436 : :
14437 : 22973895 : for (note = notes; note; note = next_note)
14438 : : {
14439 : 13094287 : rtx_insn *place = 0, *place2 = 0;
14440 : :
14441 : 13094287 : next_note = XEXP (note, 1);
14442 : 13094287 : switch (REG_NOTE_KIND (note))
14443 : : {
14444 : : case REG_BR_PROB:
14445 : : case REG_BR_PRED:
14446 : : /* Doesn't matter much where we put this, as long as it's somewhere.
14447 : : It is preferable to keep these notes on branches, which is most
14448 : : likely to be i3. */
14449 : : place = i3;
14450 : : break;
14451 : :
14452 : 0 : case REG_NON_LOCAL_GOTO:
14453 : 0 : if (JUMP_P (i3))
14454 : : place = i3;
14455 : : else
14456 : : {
14457 : 0 : gcc_assert (i2 && JUMP_P (i2));
14458 : : place = i2;
14459 : : }
14460 : : break;
14461 : :
14462 : 22293 : case REG_EH_REGION:
14463 : 22293 : {
14464 : : /* The landing pad handling needs to be kept in sync with the
14465 : : prerequisite checking in try_combine. */
14466 : 22293 : int lp_nr = INTVAL (XEXP (note, 0));
14467 : : /* A REG_EH_REGION note transfering control can only ever come
14468 : : from i3. */
14469 : 22293 : if (lp_nr > 0)
14470 : 12792 : gcc_assert (from_insn == i3);
14471 : : /* We are making sure there is a single effective REG_EH_REGION
14472 : : note and it's valid to put it on i3. */
14473 : 22293 : if (!insn_could_throw_p (from_insn)
14474 : 22293 : && !(lp_nr == INT_MIN && can_nonlocal_goto (from_insn)))
14475 : : /* Throw away stray notes on insns that can never throw or
14476 : : make a nonlocal goto. */
14477 : : ;
14478 : : else
14479 : : {
14480 : 22212 : if (CALL_P (i3))
14481 : : place = i3;
14482 : : else
14483 : : {
14484 : 2087 : gcc_assert (cfun->can_throw_non_call_exceptions);
14485 : : /* If i3 can still trap preserve the note, otherwise we've
14486 : : combined things such that we can now prove that the
14487 : : instructions can't trap. Drop the note in this case. */
14488 : 2087 : if (may_trap_p (i3))
14489 : : place = i3;
14490 : : }
14491 : : }
14492 : : break;
14493 : : }
14494 : :
14495 : 126025 : case REG_ARGS_SIZE:
14496 : : /* ??? How to distribute between i3-i1. Assume i3 contains the
14497 : : entire adjustment. Assert i3 contains at least some adjust. */
14498 : 126025 : if (!noop_move_p (i3))
14499 : : {
14500 : 126024 : poly_int64 old_size, args_size = get_args_size (note);
14501 : : /* fixup_args_size_notes looks at REG_NORETURN note,
14502 : : so ensure the note is placed there first. */
14503 : 126024 : if (CALL_P (i3))
14504 : : {
14505 : : rtx *np;
14506 : 1629 : for (np = &next_note; *np; np = &XEXP (*np, 1))
14507 : 20 : if (REG_NOTE_KIND (*np) == REG_NORETURN)
14508 : : {
14509 : 9 : rtx n = *np;
14510 : 9 : *np = XEXP (n, 1);
14511 : 9 : XEXP (n, 1) = REG_NOTES (i3);
14512 : 9 : REG_NOTES (i3) = n;
14513 : 9 : break;
14514 : : }
14515 : : }
14516 : 126024 : old_size = fixup_args_size_notes (PREV_INSN (i3), i3, args_size);
14517 : : /* emit_call_1 adds for !ACCUMULATE_OUTGOING_ARGS
14518 : : REG_ARGS_SIZE note to all noreturn calls, allow that here. */
14519 : 126024 : gcc_assert (maybe_ne (old_size, args_size)
14520 : : || (CALL_P (i3)
14521 : : && !ACCUMULATE_OUTGOING_ARGS
14522 : : && find_reg_note (i3, REG_NORETURN, NULL_RTX)));
14523 : : }
14524 : : break;
14525 : :
14526 : 82555 : case REG_NORETURN:
14527 : 82555 : case REG_SETJMP:
14528 : 82555 : case REG_TM:
14529 : 82555 : case REG_CALL_DECL:
14530 : 82555 : case REG_UNTYPED_CALL:
14531 : 82555 : case REG_CALL_NOCF_CHECK:
14532 : : /* These notes must remain with the call. It should not be
14533 : : possible for both I2 and I3 to be a call. */
14534 : 82555 : if (CALL_P (i3))
14535 : : place = i3;
14536 : : else
14537 : : {
14538 : 0 : gcc_assert (i2 && CALL_P (i2));
14539 : : place = i2;
14540 : : }
14541 : : break;
14542 : :
14543 : 1996803 : case REG_UNUSED:
14544 : : /* Any clobbers for i3 may still exist, and so we must process
14545 : : REG_UNUSED notes from that insn.
14546 : :
14547 : : Any clobbers from i2 or i1 can only exist if they were added by
14548 : : recog_for_combine. In that case, recog_for_combine created the
14549 : : necessary REG_UNUSED notes. Trying to keep any original
14550 : : REG_UNUSED notes from these insns can cause incorrect output
14551 : : if it is for the same register as the original i3 dest.
14552 : : In that case, we will notice that the register is set in i3,
14553 : : and then add a REG_UNUSED note for the destination of i3, which
14554 : : is wrong. However, it is possible to have REG_UNUSED notes from
14555 : : i2 or i1 for register which were both used and clobbered, so
14556 : : we keep notes from i2 or i1 if they will turn into REG_DEAD
14557 : : notes. */
14558 : :
14559 : : /* If this register is set or clobbered between FROM_INSN and I3,
14560 : : we should not create a note for it. */
14561 : 1996803 : if (reg_set_between_p (XEXP (note, 0), from_insn, i3))
14562 : : break;
14563 : :
14564 : : /* If this register is set or clobbered in I3, put the note there
14565 : : unless there is one already. */
14566 : 1915234 : if (reg_set_p (XEXP (note, 0), PATTERN (i3)))
14567 : : {
14568 : 1113517 : if (from_insn != i3)
14569 : : break;
14570 : :
14571 : 618976 : if (! (REG_P (XEXP (note, 0))
14572 : 618976 : ? find_regno_note (i3, REG_UNUSED, REGNO (XEXP (note, 0)))
14573 : 0 : : find_reg_note (i3, REG_UNUSED, XEXP (note, 0))))
14574 : : place = i3;
14575 : : }
14576 : : /* Otherwise, if this register is used by I3, then this register
14577 : : now dies here, so we must put a REG_DEAD note here unless there
14578 : : is one already. */
14579 : 801717 : else if (reg_referenced_p (XEXP (note, 0), PATTERN (i3)))
14580 : : {
14581 : 7675 : if (! (REG_P (XEXP (note, 0))
14582 : 7675 : ? find_regno_note (i3, REG_DEAD, REGNO (XEXP (note, 0)))
14583 : 0 : : find_reg_note (i3, REG_DEAD, XEXP (note, 0))))
14584 : : {
14585 : 7438 : PUT_REG_NOTE_KIND (note, REG_DEAD);
14586 : 7438 : place = i3;
14587 : : }
14588 : : }
14589 : :
14590 : : /* A SET or CLOBBER of the REG_UNUSED reg has been removed,
14591 : : but we can't tell which at this point. We must reset any
14592 : : expectations we had about the value that was previously
14593 : : stored in the reg. ??? Ideally, we'd adjust REG_N_SETS
14594 : : and, if appropriate, restore its previous value, but we
14595 : : don't have enough information for that at this point. */
14596 : : else
14597 : : {
14598 : 794042 : record_value_for_reg (XEXP (note, 0), NULL, NULL_RTX);
14599 : :
14600 : : /* Otherwise, if this register is now referenced in i2
14601 : : then the register used to be modified in one of the
14602 : : original insns. If it was i3 (say, in an unused
14603 : : parallel), it's now completely gone, so the note can
14604 : : be discarded. But if it was modified in i2, i1 or i0
14605 : : and we still reference it in i2, then we're
14606 : : referencing the previous value, and since the
14607 : : register was modified and REG_UNUSED, we know that
14608 : : the previous value is now dead. So, if we only
14609 : : reference the register in i2, we change the note to
14610 : : REG_DEAD, to reflect the previous value. However, if
14611 : : we're also setting or clobbering the register as
14612 : : scratch, we know (because the register was not
14613 : : referenced in i3) that it's unused, just as it was
14614 : : unused before, and we place the note in i2. */
14615 : 19527 : if (from_insn != i3 && i2 && INSN_P (i2)
14616 : 813569 : && reg_referenced_p (XEXP (note, 0), PATTERN (i2)))
14617 : : {
14618 : 7 : if (!reg_set_p (XEXP (note, 0), PATTERN (i2)))
14619 : 7 : PUT_REG_NOTE_KIND (note, REG_DEAD);
14620 : 7 : if (! (REG_P (XEXP (note, 0))
14621 : 7 : ? find_regno_note (i2, REG_NOTE_KIND (note),
14622 : 7 : REGNO (XEXP (note, 0)))
14623 : 0 : : find_reg_note (i2, REG_NOTE_KIND (note),
14624 : : XEXP (note, 0))))
14625 : : place = i2;
14626 : : }
14627 : : }
14628 : :
14629 : : break;
14630 : :
14631 : 390692 : case REG_EQUAL:
14632 : 390692 : case REG_EQUIV:
14633 : 390692 : case REG_NOALIAS:
14634 : : /* These notes say something about results of an insn. We can
14635 : : only support them if they used to be on I3 in which case they
14636 : : remain on I3. Otherwise they are ignored.
14637 : :
14638 : : If the note refers to an expression that is not a constant, we
14639 : : must also ignore the note since we cannot tell whether the
14640 : : equivalence is still true. It might be possible to do
14641 : : slightly better than this (we only have a problem if I2DEST
14642 : : or I1DEST is present in the expression), but it doesn't
14643 : : seem worth the trouble. */
14644 : :
14645 : 390692 : if (from_insn == i3
14646 : 181864 : && (XEXP (note, 0) == 0 || CONSTANT_P (XEXP (note, 0))))
14647 : : place = i3;
14648 : : break;
14649 : :
14650 : 0 : case REG_INC:
14651 : : /* These notes say something about how a register is used. They must
14652 : : be present on any use of the register in I2 or I3. */
14653 : 0 : if (reg_mentioned_p (XEXP (note, 0), PATTERN (i3)))
14654 : 0 : place = i3;
14655 : :
14656 : 0 : if (i2 && reg_mentioned_p (XEXP (note, 0), PATTERN (i2)))
14657 : : {
14658 : 0 : if (place)
14659 : : place2 = i2;
14660 : : else
14661 : : place = i2;
14662 : : }
14663 : : break;
14664 : :
14665 : 7752 : case REG_LABEL_TARGET:
14666 : 7752 : case REG_LABEL_OPERAND:
14667 : : /* This can show up in several ways -- either directly in the
14668 : : pattern, or hidden off in the constant pool with (or without?)
14669 : : a REG_EQUAL note. */
14670 : : /* ??? Ignore the without-reg_equal-note problem for now. */
14671 : 7752 : if (reg_mentioned_p (XEXP (note, 0), PATTERN (i3))
14672 : 7752 : || ((tem_note = find_reg_note (i3, REG_EQUAL, NULL_RTX))
14673 : 0 : && GET_CODE (XEXP (tem_note, 0)) == LABEL_REF
14674 : 0 : && label_ref_label (XEXP (tem_note, 0)) == XEXP (note, 0)))
14675 : : place = i3;
14676 : :
14677 : 7752 : if (i2
14678 : 7752 : && (reg_mentioned_p (XEXP (note, 0), PATTERN (i2))
14679 : 0 : || ((tem_note = find_reg_note (i2, REG_EQUAL, NULL_RTX))
14680 : 0 : && GET_CODE (XEXP (tem_note, 0)) == LABEL_REF
14681 : 0 : && label_ref_label (XEXP (tem_note, 0)) == XEXP (note, 0))))
14682 : : {
14683 : 0 : if (place)
14684 : : place2 = i2;
14685 : : else
14686 : : place = i2;
14687 : : }
14688 : :
14689 : : /* For REG_LABEL_TARGET on a JUMP_P, we prefer to put the note
14690 : : as a JUMP_LABEL or decrement LABEL_NUSES if it's already
14691 : : there. */
14692 : 7752 : if (place && JUMP_P (place)
14693 : 6721 : && REG_NOTE_KIND (note) == REG_LABEL_TARGET
14694 : 0 : && (JUMP_LABEL (place) == NULL
14695 : 0 : || JUMP_LABEL (place) == XEXP (note, 0)))
14696 : : {
14697 : 0 : rtx label = JUMP_LABEL (place);
14698 : :
14699 : 0 : if (!label)
14700 : 0 : JUMP_LABEL (place) = XEXP (note, 0);
14701 : 0 : else if (LABEL_P (label))
14702 : 0 : LABEL_NUSES (label)--;
14703 : : }
14704 : :
14705 : 7752 : if (place2 && JUMP_P (place2)
14706 : 0 : && REG_NOTE_KIND (note) == REG_LABEL_TARGET
14707 : 0 : && (JUMP_LABEL (place2) == NULL
14708 : 0 : || JUMP_LABEL (place2) == XEXP (note, 0)))
14709 : : {
14710 : 0 : rtx label = JUMP_LABEL (place2);
14711 : :
14712 : 0 : if (!label)
14713 : 0 : JUMP_LABEL (place2) = XEXP (note, 0);
14714 : 0 : else if (LABEL_P (label))
14715 : 0 : LABEL_NUSES (label)--;
14716 : : place2 = 0;
14717 : : }
14718 : : break;
14719 : :
14720 : : case REG_NONNEG:
14721 : : /* This note says something about the value of a register prior
14722 : : to the execution of an insn. It is too much trouble to see
14723 : : if the note is still correct in all situations. It is better
14724 : : to simply delete it. */
14725 : : break;
14726 : :
14727 : 10428554 : case REG_DEAD:
14728 : : /* If we replaced the right hand side of FROM_INSN with a
14729 : : REG_EQUAL note, the original use of the dying register
14730 : : will not have been combined into I3 and I2. In such cases,
14731 : : FROM_INSN is guaranteed to be the first of the combined
14732 : : instructions, so we simply need to search back before
14733 : : FROM_INSN for the previous use or set of this register,
14734 : : then alter the notes there appropriately.
14735 : :
14736 : : If the register is used as an input in I3, it dies there.
14737 : : Similarly for I2, if it is nonzero and adjacent to I3.
14738 : :
14739 : : If the register is not used as an input in either I3 or I2
14740 : : and it is not one of the registers we were supposed to eliminate,
14741 : : there are two possibilities. We might have a non-adjacent I2
14742 : : or we might have somehow eliminated an additional register
14743 : : from a computation. For example, we might have had A & B where
14744 : : we discover that B will always be zero. In this case we will
14745 : : eliminate the reference to A.
14746 : :
14747 : : In both cases, we must search to see if we can find a previous
14748 : : use of A and put the death note there. */
14749 : :
14750 : 10428554 : if (from_insn
14751 : 7297576 : && from_insn == i2mod
14752 : 10430344 : && !reg_overlap_mentioned_p (XEXP (note, 0), i2mod_new_rhs))
14753 : : tem_insn = from_insn;
14754 : : else
14755 : : {
14756 : 10427067 : if (from_insn
14757 : 7296089 : && CALL_P (from_insn)
14758 : 10660907 : && find_reg_fusage (from_insn, USE, XEXP (note, 0)))
14759 : : place = from_insn;
14760 : 10275775 : else if (i2 && reg_set_p (XEXP (note, 0), PATTERN (i2)))
14761 : : {
14762 : : /* If the new I2 sets the same register that is marked
14763 : : dead in the note, we do not in general know where to
14764 : : put the note. One important case we _can_ handle is
14765 : : when the note comes from I3. */
14766 : 38488 : if (from_insn == i3)
14767 : : place = i3;
14768 : : else
14769 : : break;
14770 : : }
14771 : 10237287 : else if (reg_referenced_p (XEXP (note, 0), PATTERN (i3)))
14772 : : place = i3;
14773 : 105595 : else if (i2 != 0 && next_nonnote_nondebug_insn (i2) == i3
14774 : 4084011 : && reg_referenced_p (XEXP (note, 0), PATTERN (i2)))
14775 : : place = i2;
14776 : 3936341 : else if ((rtx_equal_p (XEXP (note, 0), elim_i2)
14777 : 3824535 : && !(i2mod
14778 : 32107 : && reg_overlap_mentioned_p (XEXP (note, 0),
14779 : : i2mod_old_rhs)))
14780 : 143923 : || rtx_equal_p (XEXP (note, 0), elim_i1)
14781 : 3990036 : || rtx_equal_p (XEXP (note, 0), elim_i0))
14782 : : break;
14783 : 234755 : tem_insn = i3;
14784 : : }
14785 : :
14786 : 234755 : if (place == 0)
14787 : : {
14788 : 50227 : basic_block bb = this_basic_block;
14789 : :
14790 : 2376207 : for (tem_insn = PREV_INSN (tem_insn); place == 0; tem_insn = PREV_INSN (tem_insn))
14791 : : {
14792 : 2376207 : if (!NONDEBUG_INSN_P (tem_insn))
14793 : : {
14794 : 1753280 : if (tem_insn == BB_HEAD (bb))
14795 : : break;
14796 : 1719137 : continue;
14797 : : }
14798 : :
14799 : : /* If the register is being set at TEM_INSN, see if that is all
14800 : : TEM_INSN is doing. If so, delete TEM_INSN. Otherwise, make this
14801 : : into a REG_UNUSED note instead. Don't delete sets to
14802 : : global register vars. */
14803 : 622927 : if ((REGNO (XEXP (note, 0)) >= FIRST_PSEUDO_REGISTER
14804 : 1477 : || !global_regs[REGNO (XEXP (note, 0))])
14805 : 624404 : && reg_set_p (XEXP (note, 0), PATTERN (tem_insn)))
14806 : : {
14807 : 15092 : rtx set = single_set (tem_insn);
14808 : 15092 : rtx inner_dest = 0;
14809 : :
14810 : 15092 : if (set != 0)
14811 : 12216 : for (inner_dest = SET_DEST (set);
14812 : 12225 : (GET_CODE (inner_dest) == STRICT_LOW_PART
14813 : 12225 : || GET_CODE (inner_dest) == SUBREG
14814 : 12225 : || GET_CODE (inner_dest) == ZERO_EXTRACT);
14815 : 9 : inner_dest = XEXP (inner_dest, 0))
14816 : : ;
14817 : :
14818 : : /* Verify that it was the set, and not a clobber that
14819 : : modified the register.
14820 : :
14821 : : If we cannot delete the setter due to side
14822 : : effects, mark the user with an UNUSED note instead
14823 : : of deleting it. */
14824 : :
14825 : 12216 : if (set != 0 && ! side_effects_p (SET_SRC (set))
14826 : 11844 : && rtx_equal_p (XEXP (note, 0), inner_dest))
14827 : : {
14828 : : /* Move the notes and links of TEM_INSN elsewhere.
14829 : : This might delete other dead insns recursively.
14830 : : First set the pattern to something that won't use
14831 : : any register. */
14832 : 11741 : rtx old_notes = REG_NOTES (tem_insn);
14833 : :
14834 : 11741 : PATTERN (tem_insn) = pc_rtx;
14835 : 11741 : REG_NOTES (tem_insn) = NULL;
14836 : :
14837 : 11741 : distribute_notes (old_notes, tem_insn, tem_insn, NULL,
14838 : : NULL_RTX, NULL_RTX, NULL_RTX);
14839 : 11741 : distribute_links (LOG_LINKS (tem_insn));
14840 : :
14841 : 11741 : unsigned int regno = REGNO (XEXP (note, 0));
14842 : 11741 : reg_stat_type *rsp = ®_stat[regno];
14843 : 11741 : if (rsp->last_set == tem_insn)
14844 : 10365 : record_value_for_reg (XEXP (note, 0), NULL, NULL_RTX);
14845 : :
14846 : 11741 : SET_INSN_DELETED (tem_insn);
14847 : 11741 : if (tem_insn == i2)
14848 : 606843 : i2 = NULL;
14849 : : }
14850 : : else
14851 : : {
14852 : 3351 : PUT_REG_NOTE_KIND (note, REG_UNUSED);
14853 : :
14854 : : /* If there isn't already a REG_UNUSED note, put one
14855 : : here. Do not place a REG_DEAD note, even if
14856 : : the register is also used here; that would not
14857 : : match the algorithm used in lifetime analysis
14858 : : and can cause the consistency check in the
14859 : : scheduler to fail. */
14860 : 3351 : if (! find_regno_note (tem_insn, REG_UNUSED,
14861 : 3351 : REGNO (XEXP (note, 0))))
14862 : 1825 : place = tem_insn;
14863 : : break;
14864 : : }
14865 : : }
14866 : 607835 : else if (reg_referenced_p (XEXP (note, 0), PATTERN (tem_insn))
14867 : 607835 : || (CALL_P (tem_insn)
14868 : 16103 : && find_reg_fusage (tem_insn, USE, XEXP (note, 0))))
14869 : : {
14870 : 12733 : place = tem_insn;
14871 : :
14872 : : /* If we are doing a 3->2 combination, and we have a
14873 : : register which formerly died in i3 and was not used
14874 : : by i2, which now no longer dies in i3 and is used in
14875 : : i2 but does not die in i2, and place is between i2
14876 : : and i3, then we may need to move a link from place to
14877 : : i2. */
14878 : 3685 : if (i2 && DF_INSN_LUID (place) > DF_INSN_LUID (i2)
14879 : 78 : && from_insn
14880 : 78 : && DF_INSN_LUID (from_insn) > DF_INSN_LUID (i2)
14881 : 12811 : && reg_referenced_p (XEXP (note, 0), PATTERN (i2)))
14882 : : {
14883 : 78 : struct insn_link *links = LOG_LINKS (place);
14884 : 78 : LOG_LINKS (place) = NULL;
14885 : 78 : distribute_links (links);
14886 : : }
14887 : : break;
14888 : : }
14889 : :
14890 : 606843 : if (tem_insn == BB_HEAD (bb))
14891 : : break;
14892 : : }
14893 : :
14894 : : }
14895 : :
14896 : : /* If the register is set or already dead at PLACE, we needn't do
14897 : : anything with this note if it is still a REG_DEAD note.
14898 : : We check here if it is set at all, not if is it totally replaced,
14899 : : which is what `dead_or_set_p' checks, so also check for it being
14900 : : set partially. */
14901 : :
14902 : 6537914 : if (place && REG_NOTE_KIND (note) == REG_DEAD)
14903 : : {
14904 : 6500420 : unsigned int regno = REGNO (XEXP (note, 0));
14905 : 6500420 : reg_stat_type *rsp = ®_stat[regno];
14906 : :
14907 : 6500420 : if (dead_or_set_p (place, XEXP (note, 0))
14908 : 6500420 : || reg_bitfield_target_p (XEXP (note, 0), PATTERN (place)))
14909 : : {
14910 : : /* Unless the register previously died in PLACE, clear
14911 : : last_death. [I no longer understand why this is
14912 : : being done.] */
14913 : 3040883 : if (rsp->last_death != place)
14914 : 590227 : rsp->last_death = 0;
14915 : : place = 0;
14916 : : }
14917 : : else
14918 : 3459537 : rsp->last_death = place;
14919 : :
14920 : : /* If this is a death note for a hard reg that is occupying
14921 : : multiple registers, ensure that we are still using all
14922 : : parts of the object. If we find a piece of the object
14923 : : that is unused, we must arrange for an appropriate REG_DEAD
14924 : : note to be added for it. However, we can't just emit a USE
14925 : : and tag the note to it, since the register might actually
14926 : : be dead; so we recurse, and the recursive call then finds
14927 : : the previous insn that used this register. */
14928 : :
14929 : 3459537 : if (place && REG_NREGS (XEXP (note, 0)) > 1)
14930 : : {
14931 : 777 : unsigned int endregno = END_REGNO (XEXP (note, 0));
14932 : 777 : bool all_used = true;
14933 : 777 : unsigned int i;
14934 : :
14935 : 2331 : for (i = regno; i < endregno; i++)
14936 : 1554 : if ((! refers_to_regno_p (i, PATTERN (place))
14937 : 1554 : && ! find_regno_fusage (place, USE, i))
14938 : 3108 : || dead_or_set_regno_p (place, i))
14939 : : {
14940 : : all_used = false;
14941 : : break;
14942 : : }
14943 : :
14944 : 777 : if (! all_used)
14945 : : {
14946 : : /* Put only REG_DEAD notes for pieces that are
14947 : : not already dead or set. */
14948 : :
14949 : 0 : for (i = regno; i < endregno;
14950 : 0 : i += hard_regno_nregs (i, reg_raw_mode[i]))
14951 : : {
14952 : 0 : rtx piece = regno_reg_rtx[i];
14953 : 0 : basic_block bb = this_basic_block;
14954 : :
14955 : 0 : if (! dead_or_set_p (place, piece)
14956 : 0 : && ! reg_bitfield_target_p (piece,
14957 : 0 : PATTERN (place)))
14958 : : {
14959 : 0 : rtx new_note = alloc_reg_note (REG_DEAD, piece,
14960 : : NULL_RTX);
14961 : :
14962 : 0 : distribute_notes (new_note, place, place,
14963 : : NULL, NULL_RTX, NULL_RTX,
14964 : : NULL_RTX);
14965 : : }
14966 : 0 : else if (! refers_to_regno_p (i, PATTERN (place))
14967 : 0 : && ! find_regno_fusage (place, USE, i))
14968 : 0 : for (tem_insn = PREV_INSN (place); ;
14969 : 0 : tem_insn = PREV_INSN (tem_insn))
14970 : : {
14971 : 0 : if (!NONDEBUG_INSN_P (tem_insn))
14972 : : {
14973 : 0 : if (tem_insn == BB_HEAD (bb))
14974 : : break;
14975 : 0 : continue;
14976 : : }
14977 : 0 : if (dead_or_set_p (tem_insn, piece)
14978 : 0 : || reg_bitfield_target_p (piece,
14979 : 0 : PATTERN (tem_insn)))
14980 : : {
14981 : 0 : add_reg_note (tem_insn, REG_UNUSED, piece);
14982 : 0 : break;
14983 : : }
14984 : : }
14985 : : }
14986 : :
14987 : : place = 0;
14988 : : }
14989 : : }
14990 : : }
14991 : : break;
14992 : :
14993 : 0 : default:
14994 : : /* Any other notes should not be present at this point in the
14995 : : compilation. */
14996 : 0 : gcc_unreachable ();
14997 : : }
14998 : :
14999 : 4244636 : if (place)
15000 : : {
15001 : 4221211 : XEXP (note, 1) = REG_NOTES (place);
15002 : 4221211 : REG_NOTES (place) = note;
15003 : :
15004 : : /* Set added_notes_insn to the earliest insn we added a note to. */
15005 : 4221211 : if (added_notes_insn == 0
15006 : 4221211 : || DF_INSN_LUID (added_notes_insn) > DF_INSN_LUID (place))
15007 : 2810162 : added_notes_insn = place;
15008 : : }
15009 : :
15010 : 13094287 : if (place2)
15011 : : {
15012 : 0 : add_shallow_copy_of_reg_note (place2, note);
15013 : :
15014 : : /* Set added_notes_insn to the earliest insn we added a note to. */
15015 : 0 : if (added_notes_insn == 0
15016 : 0 : || DF_INSN_LUID (added_notes_insn) > DF_INSN_LUID (place2))
15017 : 0 : added_notes_insn = place2;
15018 : : }
15019 : : }
15020 : 9879608 : }
15021 : :
15022 : : /* Similarly to above, distribute the LOG_LINKS that used to be present on
15023 : : I3, I2, and I1 to new locations. This is also called to add a link
15024 : : pointing at I3 when I3's destination is changed.
15025 : :
15026 : : If START is nonnull and an insn, we know that the next location for each
15027 : : link is no earlier than START. LIMIT is the maximum number of nondebug
15028 : : instructions that can be scanned when looking for the next use of a
15029 : : definition. */
15030 : :
static void
distribute_links (struct insn_link *links, rtx_insn *start, int limit)
{
  struct insn_link *link, *next_link;

  /* Walk the list destructively: each LINK is either dropped or spliced
     onto some insn's LOG_LINKS, so remember the successor up front.  */
  for (link = links; link; link = next_link)
    {
      rtx_insn *place = 0;
      rtx_insn *insn;
      rtx set, reg;

      next_link = link->next;

      /* If the insn that this link points to is a NOTE, ignore it.  */
      if (NOTE_P (link->insn))
	continue;

      /* Find the SET in the defining insn whose destination is the
	 register this link tracks (link->regno).  A plain SET is taken
	 as-is; in a PARALLEL we scan the elements for a SET of that
	 register; anything else (USE, CLOBBER, ...) carries no link.  */
      set = 0;
      rtx pat = PATTERN (link->insn);
      if (GET_CODE (pat) == SET)
	set = pat;
      else if (GET_CODE (pat) == PARALLEL)
	{
	  int i;
	  for (i = 0; i < XVECLEN (pat, 0); i++)
	    {
	      set = XVECEXP (pat, 0, i);
	      if (GET_CODE (set) != SET)
		continue;

	      /* Strip wrappers to reach the underlying destination reg.  */
	      reg = SET_DEST (set);
	      while (GET_CODE (reg) == ZERO_EXTRACT
		     || GET_CODE (reg) == STRICT_LOW_PART
		     || GET_CODE (reg) == SUBREG)
		reg = XEXP (reg, 0);

	      if (!REG_P (reg))
		continue;

	      if (REGNO (reg) == link->regno)
		break;
	    }
	  /* No element of the PARALLEL sets the tracked register.  */
	  if (i == XVECLEN (pat, 0))
	    continue;
	}
      else
	continue;

      reg = SET_DEST (set);

      while (GET_CODE (reg) == ZERO_EXTRACT
	     || GET_CODE (reg) == STRICT_LOW_PART
	     || GET_CODE (reg) == SUBREG)
	reg = XEXP (reg, 0);

      /* A "set" of the program counter (a jump) never produces a value
	 a later insn could use through a link.  */
      if (reg == pc_rtx)
	continue;

      /* A LOG_LINK is defined as being placed on the first insn that uses
	 a register and points to the insn that sets the register.  Start
	 searching at the next insn after the target of the link and stop
	 when we reach a set of the register or the end of the basic block.

	 Note that this correctly handles the link that used to point from
	 I3 to I2.  Also note that not much searching is typically done here
	 since most links don't point very far away.  */

      /* COUNT bounds the number of nondebug insns scanned; when resuming
	 from START we continue from the count saved on the link so the
	 total work per link stays bounded by LIMIT.  */
      int count = 0;
      insn = start;
      if (!insn || NOTE_P (insn))
	insn = NEXT_INSN (link->insn);
      else
	count = link->insn_count;
      for (;
	   (insn && (this_basic_block->next_bb == EXIT_BLOCK_PTR_FOR_FN (cfun)
		     || BB_HEAD (this_basic_block->next_bb) != insn));
	   insn = NEXT_INSN (insn))
	if (DEBUG_INSN_P (insn))
	  continue;
	else if (INSN_P (insn) && reg_overlap_mentioned_p (reg, PATTERN (insn)))
	  {
	    /* First insn that reads the register: the link belongs here.  */
	    if (reg_referenced_p (reg, PATTERN (insn)))
	      place = insn;
	    break;
	  }
	else if (CALL_P (insn)
		 && find_reg_fusage (insn, USE, reg))
	  {
	    /* A call using the register via CALL_INSN_FUNCTION_USAGE.  */
	    place = insn;
	    break;
	  }
	else if (INSN_P (insn) && reg_set_p (reg, insn))
	  /* The register is redefined before any use; drop the link.  */
	  break;
	else if (count >= limit)
	  break;
	else
	  count += 1;
      link->insn_count = count;

      /* If we found a place to put the link, place it there unless there
	 is already a link to the same insn as LINK at that point.  */

      if (place)
	{
	  struct insn_link *link2;

	  FOR_EACH_LOG_LINK (link2, place)
	    if (link2->insn == link->insn && link2->regno == link->regno)
	      break;

	  if (link2 == NULL)
	    {
	      link->next = LOG_LINKS (place);
	      LOG_LINKS (place) = link;

	      /* Set added_links_insn to the earliest insn we added a
		 link to.  */
	      if (added_links_insn == 0
		  || DF_INSN_LUID (added_links_insn) > DF_INSN_LUID (place))
		added_links_insn = place;
	    }
	}
    }
}
15155 : :
15156 : : /* Check for any register or memory mentioned in EQUIV that is not
15157 : : mentioned in EXPR. This is used to restrict EQUIV to "specializations"
15158 : : of EXPR where some registers may have been replaced by constants. */
15159 : :
15160 : : static bool
15161 : 2552337 : unmentioned_reg_p (rtx equiv, rtx expr)
15162 : : {
15163 : 2552337 : subrtx_iterator::array_type array;
15164 : 6780526 : FOR_EACH_SUBRTX (iter, array, equiv, NONCONST)
15165 : : {
15166 : 5550696 : const_rtx x = *iter;
15167 : 3804934 : if ((REG_P (x) || MEM_P (x))
15168 : 5942514 : && !reg_mentioned_p (x, expr))
15169 : 1322507 : return true;
15170 : : }
15171 : 1229830 : return false;
15172 : 2552337 : }
15173 : :
15174 : : /* Make pseudo-to-pseudo copies after every hard-reg-to-pseudo-copy, because
15175 : : the reg-to-reg copy can usefully combine with later instructions, but we
15176 : : do not want to combine the hard reg into later instructions, for that
15177 : : restricts register allocation. */
15178 : : static void
15179 : 1044865 : make_more_copies (void)
15180 : : {
15181 : 1044865 : basic_block bb;
15182 : :
15183 : 11481657 : FOR_EACH_BB_FN (bb, cfun)
15184 : : {
15185 : 10436792 : rtx_insn *insn;
15186 : :
15187 : 134946343 : FOR_BB_INSNS (bb, insn)
15188 : : {
15189 : 124509551 : if (!NONDEBUG_INSN_P (insn))
15190 : 65596735 : continue;
15191 : :
15192 : 58912816 : rtx set = single_set (insn);
15193 : 58912816 : if (!set)
15194 : 3982802 : continue;
15195 : :
15196 : 54930014 : rtx dest = SET_DEST (set);
15197 : 54930014 : if (!(REG_P (dest) && !HARD_REGISTER_P (dest)))
15198 : 31409365 : continue;
15199 : :
15200 : 23520649 : rtx src = SET_SRC (set);
15201 : 23520649 : if (!(REG_P (src) && HARD_REGISTER_P (src)))
15202 : 20532553 : continue;
15203 : 2988096 : if (TEST_HARD_REG_BIT (fixed_reg_set, REGNO (src)))
15204 : 9498 : continue;
15205 : :
15206 : 2978598 : rtx new_reg = gen_reg_rtx (GET_MODE (dest));
15207 : :
15208 : : /* The "original" pseudo copies have important attributes
15209 : : attached, like pointerness. We want that for these copies
15210 : : too, for use by insn recognition and later passes. */
15211 : 2978598 : set_reg_attrs_from_value (new_reg, dest);
15212 : :
15213 : 2978598 : rtx_insn *new_insn = gen_move_insn (new_reg, src);
15214 : 2978598 : SET_SRC (set) = new_reg;
15215 : 2978598 : emit_insn_before (new_insn, insn);
15216 : 2978598 : df_insn_rescan (insn);
15217 : : }
15218 : : }
15219 : 1044865 : }
15220 : :
15221 : : /* Try combining insns through substitution. */
15222 : : static void
15223 : 1044865 : rest_of_handle_combine (void)
15224 : : {
15225 : 1044865 : make_more_copies ();
15226 : :
15227 : 1044865 : df_set_flags (DF_LR_RUN_DCE + DF_DEFER_INSN_RESCAN);
15228 : 1044865 : df_note_add_problem ();
15229 : 1044865 : df_analyze ();
15230 : :
15231 : 1044865 : regstat_init_n_sets_and_refs ();
15232 : 1044865 : reg_n_sets_max = max_reg_num ();
15233 : :
15234 : 1044865 : bool rebuild_jump_labels_after_combine
15235 : 1044865 : = combine_instructions (get_insns (), max_reg_num ());
15236 : :
15237 : : /* Combining insns may have turned an indirect jump into a
15238 : : direct jump. Rebuild the JUMP_LABEL fields of jumping
15239 : : instructions. */
15240 : 1044865 : if (rebuild_jump_labels_after_combine)
15241 : : {
15242 : 2506 : if (dom_info_available_p (CDI_DOMINATORS))
15243 : 0 : free_dominance_info (CDI_DOMINATORS);
15244 : 2506 : timevar_push (TV_JUMP);
15245 : 2506 : rebuild_jump_labels (get_insns ());
15246 : 2506 : cleanup_cfg (0);
15247 : 2506 : timevar_pop (TV_JUMP);
15248 : : }
15249 : :
15250 : 1044865 : regstat_free_n_sets_and_refs ();
15251 : 1044865 : }
15252 : :
namespace {

/* Static metadata describing the combine pass to the pass manager.  */
const pass_data pass_data_combine =
{
  RTL_PASS, /* type */
  "combine", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_COMBINE, /* tv_id */
  PROP_cfglayout, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  TODO_df_finish, /* todo_flags_finish */
};

/* Pass-manager wrapper that drives instruction combination.  */
class pass_combine : public rtl_opt_pass
{
public:
  pass_combine (gcc::context *ctxt)
    : rtl_opt_pass (pass_data_combine, ctxt)
  {}

  /* opt_pass methods: */
  /* The combiner only runs when optimizing.  */
  bool gate (function *) final override { return (optimize > 0); }
  unsigned int execute (function *) final override
    {
      rest_of_handle_combine ();
      return 0;
    }

}; // class pass_combine

} // anon namespace
15286 : :
15287 : : rtl_opt_pass *
15288 : 286437 : make_pass_combine (gcc::context *ctxt)
15289 : : {
15290 : 286437 : return new pass_combine (ctxt);
15291 : : }
|