Branch data Line data Source code
1 : : /* Optimize by combining instructions for GNU compiler.
2 : : Copyright (C) 1987-2025 Free Software Foundation, Inc.
3 : :
4 : : This file is part of GCC.
5 : :
6 : : GCC is free software; you can redistribute it and/or modify it under
7 : : the terms of the GNU General Public License as published by the Free
8 : : Software Foundation; either version 3, or (at your option) any later
9 : : version.
10 : :
11 : : GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 : : WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 : : FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 : : for more details.
15 : :
16 : : You should have received a copy of the GNU General Public License
17 : : along with GCC; see the file COPYING3. If not see
18 : : <http://www.gnu.org/licenses/>. */
19 : :
20 : : /* This module is essentially the "combiner" phase of the U. of Arizona
21 : : Portable Optimizer, but redone to work on our list-structured
22 : : representation for RTL instead of their string representation.
23 : :
24 : : The LOG_LINKS of each insn identify the most recent assignment
25 : : to each REG used in the insn. It is a list of previous insns,
26 : : each of which contains a SET for a REG that is used in this insn
27 : : and not used or set in between. LOG_LINKs never cross basic blocks.
28 : : They were set up by the preceding pass (lifetime analysis).
29 : :
30 : : We try to combine each pair of insns joined by a logical link.
31 : : We also try to combine triplets of insns A, B and C when C has
32 : : a link back to B and B has a link back to A. Likewise for a
33 : : small number of quadruplets of insns A, B, C and D for which
34 : : there's high likelihood of success.
35 : :
36 : : We check (with modified_between_p) to avoid combining in such a way
37 : : as to move a computation to a place where its value would be different.
38 : :
39 : : Combination is done by mathematically substituting the previous
40 : : insn(s) values for the regs they set into the expressions in
41 : : the later insns that refer to these regs. If the result is a valid insn
42 : : for our target machine, according to the machine description,
43 : : we install it, delete the earlier insns, and update the data flow
44 : : information (LOG_LINKS and REG_NOTES) for what we did.
45 : :
46 : : There are a few exceptions where the dataflow information isn't
47 : : completely updated (however this is only a local issue since it is
48 : : regenerated before the next pass that uses it):
49 : :
50 : : - reg_live_length is not updated
51 : : - reg_n_refs is not adjusted in the rare case when a register is
52 : : no longer required in a computation
53 : : - there are extremely rare cases (see distribute_notes) when a
54 : : REG_DEAD note is lost
55 : : - a LOG_LINKS entry that refers to an insn with multiple SETs may be
56 : : removed because there is no way to know which register it was
57 : : linking
58 : :
59 : : To simplify substitution, we combine only when the earlier insn(s)
60 : : consist of only a single assignment. To simplify updating afterward,
61 : : we never combine when a subroutine call appears in the middle. */
62 : :
63 : : #include "config.h"
64 : : #include "system.h"
65 : : #include "coretypes.h"
66 : : #include "backend.h"
67 : : #include "target.h"
68 : : #include "rtl.h"
69 : : #include "tree.h"
70 : : #include "cfghooks.h"
71 : : #include "predict.h"
72 : : #include "df.h"
73 : : #include "memmodel.h"
74 : : #include "tm_p.h"
75 : : #include "optabs.h"
76 : : #include "regs.h"
77 : : #include "emit-rtl.h"
78 : : #include "recog.h"
79 : : #include "cgraph.h"
80 : : #include "stor-layout.h"
81 : : #include "cfgrtl.h"
82 : : #include "cfgcleanup.h"
83 : : /* Include expr.h after insn-config.h so we get HAVE_conditional_move. */
84 : : #include "explow.h"
85 : : #include "insn-attr.h"
86 : : #include "rtlhooks-def.h"
87 : : #include "expr.h"
88 : : #include "tree-pass.h"
89 : : #include "valtrack.h"
90 : : #include "rtl-iter.h"
91 : : #include "print-rtl.h"
92 : : #include "function-abi.h"
93 : : #include "rtlanal.h"
94 : :
95 : : /* Number of attempts to combine instructions in this function. */
96 : :
97 : : static int combine_attempts;
98 : :
99 : : /* Number of attempts that got as far as substitution in this function. */
100 : :
101 : : static int combine_merges;
102 : :
103 : : /* Number of instructions combined with added SETs in this function. */
104 : :
105 : : static int combine_extras;
106 : :
107 : : /* Number of instructions combined in this function. */
108 : :
109 : : static int combine_successes;
110 : :
111 : : /* combine_instructions may try to replace the right hand side of the
112 : : second instruction with the value of an associated REG_EQUAL note
113 : : before throwing it at try_combine. That is problematic when there
114 : : is a REG_DEAD note for a register used in the old right hand side
115 : : and can cause distribute_notes to do wrong things. This is the
116 : : second instruction if it has been so modified, null otherwise. */
117 : :
118 : : static rtx_insn *i2mod;
119 : :
120 : : /* When I2MOD is nonnull, this is a copy of the old right hand side. */
121 : :
122 : : static rtx i2mod_old_rhs;
123 : :
124 : : /* When I2MOD is nonnull, this is a copy of the new right hand side. */
125 : :
126 : : static rtx i2mod_new_rhs;
127 : :
128 : : struct reg_stat_type {
129 : : /* Record last point of death of (hard or pseudo) register n. */
130 : : rtx_insn *last_death;
131 : :
132 : : /* Record last point of modification of (hard or pseudo) register n. */
133 : : rtx_insn *last_set;
134 : :
135 : : /* The next group of fields allows the recording of the last value assigned
136 : : to (hard or pseudo) register n. We use this information to see if an
137 : : operation being processed is redundant given a prior operation performed
138 : : on the register. For example, an `and' with a constant is redundant if
139 : : all the zero bits are already known to be turned off.
140 : :
141 : : We use an approach similar to that used by cse, but change it in the
142 : : following ways:
143 : :
144 : : (1) We do not want to reinitialize at each label.
145 : : (2) It is useful, but not critical, to know the actual value assigned
146 : : to a register. Often just its form is helpful.
147 : :
148 : : Therefore, we maintain the following fields:
149 : :
150 : : last_set_value the last value assigned
151 : : last_set_label records the value of label_tick when the
152 : : register was assigned
153 : : last_set_table_tick records the value of label_tick when a
154 : : value using the register is assigned
155 : : last_set_invalid set to true when it is not valid
156 : : to use the value of this register in some
157 : : register's value
158 : :
159 : : To understand the usage of these tables, it is important to understand
160 : : the distinction between the value in last_set_value being valid and
161 : : the register being validly contained in some other expression in the
162 : : table.
163 : :
164 : : (The next two parameters are out of date).
165 : :
166 : : reg_stat[i].last_set_value is valid if it is nonzero, and either
167 : : reg_n_sets[i] is 1 or reg_stat[i].last_set_label == label_tick.
168 : :
169 : : Register I may validly appear in any expression returned for the value
170 : : of another register if reg_n_sets[i] is 1. It may also appear in the
171 : : value for register J if reg_stat[j].last_set_invalid is zero, or
172 : : reg_stat[i].last_set_label < reg_stat[j].last_set_label.
173 : :
174 : : If an expression is found in the table containing a register which may
175 : : not validly appear in an expression, the register is replaced by
176 : : something that won't match, (clobber (const_int 0)). */
177 : :
178 : : /* Record last value assigned to (hard or pseudo) register n. */
179 : :
180 : : rtx last_set_value;
181 : :
182 : : /* Record the value of label_tick when an expression involving register n
183 : : is placed in last_set_value. */
184 : :
185 : : int last_set_table_tick;
186 : :
187 : : /* Record the value of label_tick when the value for register n is placed in
188 : : last_set_value. */
189 : :
190 : : int last_set_label;
191 : :
192 : : /* These fields are maintained in parallel with last_set_value and are
193 : : used to store the mode in which the register was last set, the bits
194 : : that were known to be zero when it was last set, and the number of
195 : : sign bits copies it was known to have when it was last set. */
196 : :
197 : : unsigned HOST_WIDE_INT last_set_nonzero_bits;
198 : : char last_set_sign_bit_copies;
199 : : ENUM_BITFIELD(machine_mode) last_set_mode : MACHINE_MODE_BITSIZE;
200 : :
201 : : /* Set to true if references to register n in expressions should not be
202 : : used. last_set_invalid is set nonzero when this register is being
203 : : assigned to and last_set_table_tick == label_tick. */
204 : :
205 : : bool last_set_invalid;
206 : :
207 : : /* Some registers that are set more than once and used in more than one
208 : : basic block are nevertheless always set in similar ways. For example,
209 : : a QImode register may be loaded from memory in two places on a machine
210 : : where byte loads zero extend.
211 : :
212 : : We record in the following fields if a register has some leading bits
213 : : that are always equal to the sign bit, and what we know about the
214 : : nonzero bits of a register, specifically which bits are known to be
215 : : zero.
216 : :
217 : : If an entry is zero, it means that we don't know anything special. */
218 : :
219 : : unsigned char sign_bit_copies;
220 : :
221 : : unsigned HOST_WIDE_INT nonzero_bits;
222 : :
223 : : /* Record the value of the label_tick when the last truncation
224 : : happened. The field truncated_to_mode is only valid if
225 : : truncation_label == label_tick. */
226 : :
227 : : int truncation_label;
228 : :
229 : : /* Record the last truncation seen for this register. If truncation
230 : : is not a nop to this mode we might be able to save an explicit
231 : : truncation if we know that value already contains a truncated
232 : : value. */
233 : :
234 : : ENUM_BITFIELD(machine_mode) truncated_to_mode : MACHINE_MODE_BITSIZE;
235 : : };
236 : :
237 : :
238 : : static vec<reg_stat_type> reg_stat;
239 : :
240 : : /* One plus the highest pseudo for which we track REG_N_SETS.
241 : : regstat_init_n_sets_and_refs allocates the array for REG_N_SETS just once,
242 : : but during combine_split_insns new pseudos can be created. As we don't have
243 : : updated DF information in that case, it is hard to initialize the array
244 : : after growing. The combiner only cares about REG_N_SETS (regno) == 1,
245 : : so instead of growing the arrays, just assume all newly created pseudos
246 : : during combine might be set multiple times. */
247 : :
248 : : static unsigned int reg_n_sets_max;
249 : :
250 : : /* Record the luid of the last insn that invalidated memory
251 : : (anything that writes memory, and subroutine calls, but not pushes). */
252 : :
253 : : static int mem_last_set;
254 : :
255 : : /* Record the luid of the last CALL_INSN
256 : : so we can tell whether a potential combination crosses any calls. */
257 : :
258 : : static int last_call_luid;
259 : :
260 : : /* When `subst' is called, this is the insn that is being modified
261 : : (by combining in a previous insn). The PATTERN of this insn
262 : : is still the old pattern partially modified and it should not be
263 : : looked at, but this may be used to examine the successors of the insn
264 : : to judge whether a simplification is valid. */
265 : :
266 : : static rtx_insn *subst_insn;
267 : :
268 : : /* This is the lowest LUID that `subst' is currently dealing with.
269 : : get_last_value will not return a value if the register was set at or
270 : : after this LUID. If not for this mechanism, we could get confused if
271 : : I2 or I1 in try_combine were an insn that used the old value of a register
272 : : to obtain a new value. In that case, we might erroneously get the
273 : : new value of the register when we wanted the old one. */
274 : :
275 : : static int subst_low_luid;
276 : :
277 : : /* This contains any hard registers that are used in newpat; reg_dead_at_p
278 : : must consider all these registers to be always live. */
279 : :
280 : : static HARD_REG_SET newpat_used_regs;
281 : :
282 : : /* This is an insn to which a LOG_LINKS entry has been added. If this
283 : : insn is the earlier than I2 or I3, combine should rescan starting at
284 : : that location. */
285 : :
286 : : static rtx_insn *added_links_insn;
287 : :
288 : : /* And similarly, for notes. */
289 : :
290 : : static rtx_insn *added_notes_insn;
291 : :
292 : : /* Basic block in which we are performing combines. */
293 : : static basic_block this_basic_block;
294 : : static bool optimize_this_for_speed_p;
295 : :
296 : :
297 : : /* Length of the currently allocated uid_insn_cost array. */
298 : :
299 : : static int max_uid_known;
300 : :
301 : : /* The following array records the insn_cost for every insn
302 : : in the instruction stream. */
303 : :
304 : : static int *uid_insn_cost;
305 : :
306 : : /* The following array records the LOG_LINKS for every insn in the
307 : : instruction stream as struct insn_link pointers. */
308 : :
309 : : struct insn_link {
310 : : rtx_insn *insn;
311 : : unsigned int regno;
312 : : struct insn_link *next;
313 : : };
314 : :
315 : : static struct insn_link **uid_log_links;
316 : :
317 : : static inline int
318 : 712845746 : insn_uid_check (const_rtx insn)
319 : : {
320 : 712845746 : int uid = INSN_UID (insn);
321 : 712845746 : gcc_checking_assert (uid <= max_uid_known);
322 : 712845746 : return uid;
323 : : }
324 : :
325 : : #define INSN_COST(INSN) (uid_insn_cost[insn_uid_check (INSN)])
326 : : #define LOG_LINKS(INSN) (uid_log_links[insn_uid_check (INSN)])
327 : :
328 : : #define FOR_EACH_LOG_LINK(L, INSN) \
329 : : for ((L) = LOG_LINKS (INSN); (L); (L) = (L)->next)
330 : :
331 : : /* Links for LOG_LINKS are allocated from this obstack. */
332 : :
333 : : static struct obstack insn_link_obstack;
334 : :
335 : : /* Allocate a link. */
336 : :
337 : : static inline struct insn_link *
338 : 36410791 : alloc_insn_link (rtx_insn *insn, unsigned int regno, struct insn_link *next)
339 : : {
340 : 36410791 : struct insn_link *l
341 : 36410791 : = (struct insn_link *) obstack_alloc (&insn_link_obstack,
342 : : sizeof (struct insn_link));
343 : 36410791 : l->insn = insn;
344 : 36410791 : l->regno = regno;
345 : 36410791 : l->next = next;
346 : 36410791 : return l;
347 : : }
348 : :
349 : : /* Incremented for each basic block. */
350 : :
351 : : static int label_tick;
352 : :
353 : : /* Reset to label_tick for each extended basic block in scanning order. */
354 : :
355 : : static int label_tick_ebb_start;
356 : :
357 : : /* Mode used to compute significance in reg_stat[].nonzero_bits. It is the
358 : : largest integer mode that can fit in HOST_BITS_PER_WIDE_INT. */
359 : :
360 : : static scalar_int_mode nonzero_bits_mode;
361 : :
362 : : /* Nonzero when reg_stat[].nonzero_bits and reg_stat[].sign_bit_copies can
363 : : be safely used. It is zero while computing them and after combine has
364 : : completed. This former test prevents propagating values based on
365 : : previously set values, which can be incorrect if a variable is modified
366 : : in a loop. */
367 : :
368 : : static int nonzero_sign_valid;
369 : :
370 : :
371 : : /* Record one modification to rtl structure
372 : : to be undone by storing old_contents into *where. */
373 : :
374 : : enum undo_kind { UNDO_RTX, UNDO_INT, UNDO_MODE, UNDO_LINKS };
375 : :
376 : : struct undo
377 : : {
378 : : struct undo *next;
379 : : enum undo_kind kind;
380 : : union { rtx r; int i; machine_mode m; struct insn_link *l; } old_contents;
381 : : union { rtx *r; int *i; int regno; struct insn_link **l; } where;
382 : : };
383 : :
384 : : /* Record a bunch of changes to be undone, up to MAX_UNDO of them.
385 : : num_undo says how many are currently recorded.
386 : :
387 : : other_insn is nonzero if we have modified some other insn in the process
388 : : of working on subst_insn. It must be verified too. */
389 : :
390 : : struct undobuf
391 : : {
392 : : struct undo *undos;
393 : : struct undo *frees;
394 : : rtx_insn *other_insn;
395 : : };
396 : :
397 : : static struct undobuf undobuf;
398 : :
399 : : /* Number of times the pseudo being substituted for
400 : : was found and replaced. */
401 : :
402 : : static int n_occurrences;
403 : :
404 : : static rtx reg_nonzero_bits_for_combine (const_rtx, scalar_int_mode,
405 : : scalar_int_mode,
406 : : unsigned HOST_WIDE_INT *);
407 : : static rtx reg_num_sign_bit_copies_for_combine (const_rtx, scalar_int_mode,
408 : : scalar_int_mode,
409 : : unsigned int *);
410 : : static void do_SUBST (rtx *, rtx);
411 : : static void do_SUBST_INT (int *, int);
412 : : static void init_reg_last (void);
413 : : static void setup_incoming_promotions (rtx_insn *);
414 : : static void set_nonzero_bits_and_sign_copies (rtx, const_rtx, void *);
415 : : static bool cant_combine_insn_p (rtx_insn *);
416 : : static bool can_combine_p (rtx_insn *, rtx_insn *, rtx_insn *, rtx_insn *,
417 : : rtx_insn *, rtx_insn *, rtx *, rtx *);
418 : : static bool combinable_i3pat (rtx_insn *, rtx *, rtx, rtx, rtx,
419 : : bool, bool, rtx *);
420 : : static bool contains_muldiv (rtx);
421 : : static rtx_insn *try_combine (rtx_insn *, rtx_insn *, rtx_insn *, rtx_insn *,
422 : : bool *, rtx_insn *);
423 : : static void undo_all (void);
424 : : static void undo_commit (void);
425 : : static rtx *find_split_point (rtx *, rtx_insn *, bool);
426 : : static rtx subst (rtx, rtx, rtx, bool, bool, bool);
427 : : static rtx combine_simplify_rtx (rtx, machine_mode, bool, bool);
428 : : static rtx simplify_if_then_else (rtx);
429 : : static rtx simplify_set (rtx);
430 : : static rtx simplify_logical (rtx);
431 : : static rtx expand_compound_operation (rtx);
432 : : static const_rtx expand_field_assignment (const_rtx);
433 : : static rtx make_extraction (machine_mode, rtx, HOST_WIDE_INT, rtx,
434 : : unsigned HOST_WIDE_INT, bool, bool, bool);
435 : : static int get_pos_from_mask (unsigned HOST_WIDE_INT,
436 : : unsigned HOST_WIDE_INT *);
437 : : static rtx canon_reg_for_combine (rtx, rtx);
438 : : static rtx force_int_to_mode (rtx, scalar_int_mode, scalar_int_mode,
439 : : scalar_int_mode, unsigned HOST_WIDE_INT, bool);
440 : : static rtx force_to_mode (rtx, machine_mode,
441 : : unsigned HOST_WIDE_INT, bool);
442 : : static rtx if_then_else_cond (rtx, rtx *, rtx *);
443 : : static rtx known_cond (rtx, enum rtx_code, rtx, rtx);
444 : : static bool rtx_equal_for_field_assignment_p (rtx, rtx, bool = false);
445 : : static rtx make_field_assignment (rtx);
446 : : static rtx apply_distributive_law (rtx);
447 : : static rtx distribute_and_simplify_rtx (rtx, int);
448 : : static rtx simplify_and_const_int_1 (scalar_int_mode, rtx,
449 : : unsigned HOST_WIDE_INT);
450 : : static rtx simplify_and_const_int (rtx, scalar_int_mode, rtx,
451 : : unsigned HOST_WIDE_INT);
452 : : static bool merge_outer_ops (enum rtx_code *, HOST_WIDE_INT *, enum rtx_code,
453 : : HOST_WIDE_INT, machine_mode, bool *);
454 : : static rtx simplify_shift_const_1 (enum rtx_code, machine_mode, rtx, int);
455 : : static rtx simplify_shift_const (rtx, enum rtx_code, machine_mode, rtx,
456 : : int);
457 : : static int recog_for_combine (rtx *, rtx_insn *, rtx *, unsigned = 0, unsigned = 0);
458 : : static rtx gen_lowpart_for_combine (machine_mode, rtx);
459 : : static enum rtx_code simplify_compare_const (enum rtx_code, machine_mode,
460 : : rtx *, rtx *);
461 : : static enum rtx_code simplify_comparison (enum rtx_code, rtx *, rtx *);
462 : : static void update_table_tick (rtx);
463 : : static void record_value_for_reg (rtx, rtx_insn *, rtx);
464 : : static void check_promoted_subreg (rtx_insn *, rtx);
465 : : static void record_dead_and_set_regs_1 (rtx, const_rtx, void *);
466 : : static void record_dead_and_set_regs (rtx_insn *);
467 : : static bool get_last_value_validate (rtx *, rtx_insn *, int, bool);
468 : : static rtx get_last_value (const_rtx);
469 : : static void reg_dead_at_p_1 (rtx, const_rtx, void *);
470 : : static bool reg_dead_at_p (rtx, rtx_insn *);
471 : : static void move_deaths (rtx, rtx, int, rtx_insn *, rtx *);
472 : : static bool reg_bitfield_target_p (rtx, rtx);
473 : : static void distribute_notes (rtx, rtx_insn *, rtx_insn *, rtx_insn *,
474 : : rtx, rtx, rtx);
475 : : static void distribute_links (struct insn_link *);
476 : : static void mark_used_regs_combine (rtx);
477 : : static void record_promoted_value (rtx_insn *, rtx);
478 : : static bool unmentioned_reg_p (rtx, rtx);
479 : : static void record_truncated_values (rtx *, void *);
480 : : static bool reg_truncated_to_mode (machine_mode, const_rtx);
481 : : static rtx gen_lowpart_or_truncate (machine_mode, rtx);
482 : :
483 : :
484 : : /* It is not safe to use ordinary gen_lowpart in combine.
485 : : See comments in gen_lowpart_for_combine. */
486 : : #undef RTL_HOOKS_GEN_LOWPART
487 : : #define RTL_HOOKS_GEN_LOWPART gen_lowpart_for_combine
488 : :
489 : : /* Our implementation of gen_lowpart never emits a new pseudo. */
490 : : #undef RTL_HOOKS_GEN_LOWPART_NO_EMIT
491 : : #define RTL_HOOKS_GEN_LOWPART_NO_EMIT gen_lowpart_for_combine
492 : :
493 : : #undef RTL_HOOKS_REG_NONZERO_REG_BITS
494 : : #define RTL_HOOKS_REG_NONZERO_REG_BITS reg_nonzero_bits_for_combine
495 : :
496 : : #undef RTL_HOOKS_REG_NUM_SIGN_BIT_COPIES
497 : : #define RTL_HOOKS_REG_NUM_SIGN_BIT_COPIES reg_num_sign_bit_copies_for_combine
498 : :
499 : : #undef RTL_HOOKS_REG_TRUNCATED_TO_MODE
500 : : #define RTL_HOOKS_REG_TRUNCATED_TO_MODE reg_truncated_to_mode
501 : :
502 : : static const struct rtl_hooks combine_rtl_hooks = RTL_HOOKS_INITIALIZER;
503 : :
504 : :
505 : : /* Convenience wrapper for the canonicalize_comparison target hook.
506 : : Target hooks cannot use enum rtx_code. */
507 : : static inline void
508 : 21342621 : target_canonicalize_comparison (enum rtx_code *code, rtx *op0, rtx *op1,
509 : : bool op0_preserve_value)
510 : : {
511 : 21342621 : int code_int = (int)*code;
512 : 21342621 : targetm.canonicalize_comparison (&code_int, op0, op1, op0_preserve_value);
513 : 21342621 : *code = (enum rtx_code)code_int;
514 : 753005 : }
515 : :
516 : : /* Try to split PATTERN found in INSN. This returns NULL_RTX if
517 : : PATTERN cannot be split. Otherwise, it returns an insn sequence.
518 : : Updates OLD_NREGS with the max number of regs before the split
519 : : and NEW_NREGS after the split.
520 : : This is a wrapper around split_insns which ensures that the
521 : : reg_stat vector is made larger if the splitter creates a new
522 : : register. */
523 : :
524 : : static rtx_insn *
525 : 11048790 : combine_split_insns (rtx pattern, rtx_insn *insn,
526 : : unsigned int *old_nregs,
527 : : unsigned int *new_regs)
528 : : {
529 : 11048790 : rtx_insn *ret;
530 : 11048790 : unsigned int nregs;
531 : 11048790 : *old_nregs = max_reg_num ();
532 : 11048790 : ret = split_insns (pattern, insn);
533 : 11048790 : *new_regs = nregs = max_reg_num ();
534 : 22097580 : if (nregs > reg_stat.length ())
535 : 3422 : reg_stat.safe_grow_cleared (nregs, true);
536 : 11048790 : return ret;
537 : : }
538 : :
539 : : /* This is used by find_single_use to locate an rtx in LOC that
540 : : contains exactly one use of DEST, which is typically a REG.
541 : : It returns a pointer to the innermost rtx expression
542 : : containing DEST. Appearances of DEST that are being used to
543 : : totally replace it are not counted. */
544 : :
545 : : static rtx *
546 : 30186342 : find_single_use_1 (rtx dest, rtx *loc)
547 : : {
548 : 36521156 : rtx x = *loc;
549 : 36521156 : enum rtx_code code = GET_CODE (x);
550 : 36521156 : rtx *result = NULL;
551 : 36521156 : rtx *this_result;
552 : 36521156 : int i;
553 : 36521156 : const char *fmt;
554 : :
555 : 36521156 : switch (code)
556 : : {
557 : : case CONST:
558 : : case LABEL_REF:
559 : : case SYMBOL_REF:
560 : : CASE_CONST_ANY:
561 : : case CLOBBER:
562 : : return 0;
563 : :
564 : 6291000 : case SET:
565 : : /* If the destination is anything other than PC, a REG or a SUBREG
566 : : of a REG that occupies all of the REG, the insn uses DEST if
567 : : it is mentioned in the destination or the source. Otherwise, we
568 : : need just check the source. */
569 : 6291000 : if (GET_CODE (SET_DEST (x)) != PC
570 : 6291000 : && !REG_P (SET_DEST (x))
571 : 6291334 : && ! (GET_CODE (SET_DEST (x)) == SUBREG
572 : 334 : && REG_P (SUBREG_REG (SET_DEST (x)))
573 : 334 : && !read_modify_subreg_p (SET_DEST (x))))
574 : : break;
575 : :
576 : 6289888 : return find_single_use_1 (dest, &SET_SRC (x));
577 : :
578 : 44926 : case MEM:
579 : 44926 : case SUBREG:
580 : 44926 : return find_single_use_1 (dest, &XEXP (x, 0));
581 : :
582 : : default:
583 : : break;
584 : : }
585 : :
586 : : /* If it wasn't one of the common cases above, check each expression and
587 : : vector of this code. Look for a unique usage of DEST. */
588 : :
589 : 18191875 : fmt = GET_RTX_FORMAT (code);
590 : 48610069 : for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
591 : : {
592 : 30427254 : if (fmt[i] == 'e')
593 : : {
594 : 30089949 : if (dest == XEXP (x, i)
595 : 30089949 : || (REG_P (dest) && REG_P (XEXP (x, i))
596 : 752525 : && REGNO (dest) == REGNO (XEXP (x, i))))
597 : : this_result = loc;
598 : : else
599 : 23794278 : this_result = find_single_use_1 (dest, &XEXP (x, i));
600 : :
601 : 30089949 : if (result == NULL)
602 : : result = this_result;
603 : 42758 : else if (this_result)
604 : : /* Duplicate usage. */
605 : : return NULL;
606 : : }
607 : 337305 : else if (fmt[i] == 'E')
608 : : {
609 : 51908 : int j;
610 : :
611 : 158027 : for (j = XVECLEN (x, i) - 1; j >= 0; j--)
612 : : {
613 : 109940 : if (XVECEXP (x, i, j) == dest
614 : 109940 : || (REG_P (dest)
615 : 109940 : && REG_P (XVECEXP (x, i, j))
616 : 4441 : && REGNO (XVECEXP (x, i, j)) == REGNO (dest)))
617 : : this_result = loc;
618 : : else
619 : 109940 : this_result = find_single_use_1 (dest, &XVECEXP (x, i, j));
620 : :
621 : 109940 : if (result == NULL)
622 : : result = this_result;
623 : 18268 : else if (this_result)
624 : : return NULL;
625 : : }
626 : : }
627 : : }
628 : :
629 : : return result;
630 : : }
631 : :
632 : :
633 : : /* See if DEST, produced in INSN, is used only a single time in the
634 : : sequel. If so, return a pointer to the innermost rtx expression in which
635 : : it is used.
636 : :
637 : : If PLOC is nonzero, *PLOC is set to the insn containing the single use.
638 : :
639 : : Otherwise, we find the single use by finding an insn that has a
640 : : LOG_LINKS pointing at INSN and has a REG_DEAD note for DEST. If DEST is
641 : : only referenced once in that insn, we know that it must be the first
642 : : and last insn referencing DEST. */
643 : :
644 : : static rtx *
645 : 6740395 : find_single_use (rtx dest, rtx_insn *insn, rtx_insn **ploc)
646 : : {
647 : 6740395 : basic_block bb;
648 : 6740395 : rtx_insn *next;
649 : 6740395 : rtx *result;
650 : 6740395 : struct insn_link *link;
651 : :
652 : 6740395 : if (!REG_P (dest))
653 : : return 0;
654 : :
655 : 6740395 : bb = BLOCK_FOR_INSN (insn);
656 : 8843113 : for (next = NEXT_INSN (insn);
657 : 8843113 : next && BLOCK_FOR_INSN (next) == bb;
658 : 2102718 : next = NEXT_INSN (next))
659 : 8384842 : if (NONDEBUG_INSN_P (next) && dead_or_set_p (next, dest))
660 : : {
661 : 8207901 : FOR_EACH_LOG_LINK (link, next)
662 : 7352856 : if (link->insn == insn && link->regno == REGNO (dest))
663 : : break;
664 : :
665 : 7137169 : if (link)
666 : : {
667 : 6282124 : result = find_single_use_1 (dest, &PATTERN (next));
668 : 6282124 : if (ploc)
669 : 6282124 : *ploc = next;
670 : 6282124 : return result;
671 : : }
672 : : }
673 : :
674 : : return 0;
675 : : }
676 : :
677 : : /* Substitute NEWVAL, an rtx expression, into INTO, a place in some
678 : : insn. The substitution can be undone by undo_all. If INTO is already
679 : : set to NEWVAL, do not record this change. Because computing NEWVAL might
680 : : also call SUBST, we have to compute it before we put anything into
681 : : the undo table. */
682 : :
683 : : static void
684 : 772546346 : do_SUBST (rtx *into, rtx newval)
685 : : {
686 : 772546346 : struct undo *buf;
687 : 772546346 : rtx oldval = *into;
688 : :
689 : 772546346 : if (oldval == newval)
690 : : return;
691 : :
692 : : /* We'd like to catch as many invalid transformations here as
693 : : possible. Unfortunately, there are way too many mode changes
694 : : that are perfectly valid, so we'd waste too much effort for
695 : : little gain doing the checks here. Focus on catching invalid
696 : : transformations involving integer constants. */
697 : 88314905 : if (GET_MODE_CLASS (GET_MODE (oldval)) == MODE_INT
698 : 54654177 : && CONST_INT_P (newval))
699 : : {
700 : : /* Sanity check that we're replacing oldval with a CONST_INT
701 : : that is a valid sign-extension for the original mode. */
702 : 1786733 : gcc_assert (INTVAL (newval)
703 : : == trunc_int_for_mode (INTVAL (newval), GET_MODE (oldval)));
704 : :
705 : : /* Replacing the operand of a SUBREG or a ZERO_EXTEND with a
706 : : CONST_INT is not valid, because after the replacement, the
707 : : original mode would be gone. Unfortunately, we can't tell
708 : : when do_SUBST is called to replace the operand thereof, so we
709 : : perform this test on oldval instead, checking whether an
710 : : invalid replacement took place before we got here. */
711 : 1786733 : gcc_assert (!(GET_CODE (oldval) == SUBREG
712 : : && CONST_INT_P (SUBREG_REG (oldval))));
713 : 1786733 : gcc_assert (!(GET_CODE (oldval) == ZERO_EXTEND
714 : : && CONST_INT_P (XEXP (oldval, 0))));
715 : : }
716 : :
717 : 88314905 : if (undobuf.frees)
718 : 84401503 : buf = undobuf.frees, undobuf.frees = buf->next;
719 : : else
720 : 3913402 : buf = XNEW (struct undo);
721 : :
722 : 88314905 : buf->kind = UNDO_RTX;
723 : 88314905 : buf->where.r = into;
724 : 88314905 : buf->old_contents.r = oldval;
725 : 88314905 : *into = newval;
726 : :
727 : 88314905 : buf->next = undobuf.undos, undobuf.undos = buf;
728 : : }
729 : :
730 : : #define SUBST(INTO, NEWVAL) do_SUBST (&(INTO), (NEWVAL))
731 : :
732 : : /* Similar to SUBST, but NEWVAL is an int expression. Note that substitution
733 : : for the value of a HOST_WIDE_INT value (including CONST_INT) is
734 : : not safe. */
735 : :
736 : : static void
737 : 14756593 : do_SUBST_INT (int *into, int newval)
738 : : {
739 : 14756593 : struct undo *buf;
740 : 14756593 : int oldval = *into;
741 : :
742 : 14756593 : if (oldval == newval)
743 : : return;
744 : :
745 : 6515908 : if (undobuf.frees)
746 : 6018349 : buf = undobuf.frees, undobuf.frees = buf->next;
747 : : else
748 : 497559 : buf = XNEW (struct undo);
749 : :
750 : 6515908 : buf->kind = UNDO_INT;
751 : 6515908 : buf->where.i = into;
752 : 6515908 : buf->old_contents.i = oldval;
753 : 6515908 : *into = newval;
754 : :
755 : 6515908 : buf->next = undobuf.undos, undobuf.undos = buf;
756 : : }
757 : :
758 : : #define SUBST_INT(INTO, NEWVAL) do_SUBST_INT (&(INTO), (NEWVAL))
759 : :
760 : : /* Similar to SUBST, but just substitute the mode. This is used when
761 : : changing the mode of a pseudo-register, so that any other
762 : : references to the entry in the regno_reg_rtx array will change as
763 : : well. */
764 : :
765 : : static void
766 : 1362778 : subst_mode (int regno, machine_mode newval)
767 : : {
768 : 1362778 : struct undo *buf;
769 : 1362778 : rtx reg = regno_reg_rtx[regno];
770 : 1362778 : machine_mode oldval = GET_MODE (reg);
771 : :
772 : 1362778 : if (oldval == newval)
773 : : return;
774 : :
775 : 1362778 : if (undobuf.frees)
776 : 1288652 : buf = undobuf.frees, undobuf.frees = buf->next;
777 : : else
778 : 74126 : buf = XNEW (struct undo);
779 : :
780 : 1362778 : buf->kind = UNDO_MODE;
781 : 1362778 : buf->where.regno = regno;
782 : 1362778 : buf->old_contents.m = oldval;
783 : 1362778 : adjust_reg_mode (reg, newval);
784 : :
785 : 1362778 : buf->next = undobuf.undos, undobuf.undos = buf;
786 : : }
787 : :
788 : : /* Similar to SUBST, but NEWVAL is a LOG_LINKS expression. */
789 : :
790 : : static void
791 : 63428 : do_SUBST_LINK (struct insn_link **into, struct insn_link *newval)
792 : : {
793 : 63428 : struct undo *buf;
794 : 63428 : struct insn_link * oldval = *into;
795 : :
796 : 63428 : if (oldval == newval)
797 : : return;
798 : :
799 : 63428 : if (undobuf.frees)
800 : 60664 : buf = undobuf.frees, undobuf.frees = buf->next;
801 : : else
802 : 2764 : buf = XNEW (struct undo);
803 : :
804 : 63428 : buf->kind = UNDO_LINKS;
805 : 63428 : buf->where.l = into;
806 : 63428 : buf->old_contents.l = oldval;
807 : 63428 : *into = newval;
808 : :
809 : 63428 : buf->next = undobuf.undos, undobuf.undos = buf;
810 : : }
811 : :
812 : : #define SUBST_LINK(oldval, newval) do_SUBST_LINK (&oldval, newval)
813 : :
814 : : /* Subroutine of try_combine. Determine whether the replacement patterns
815 : : NEWPAT, NEWI2PAT and NEWOTHERPAT are cheaper according to insn_cost
816 : : than the original sequence I0, I1, I2, I3 and undobuf.other_insn. Note
817 : : that I0, I1 and/or NEWI2PAT may be NULL_RTX. Similarly, NEWOTHERPAT and
818 : : undobuf.other_insn may also both be NULL_RTX. Return false if the cost
819 : : of all the instructions can be estimated and the replacements are more
820 : : expensive than the original sequence. */
821 : :
822 : : static bool
823 : 3945811 : combine_validate_cost (rtx_insn *i0, rtx_insn *i1, rtx_insn *i2, rtx_insn *i3,
824 : : rtx newpat, rtx newi2pat, rtx newotherpat)
825 : : {
826 : 3945811 : int i0_cost, i1_cost, i2_cost, i3_cost;
827 : 3945811 : int new_i2_cost, new_i3_cost;
828 : 3945811 : int old_cost, new_cost;
829 : :
830 : : /* Lookup the original insn_costs. */
831 : 3945811 : i2_cost = INSN_COST (i2);
832 : 3945811 : i3_cost = INSN_COST (i3);
833 : :
834 : 3945811 : if (i1)
835 : : {
836 : 112960 : i1_cost = INSN_COST (i1);
837 : 112960 : if (i0)
838 : : {
839 : 5393 : i0_cost = INSN_COST (i0);
840 : 5259 : old_cost = (i0_cost > 0 && i1_cost > 0 && i2_cost > 0 && i3_cost > 0
841 : 10641 : ? i0_cost + i1_cost + i2_cost + i3_cost : 0);
842 : : }
843 : : else
844 : : {
845 : 101789 : old_cost = (i1_cost > 0 && i2_cost > 0 && i3_cost > 0
846 : 209354 : ? i1_cost + i2_cost + i3_cost : 0);
847 : : i0_cost = 0;
848 : : }
849 : : }
850 : : else
851 : : {
852 : 3832851 : old_cost = (i2_cost > 0 && i3_cost > 0) ? i2_cost + i3_cost : 0;
853 : : i1_cost = i0_cost = 0;
854 : : }
855 : :
856 : : /* If we have split a PARALLEL I2 to I1,I2, we have counted its cost twice;
857 : : correct that. */
858 : 3945811 : if (old_cost && i1 && INSN_UID (i1) == INSN_UID (i2))
859 : 1222 : old_cost -= i1_cost;
860 : :
861 : :
862 : : /* Calculate the replacement insn_costs. */
863 : 3945811 : rtx tmp = PATTERN (i3);
864 : 3945811 : PATTERN (i3) = newpat;
865 : 3945811 : int tmpi = INSN_CODE (i3);
866 : 3945811 : INSN_CODE (i3) = -1;
867 : 3945811 : new_i3_cost = insn_cost (i3, optimize_this_for_speed_p);
868 : 3945811 : PATTERN (i3) = tmp;
869 : 3945811 : INSN_CODE (i3) = tmpi;
870 : 3945811 : if (newi2pat)
871 : : {
872 : 201357 : tmp = PATTERN (i2);
873 : 201357 : PATTERN (i2) = newi2pat;
874 : 201357 : tmpi = INSN_CODE (i2);
875 : 201357 : INSN_CODE (i2) = -1;
876 : 201357 : new_i2_cost = insn_cost (i2, optimize_this_for_speed_p);
877 : 201357 : PATTERN (i2) = tmp;
878 : 201357 : INSN_CODE (i2) = tmpi;
879 : 201357 : new_cost = (new_i2_cost > 0 && new_i3_cost > 0)
880 : 201357 : ? new_i2_cost + new_i3_cost : 0;
881 : : }
882 : : else
883 : : {
884 : : new_cost = new_i3_cost;
885 : : new_i2_cost = 0;
886 : : }
887 : :
888 : 3945811 : if (undobuf.other_insn)
889 : : {
890 : 202000 : int old_other_cost, new_other_cost;
891 : :
892 : 202000 : old_other_cost = INSN_COST (undobuf.other_insn);
893 : 202000 : tmp = PATTERN (undobuf.other_insn);
894 : 202000 : PATTERN (undobuf.other_insn) = newotherpat;
895 : 202000 : tmpi = INSN_CODE (undobuf.other_insn);
896 : 202000 : INSN_CODE (undobuf.other_insn) = -1;
897 : 202000 : new_other_cost = insn_cost (undobuf.other_insn,
898 : : optimize_this_for_speed_p);
899 : 202000 : PATTERN (undobuf.other_insn) = tmp;
900 : 202000 : INSN_CODE (undobuf.other_insn) = tmpi;
901 : 202000 : if (old_other_cost > 0 && new_other_cost > 0)
902 : : {
903 : 202000 : old_cost += old_other_cost;
904 : 202000 : new_cost += new_other_cost;
905 : : }
906 : : else
907 : : old_cost = 0;
908 : : }
909 : :
910 : : /* Disallow this combination if both new_cost and old_cost are greater than
911 : : zero, and new_cost is greater than old cost. */
912 : 3945811 : bool reject = old_cost > 0 && new_cost > old_cost;
913 : :
914 : 3945811 : if (dump_file)
915 : : {
916 : 484 : fprintf (dump_file, "%s combination of insns ",
917 : : reject ? "rejecting" : "allowing");
918 : 244 : if (i0)
919 : 0 : fprintf (dump_file, "%d, ", INSN_UID (i0));
920 : 244 : if (i1 && INSN_UID (i1) != INSN_UID (i2))
921 : 1 : fprintf (dump_file, "%d, ", INSN_UID (i1));
922 : 244 : fprintf (dump_file, "%d and %d\n", INSN_UID (i2), INSN_UID (i3));
923 : :
924 : 244 : fprintf (dump_file, "original costs ");
925 : 244 : if (i0)
926 : 0 : fprintf (dump_file, "%d + ", i0_cost);
927 : 244 : if (i1 && INSN_UID (i1) != INSN_UID (i2))
928 : 1 : fprintf (dump_file, "%d + ", i1_cost);
929 : 244 : fprintf (dump_file, "%d + %d = %d\n", i2_cost, i3_cost, old_cost);
930 : :
931 : 244 : if (newi2pat)
932 : 19 : fprintf (dump_file, "replacement costs %d + %d = %d\n",
933 : : new_i2_cost, new_i3_cost, new_cost);
934 : : else
935 : 225 : fprintf (dump_file, "replacement cost %d\n", new_cost);
936 : : }
937 : :
938 : 3945811 : if (reject)
939 : : return false;
940 : :
941 : : /* Update the uid_insn_cost array with the replacement costs. */
942 : 3739565 : INSN_COST (i2) = new_i2_cost;
943 : 3739565 : INSN_COST (i3) = new_i3_cost;
944 : 3739565 : if (i1)
945 : : {
946 : 95831 : INSN_COST (i1) = 0;
947 : 95831 : if (i0)
948 : 5264 : INSN_COST (i0) = 0;
949 : : }
950 : :
951 : : return true;
952 : : }
953 : :
954 : :
955 : : /* Delete any insns that copy a register to itself.
956 : : Return true if the CFG was changed. */
957 : :
958 : : static bool
959 : 965627 : delete_noop_moves (void)
960 : : {
961 : 965627 : rtx_insn *insn, *next;
962 : 965627 : basic_block bb;
963 : :
964 : 965627 : bool edges_deleted = false;
965 : :
966 : 10685306 : FOR_EACH_BB_FN (bb, cfun)
967 : : {
968 : 125670504 : for (insn = BB_HEAD (bb); insn != NEXT_INSN (BB_END (bb)); insn = next)
969 : : {
970 : 115950825 : next = NEXT_INSN (insn);
971 : 115950825 : if (INSN_P (insn) && noop_move_p (insn))
972 : : {
973 : 5661 : if (dump_file)
974 : 0 : fprintf (dump_file, "deleting noop move %d\n", INSN_UID (insn));
975 : :
976 : 5661 : edges_deleted |= delete_insn_and_edges (insn);
977 : : }
978 : : }
979 : : }
980 : :
981 : 965627 : return edges_deleted;
982 : : }
983 : :
984 : :
985 : : /* Return false if we do not want to (or cannot) combine DEF. */
986 : : static bool
987 : 40120982 : can_combine_def_p (df_ref def)
988 : : {
989 : : /* Do not consider if it is pre/post modification in MEM. */
990 : 40120982 : if (DF_REF_FLAGS (def) & DF_REF_PRE_POST_MODIFY)
991 : : return false;
992 : :
993 : 38487351 : unsigned int regno = DF_REF_REGNO (def);
994 : :
995 : : /* Do not combine frame pointer adjustments. */
996 : 38487351 : if ((regno == FRAME_POINTER_REGNUM
997 : 0 : && (!reload_completed || frame_pointer_needed))
998 : 2062 : || (!HARD_FRAME_POINTER_IS_FRAME_POINTER
999 : 38487351 : && regno == HARD_FRAME_POINTER_REGNUM
1000 : : && (!reload_completed || frame_pointer_needed))
1001 : 38485289 : || (FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
1002 : 0 : && regno == ARG_POINTER_REGNUM && fixed_regs[regno]))
1003 : 2062 : return false;
1004 : :
1005 : : return true;
1006 : : }
1007 : :
1008 : : /* Return false if we do not want to (or cannot) combine USE. */
1009 : : static bool
1010 : 72869521 : can_combine_use_p (df_ref use)
1011 : : {
1012 : : /* Do not consider the usage of the stack pointer by function call. */
1013 : 0 : if (DF_REF_FLAGS (use) & DF_REF_CALL_STACK_USAGE)
1014 : 0 : return false;
1015 : :
1016 : : return true;
1017 : : }
1018 : :
1019 : : /* Fill in log links field for all insns. */
1020 : :
1021 : : static void
1022 : 965627 : create_log_links (void)
1023 : : {
1024 : 965627 : basic_block bb;
1025 : 965627 : rtx_insn **next_use;
1026 : 965627 : rtx_insn *insn;
1027 : 965627 : df_ref def, use;
1028 : :
1029 : 965627 : next_use = XCNEWVEC (rtx_insn *, max_reg_num ());
1030 : :
1031 : : /* Pass through each block from the end, recording the uses of each
1032 : : register and establishing log links when def is encountered.
1033 : : Note that we do not clear next_use array in order to save time,
1034 : : so we have to test whether the use is in the same basic block as def.
1035 : :
1036 : : There are a few cases below when we do not consider the definition or
1037 : : usage -- these are taken from original flow.c did. Don't ask me why it is
1038 : : done this way; I don't know and if it works, I don't want to know. */
1039 : :
1040 : 10685306 : FOR_EACH_BB_FN (bb, cfun)
1041 : : {
1042 : 125655487 : FOR_BB_INSNS_REVERSE (bb, insn)
1043 : : {
1044 : 115935808 : if (!NONDEBUG_INSN_P (insn))
1045 : 57196513 : continue;
1046 : :
1047 : : /* Log links are created only once. */
1048 : 58739295 : gcc_assert (!LOG_LINKS (insn));
1049 : :
1050 : 476613628 : FOR_EACH_INSN_DEF (def, insn)
1051 : : {
1052 : 417874333 : unsigned int regno = DF_REF_REGNO (def);
1053 : 417874333 : rtx_insn *use_insn;
1054 : :
1055 : 417874333 : if (!next_use[regno])
1056 : 377753351 : continue;
1057 : :
1058 : 40120982 : if (!can_combine_def_p (def))
1059 : 1635693 : continue;
1060 : :
1061 : 38485289 : use_insn = next_use[regno];
1062 : 38485289 : next_use[regno] = NULL;
1063 : :
1064 : 38485289 : if (BLOCK_FOR_INSN (use_insn) != bb)
1065 : 2153232 : continue;
1066 : :
1067 : : /* flow.c claimed:
1068 : :
1069 : : We don't build a LOG_LINK for hard registers contained
1070 : : in ASM_OPERANDs. If these registers get replaced,
1071 : : we might wind up changing the semantics of the insn,
1072 : : even if reload can make what appear to be valid
1073 : : assignments later. */
1074 : 36332908 : if (regno < FIRST_PSEUDO_REGISTER
1075 : 36332057 : && asm_noperands (PATTERN (use_insn)) >= 0)
1076 : 851 : continue;
1077 : :
1078 : : /* Don't add duplicate links between instructions. */
1079 : 36331206 : struct insn_link *links;
1080 : 48900171 : FOR_EACH_LOG_LINK (links, use_insn)
1081 : 12568965 : if (insn == links->insn && regno == links->regno)
1082 : : break;
1083 : :
1084 : 36331206 : if (!links)
1085 : 36331206 : LOG_LINKS (use_insn)
1086 : 72662412 : = alloc_insn_link (insn, regno, LOG_LINKS (use_insn));
1087 : : }
1088 : :
1089 : 131608816 : FOR_EACH_INSN_USE (use, insn)
1090 : 141247808 : if (can_combine_use_p (use))
1091 : 68378287 : next_use[DF_REF_REGNO (use)] = insn;
1092 : : }
1093 : : }
1094 : :
1095 : 965627 : free (next_use);
1096 : 965627 : }
1097 : :
1098 : : /* Walk the LOG_LINKS of insn B to see if we find a reference to A. Return
1099 : : true if we found a LOG_LINK that proves that A feeds B. This only works
1100 : : if there are no instructions between A and B which could have a link
1101 : : depending on A, since in that case we would not record a link for B. */
1102 : :
1103 : : static bool
1104 : 11563758 : insn_a_feeds_b (rtx_insn *a, rtx_insn *b)
1105 : : {
1106 : 11563758 : struct insn_link *links;
1107 : 14607924 : FOR_EACH_LOG_LINK (links, b)
1108 : 12290404 : if (links->insn == a)
1109 : : return true;
1110 : : return false;
1111 : : }
1112 : :
1113 : : /* Main entry point for combiner. F is the first insn of the function.
1114 : : NREGS is the first unused pseudo-reg number.
1115 : :
1116 : : Return nonzero if the CFG was changed (e.g. if the combiner has
1117 : : turned an indirect jump instruction into a direct jump). */
1118 : : static bool
1119 : 1008570 : combine_instructions (rtx_insn *f, unsigned int nregs)
1120 : : {
1121 : 1008570 : rtx_insn *insn, *next;
1122 : 1008570 : struct insn_link *links, *nextlinks;
1123 : 1008570 : rtx_insn *first;
1124 : 1008570 : basic_block last_bb;
1125 : :
1126 : 1008570 : bool new_direct_jump_p = false;
1127 : :
1128 : 2976905 : for (first = f; first && !NONDEBUG_INSN_P (first); )
1129 : 1968335 : first = NEXT_INSN (first);
1130 : 1008570 : if (!first)
1131 : : return false;
1132 : :
1133 : 965627 : combine_attempts = 0;
1134 : 965627 : combine_merges = 0;
1135 : 965627 : combine_extras = 0;
1136 : 965627 : combine_successes = 0;
1137 : :
1138 : 965627 : rtl_hooks = combine_rtl_hooks;
1139 : :
1140 : 965627 : reg_stat.safe_grow_cleared (nregs, true);
1141 : :
1142 : 965627 : init_recog_no_volatile ();
1143 : :
1144 : : /* Allocate array for insn info. */
1145 : 965627 : max_uid_known = get_max_uid ();
1146 : 965627 : uid_log_links = XCNEWVEC (struct insn_link *, max_uid_known + 1);
1147 : 965627 : uid_insn_cost = XCNEWVEC (int, max_uid_known + 1);
1148 : 965627 : gcc_obstack_init (&insn_link_obstack);
1149 : :
1150 : 965627 : nonzero_bits_mode = int_mode_for_size (HOST_BITS_PER_WIDE_INT, 0).require ();
1151 : :
1152 : : /* Don't use reg_stat[].nonzero_bits when computing it. This can cause
1153 : : problems when, for example, we have j <<= 1 in a loop. */
1154 : :
1155 : 965627 : nonzero_sign_valid = 0;
1156 : 965627 : label_tick = label_tick_ebb_start = 1;
1157 : :
1158 : : /* Scan all SETs and see if we can deduce anything about what
1159 : : bits are known to be zero for some registers and how many copies
1160 : : of the sign bit are known to exist for those registers.
1161 : :
1162 : : Also set any known values so that we can use it while searching
1163 : : for what bits are known to be set. */
1164 : :
1165 : 965627 : setup_incoming_promotions (first);
1166 : : /* Allow the entry block and the first block to fall into the same EBB.
1167 : : Conceptually the incoming promotions are assigned to the entry block. */
1168 : 965627 : last_bb = ENTRY_BLOCK_PTR_FOR_FN (cfun);
1169 : :
1170 : 965627 : create_log_links ();
1171 : 10685306 : FOR_EACH_BB_FN (this_basic_block, cfun)
1172 : : {
1173 : 9719679 : optimize_this_for_speed_p = optimize_bb_for_speed_p (this_basic_block);
1174 : 9719679 : last_call_luid = 0;
1175 : 9719679 : mem_last_set = -1;
1176 : :
1177 : 9719679 : label_tick++;
1178 : 9719679 : if (!single_pred_p (this_basic_block)
1179 : 9719679 : || single_pred (this_basic_block) != last_bb)
1180 : 4666240 : label_tick_ebb_start = label_tick;
1181 : 9719679 : last_bb = this_basic_block;
1182 : :
1183 : 125655487 : FOR_BB_INSNS (this_basic_block, insn)
1184 : 115935808 : if (INSN_P (insn) && BLOCK_FOR_INSN (insn))
1185 : : {
1186 : 100502120 : rtx links;
1187 : :
1188 : 100502120 : subst_low_luid = DF_INSN_LUID (insn);
1189 : 100502120 : subst_insn = insn;
1190 : :
1191 : 100502120 : note_stores (insn, set_nonzero_bits_and_sign_copies, insn);
1192 : 100502120 : record_dead_and_set_regs (insn);
1193 : :
1194 : 100502120 : if (AUTO_INC_DEC)
1195 : : for (links = REG_NOTES (insn); links; links = XEXP (links, 1))
1196 : : if (REG_NOTE_KIND (links) == REG_INC)
1197 : : set_nonzero_bits_and_sign_copies (XEXP (links, 0), NULL_RTX,
1198 : : insn);
1199 : :
1200 : : /* Record the current insn_cost of this instruction. */
1201 : 100502120 : INSN_COST (insn) = insn_cost (insn, optimize_this_for_speed_p);
1202 : 100502120 : if (dump_file)
1203 : : {
1204 : 1715 : fprintf (dump_file, "insn_cost %d for ", INSN_COST (insn));
1205 : 1715 : dump_insn_slim (dump_file, insn);
1206 : : }
1207 : : }
1208 : : }
1209 : :
1210 : 965627 : nonzero_sign_valid = 1;
1211 : :
1212 : : /* Now scan all the insns in forward order. */
1213 : 965627 : label_tick = label_tick_ebb_start = 1;
1214 : 965627 : init_reg_last ();
1215 : 965627 : setup_incoming_promotions (first);
1216 : 965627 : last_bb = ENTRY_BLOCK_PTR_FOR_FN (cfun);
1217 : 965627 : int max_combine = param_max_combine_insns;
1218 : :
1219 : 10685306 : FOR_EACH_BB_FN (this_basic_block, cfun)
1220 : : {
1221 : 9719679 : rtx_insn *last_combined_insn = NULL;
1222 : :
1223 : : /* Ignore instruction combination in basic blocks that are going to
1224 : : be removed as unreachable anyway. See PR82386. */
1225 : 9719679 : if (EDGE_COUNT (this_basic_block->preds) == 0)
1226 : 1830 : continue;
1227 : :
1228 : 9717849 : optimize_this_for_speed_p = optimize_bb_for_speed_p (this_basic_block);
1229 : 9717849 : last_call_luid = 0;
1230 : 9717849 : mem_last_set = -1;
1231 : :
1232 : 9717849 : label_tick++;
1233 : 9717849 : if (!single_pred_p (this_basic_block)
1234 : 9717849 : || single_pred (this_basic_block) != last_bb)
1235 : 4666061 : label_tick_ebb_start = label_tick;
1236 : 9717849 : last_bb = this_basic_block;
1237 : :
1238 : 9717849 : rtl_profile_for_bb (this_basic_block);
1239 : 9717849 : for (insn = BB_HEAD (this_basic_block);
1240 : 129827795 : insn != NEXT_INSN (BB_END (this_basic_block));
1241 : 116398442 : insn = next ? next : NEXT_INSN (insn))
1242 : : {
1243 : 120109946 : next = 0;
1244 : 120109946 : if (!NONDEBUG_INSN_P (insn))
1245 : 57385651 : continue;
1246 : :
1247 : : while (last_combined_insn
1248 : 62726492 : && (!NONDEBUG_INSN_P (last_combined_insn)
1249 : 53182076 : || last_combined_insn->deleted ()))
1250 : 2197 : last_combined_insn = PREV_INSN (last_combined_insn);
1251 : 62724295 : if (last_combined_insn == NULL_RTX
1252 : 53181374 : || BLOCK_FOR_INSN (last_combined_insn) != this_basic_block
1253 : 115905458 : || DF_INSN_LUID (last_combined_insn) <= DF_INSN_LUID (insn))
1254 : : last_combined_insn = insn;
1255 : :
1256 : : /* See if we know about function return values before this
1257 : : insn based upon SUBREG flags. */
1258 : 62724295 : check_promoted_subreg (insn, PATTERN (insn));
1259 : :
1260 : : /* See if we can find hardregs and subreg of pseudos in
1261 : : narrower modes. This could help turning TRUNCATEs
1262 : : into SUBREGs. */
1263 : 62724295 : note_uses (&PATTERN (insn), record_truncated_values, NULL);
1264 : :
1265 : : /* Try this insn with each insn it links back to. */
1266 : :
1267 : 98296321 : FOR_EACH_LOG_LINK (links, insn)
1268 : 39165427 : if ((next = try_combine (insn, links->insn, NULL,
1269 : : NULL, &new_direct_jump_p,
1270 : : last_combined_insn)) != 0)
1271 : : {
1272 : 3593401 : statistics_counter_event (cfun, "two-insn combine", 1);
1273 : 3593401 : goto retry;
1274 : : }
1275 : :
1276 : : /* Try each sequence of three linked insns ending with this one. */
1277 : :
1278 : 59130894 : if (max_combine >= 3)
1279 : 94142350 : FOR_EACH_LOG_LINK (links, insn)
1280 : : {
1281 : 35190513 : rtx_insn *link = links->insn;
1282 : :
1283 : : /* If the linked insn has been replaced by a note, then there
1284 : : is no point in pursuing this chain any further. */
1285 : 35190513 : if (NOTE_P (link))
1286 : 227 : continue;
1287 : :
1288 : 52597413 : FOR_EACH_LOG_LINK (nextlinks, link)
1289 : 17479872 : if ((next = try_combine (insn, link, nextlinks->insn,
1290 : : NULL, &new_direct_jump_p,
1291 : : last_combined_insn)) != 0)
1292 : : {
1293 : 72745 : statistics_counter_event (cfun, "three-insn combine", 1);
1294 : 72745 : goto retry;
1295 : : }
1296 : : }
1297 : :
1298 : : /* Try combining an insn with two different insns whose results it
1299 : : uses. */
1300 : 58951837 : if (max_combine >= 3)
1301 : 94037636 : FOR_EACH_LOG_LINK (links, insn)
1302 : 47408733 : for (nextlinks = links->next; nextlinks;
1303 : 12309446 : nextlinks = nextlinks->next)
1304 : 12322934 : if ((next = try_combine (insn, links->insn,
1305 : : nextlinks->insn, NULL,
1306 : : &new_direct_jump_p,
1307 : : last_combined_insn)) != 0)
1308 : :
1309 : : {
1310 : 13488 : statistics_counter_event (cfun, "three-insn combine", 1);
1311 : 13488 : goto retry;
1312 : : }
1313 : :
1314 : : /* Try four-instruction combinations. */
1315 : 58938349 : if (max_combine >= 4)
1316 : 94014937 : FOR_EACH_LOG_LINK (links, insn)
1317 : : {
1318 : 35081787 : struct insn_link *next1;
1319 : 35081787 : rtx_insn *link = links->insn;
1320 : :
1321 : : /* If the linked insn has been replaced by a note, then there
1322 : : is no point in pursuing this chain any further. */
1323 : 35081787 : if (NOTE_P (link))
1324 : 227 : continue;
1325 : :
1326 : 52469063 : FOR_EACH_LOG_LINK (next1, link)
1327 : : {
1328 : 17388764 : rtx_insn *link1 = next1->insn;
1329 : 17388764 : if (NOTE_P (link1))
1330 : 72 : continue;
1331 : : /* I0 -> I1 -> I2 -> I3. */
1332 : 28595636 : FOR_EACH_LOG_LINK (nextlinks, link1)
1333 : 11208010 : if ((next = try_combine (insn, link, link1,
1334 : : nextlinks->insn,
1335 : : &new_direct_jump_p,
1336 : : last_combined_insn)) != 0)
1337 : : {
1338 : 1066 : statistics_counter_event (cfun, "four-insn combine", 1);
1339 : 1066 : goto retry;
1340 : : }
1341 : : /* I0, I1 -> I2, I2 -> I3. */
1342 : 21392445 : for (nextlinks = next1->next; nextlinks;
1343 : 4004819 : nextlinks = nextlinks->next)
1344 : 4005014 : if ((next = try_combine (insn, link, link1,
1345 : : nextlinks->insn,
1346 : : &new_direct_jump_p,
1347 : : last_combined_insn)) != 0)
1348 : : {
1349 : 195 : statistics_counter_event (cfun, "four-insn combine", 1);
1350 : 195 : goto retry;
1351 : : }
1352 : : }
1353 : :
1354 : 47385561 : for (next1 = links->next; next1; next1 = next1->next)
1355 : : {
1356 : 12309121 : rtx_insn *link1 = next1->insn;
1357 : 12309121 : if (NOTE_P (link1))
1358 : 8 : continue;
1359 : : /* I0 -> I2; I1, I2 -> I3. */
1360 : 15523381 : FOR_EACH_LOG_LINK (nextlinks, link)
1361 : 3217935 : if ((next = try_combine (insn, link, link1,
1362 : : nextlinks->insn,
1363 : : &new_direct_jump_p,
1364 : : last_combined_insn)) != 0)
1365 : : {
1366 : 3667 : statistics_counter_event (cfun, "four-insn combine", 1);
1367 : 3667 : goto retry;
1368 : : }
1369 : : /* I0 -> I1; I1, I2 -> I3. */
1370 : 15727089 : FOR_EACH_LOG_LINK (nextlinks, link1)
1371 : 3421835 : if ((next = try_combine (insn, link, link1,
1372 : : nextlinks->insn,
1373 : : &new_direct_jump_p,
1374 : : last_combined_insn)) != 0)
1375 : : {
1376 : 192 : statistics_counter_event (cfun, "four-insn combine", 1);
1377 : 192 : goto retry;
1378 : : }
1379 : : }
1380 : : }
1381 : :
1382 : : /* Try this insn with each REG_EQUAL note it links back to. */
1383 : 94151738 : FOR_EACH_LOG_LINK (links, insn)
1384 : : {
1385 : 35138947 : rtx set, note;
1386 : 35138947 : rtx_insn *temp = links->insn;
1387 : 35138947 : if ((set = single_set (temp)) != 0
1388 : 34760580 : && (note = find_reg_equal_equiv_note (temp)) != 0
1389 : 2460729 : && (note = XEXP (note, 0), GET_CODE (note)) != EXPR_LIST
1390 : 2460729 : && ! side_effects_p (SET_SRC (set))
1391 : : /* Avoid using a register that may already been marked
1392 : : dead by an earlier instruction. */
1393 : 2460729 : && ! unmentioned_reg_p (note, SET_SRC (set))
1394 : 36327058 : && (GET_MODE (note) == VOIDmode
1395 : 26009 : ? SCALAR_INT_MODE_P (GET_MODE (SET_DEST (set)))
1396 : 1162102 : : (GET_MODE (SET_DEST (set)) == GET_MODE (note)
1397 : 1162074 : && (GET_CODE (SET_DEST (set)) != ZERO_EXTRACT
1398 : 0 : || (GET_MODE (XEXP (SET_DEST (set), 0))
1399 : : == GET_MODE (note))))))
1400 : : {
1401 : : /* Temporarily replace the set's source with the
1402 : : contents of the REG_EQUAL note. The insn will
1403 : : be deleted or recognized by try_combine. */
1404 : 1188066 : rtx orig_src = SET_SRC (set);
1405 : 1188066 : rtx orig_dest = SET_DEST (set);
1406 : 1188066 : if (GET_CODE (SET_DEST (set)) == ZERO_EXTRACT)
1407 : 0 : SET_DEST (set) = XEXP (SET_DEST (set), 0);
1408 : 1188066 : SET_SRC (set) = note;
1409 : 1188066 : i2mod = temp;
1410 : 1188066 : i2mod_old_rhs = copy_rtx (orig_src);
1411 : 1188066 : i2mod_new_rhs = copy_rtx (note);
1412 : 1188066 : next = try_combine (insn, i2mod, NULL, NULL,
1413 : : &new_direct_jump_p,
1414 : : last_combined_insn);
1415 : 1188066 : i2mod = NULL;
1416 : 1188066 : if (next)
1417 : : {
1418 : 26750 : statistics_counter_event (cfun, "insn-with-note combine", 1);
1419 : 26750 : goto retry;
1420 : : }
1421 : 1161316 : INSN_CODE (temp) = -1;
1422 : 1161316 : SET_SRC (set) = orig_src;
1423 : 1161316 : SET_DEST (set) = orig_dest;
1424 : : }
1425 : : }
1426 : :
1427 : 59012791 : if (!NOTE_P (insn))
1428 : 59012791 : record_dead_and_set_regs (insn);
1429 : :
1430 : 0 : retry:
1431 : 120109946 : ;
1432 : : }
1433 : : }
1434 : :
1435 : 965627 : default_rtl_profile ();
1436 : 965627 : clear_bb_flags ();
1437 : :
1438 : 965627 : if (purge_all_dead_edges ())
1439 : 1229 : new_direct_jump_p = true;
1440 : 965627 : if (delete_noop_moves ())
1441 : 0 : new_direct_jump_p = true;
1442 : :
1443 : : /* Clean up. */
1444 : 965627 : obstack_free (&insn_link_obstack, NULL);
1445 : 965627 : free (uid_log_links);
1446 : 965627 : free (uid_insn_cost);
1447 : 965627 : reg_stat.release ();
1448 : :
1449 : 965627 : {
1450 : 965627 : struct undo *undo, *next;
1451 : 5453478 : for (undo = undobuf.frees; undo; undo = next)
1452 : : {
1453 : 4487851 : next = undo->next;
1454 : 4487851 : free (undo);
1455 : : }
1456 : 965627 : undobuf.frees = 0;
1457 : : }
1458 : :
1459 : 965627 : statistics_counter_event (cfun, "attempts", combine_attempts);
1460 : 965627 : statistics_counter_event (cfun, "merges", combine_merges);
1461 : 965627 : statistics_counter_event (cfun, "extras", combine_extras);
1462 : 965627 : statistics_counter_event (cfun, "successes", combine_successes);
1463 : :
1464 : 965627 : nonzero_sign_valid = 0;
1465 : 965627 : rtl_hooks = general_rtl_hooks;
1466 : :
1467 : : /* Make recognizer allow volatile MEMs again. */
1468 : 965627 : init_recog ();
1469 : :
1470 : 965627 : return new_direct_jump_p;
1471 : : }
1472 : :
1473 : : /* Wipe the last_xxx fields of reg_stat in preparation for another pass. */
1474 : :
1475 : : static void
1476 : 965627 : init_reg_last (void)
1477 : : {
1478 : 965627 : unsigned int i;
1479 : 965627 : reg_stat_type *p;
1480 : :
1481 : 136567868 : FOR_EACH_VEC_ELT (reg_stat, i, p)
1482 : 135602241 : memset (p, 0, offsetof (reg_stat_type, sign_bit_copies));
1483 : 965627 : }
1484 : :
1485 : : /* Set up any promoted values for incoming argument registers. */
1486 : :
1487 : : static void
1488 : 1931254 : setup_incoming_promotions (rtx_insn *first)
1489 : : {
1490 : 1931254 : tree arg;
1491 : 1931254 : bool strictly_local = false;
1492 : :
1493 : 5270116 : for (arg = DECL_ARGUMENTS (current_function_decl); arg;
1494 : 3338862 : arg = DECL_CHAIN (arg))
1495 : : {
1496 : 3338862 : rtx x, reg = DECL_INCOMING_RTL (arg);
1497 : 3338862 : int uns1, uns3;
1498 : 3338862 : machine_mode mode1, mode2, mode3, mode4;
1499 : :
1500 : : /* Only continue if the incoming argument is in a register. */
1501 : 3338862 : if (!REG_P (reg))
1502 : 3338784 : continue;
1503 : :
1504 : : /* Determine, if possible, whether all call sites of the current
1505 : : function lie within the current compilation unit. (This does
1506 : : take into account the exporting of a function via taking its
1507 : : address, and so forth.) */
1508 : 2617116 : strictly_local
1509 : 2617116 : = cgraph_node::local_info_node (current_function_decl)->local;
1510 : :
1511 : : /* The mode and signedness of the argument before any promotions happen
1512 : : (equal to the mode of the pseudo holding it at that stage). */
1513 : 2617116 : mode1 = TYPE_MODE (TREE_TYPE (arg));
1514 : 2617116 : uns1 = TYPE_UNSIGNED (TREE_TYPE (arg));
1515 : :
1516 : : /* The mode and signedness of the argument after any source language and
1517 : : TARGET_PROMOTE_PROTOTYPES-driven promotions. */
1518 : 2617116 : mode2 = TYPE_MODE (DECL_ARG_TYPE (arg));
1519 : 2617116 : uns3 = TYPE_UNSIGNED (DECL_ARG_TYPE (arg));
1520 : :
1521 : : /* The mode and signedness of the argument as it is actually passed,
1522 : : see assign_parm_setup_reg in function.cc. */
1523 : 2617116 : mode3 = promote_function_mode (TREE_TYPE (arg), mode1, &uns3,
1524 : 2617116 : TREE_TYPE (cfun->decl), 0);
1525 : :
1526 : : /* The mode of the register in which the argument is being passed. */
1527 : 2617116 : mode4 = GET_MODE (reg);
1528 : :
1529 : : /* Eliminate sign extensions in the callee when:
1530 : : (a) A mode promotion has occurred; */
1531 : 2617116 : if (mode1 == mode3)
1532 : 2617038 : continue;
1533 : : /* (b) The mode of the register is the same as the mode of
1534 : : the argument as it is passed; */
1535 : 78 : if (mode3 != mode4)
1536 : 0 : continue;
1537 : : /* (c) There's no language level extension; */
1538 : 78 : if (mode1 == mode2)
1539 : : ;
1540 : : /* (c.1) All callers are from the current compilation unit. If that's
1541 : : the case we don't have to rely on an ABI, we only have to know
1542 : : what we're generating right now, and we know that we will do the
1543 : : mode1 to mode2 promotion with the given sign. */
1544 : 0 : else if (!strictly_local)
1545 : 0 : continue;
1546 : : /* (c.2) The combination of the two promotions is useful. This is
1547 : : true when the signs match, or if the first promotion is unsigned.
1548 : : In the later case, (sign_extend (zero_extend x)) is the same as
1549 : : (zero_extend (zero_extend x)), so make sure to force UNS3 true. */
1550 : 0 : else if (uns1)
1551 : 0 : uns3 = true;
1552 : 0 : else if (uns3)
1553 : 0 : continue;
1554 : :
1555 : : /* Record that the value was promoted from mode1 to mode3,
1556 : : so that any sign extension at the head of the current
1557 : : function may be eliminated. */
1558 : 78 : x = gen_rtx_CLOBBER (mode1, const0_rtx);
1559 : 78 : x = gen_rtx_fmt_e ((uns3 ? ZERO_EXTEND : SIGN_EXTEND), mode3, x);
1560 : 78 : record_value_for_reg (reg, first, x);
1561 : : }
1562 : 1931254 : }
1563 : :
1564 : : /* If MODE has a precision lower than PREC and SRC is a non-negative constant
1565 : : that would appear negative in MODE, sign-extend SRC for use in nonzero_bits
1566 : : because some machines (maybe most) will actually do the sign-extension and
1567 : : this is the conservative approach.
1568 : :
1569 : : ??? For 2.5, try to tighten up the MD files in this regard instead of this
1570 : : kludge. */
1571 : :
1572 : : static rtx
1573 : 0 : sign_extend_short_imm (rtx src, machine_mode mode, unsigned int prec)
1574 : : {
1575 : 0 : scalar_int_mode int_mode;
1576 : 0 : if (CONST_INT_P (src)
1577 : 0 : && is_a <scalar_int_mode> (mode, &int_mode)
1578 : 0 : && GET_MODE_PRECISION (int_mode) < prec
1579 : 0 : && INTVAL (src) > 0
1580 : 0 : && val_signbit_known_set_p (int_mode, INTVAL (src)))
1581 : 0 : src = GEN_INT (INTVAL (src) | ~GET_MODE_MASK (int_mode));
1582 : :
1583 : 0 : return src;
1584 : : }
1585 : :
1586 : : /* Update RSP for pseudo-register X from INSN's REG_EQUAL note (if one exists)
1587 : : and SET. */
1588 : :
1589 : : static void
1590 : 22316088 : update_rsp_from_reg_equal (reg_stat_type *rsp, rtx_insn *insn, const_rtx set,
1591 : : rtx x)
1592 : : {
1593 : 22316088 : rtx reg_equal_note = insn ? find_reg_equal_equiv_note (insn) : NULL_RTX;
1594 : 22316088 : unsigned HOST_WIDE_INT bits = 0;
1595 : 22316088 : rtx reg_equal = NULL, src = SET_SRC (set);
1596 : 22316088 : unsigned int num = 0;
1597 : :
1598 : 22316088 : if (reg_equal_note)
1599 : 938231 : reg_equal = XEXP (reg_equal_note, 0);
1600 : :
1601 : 22316088 : if (SHORT_IMMEDIATES_SIGN_EXTEND)
1602 : : {
1603 : : src = sign_extend_short_imm (src, GET_MODE (x), BITS_PER_WORD);
1604 : : if (reg_equal)
1605 : : reg_equal = sign_extend_short_imm (reg_equal, GET_MODE (x), BITS_PER_WORD);
1606 : : }
1607 : :
1608 : : /* Don't call nonzero_bits if it cannot change anything. */
1609 : 22316088 : if (rsp->nonzero_bits != HOST_WIDE_INT_M1U)
1610 : : {
1611 : 19340788 : machine_mode mode = GET_MODE (x);
1612 : 19340788 : if (GET_MODE_CLASS (mode) == MODE_INT
1613 : 19340788 : && HWI_COMPUTABLE_MODE_P (mode))
1614 : 19340656 : mode = nonzero_bits_mode;
1615 : 19340788 : bits = nonzero_bits (src, mode);
1616 : 19340788 : if (reg_equal && bits)
1617 : 891872 : bits &= nonzero_bits (reg_equal, mode);
1618 : 19340788 : rsp->nonzero_bits |= bits;
1619 : : }
1620 : :
1621 : : /* Don't call num_sign_bit_copies if it cannot change anything. */
1622 : 22316088 : if (rsp->sign_bit_copies != 1)
1623 : : {
1624 : 19204262 : num = num_sign_bit_copies (SET_SRC (set), GET_MODE (x));
1625 : 19204262 : if (reg_equal && maybe_ne (num, GET_MODE_PRECISION (GET_MODE (x))))
1626 : : {
1627 : 888770 : unsigned int numeq = num_sign_bit_copies (reg_equal, GET_MODE (x));
1628 : 888770 : if (num == 0 || numeq > num)
1629 : 19204262 : num = numeq;
1630 : : }
1631 : 19204262 : if (rsp->sign_bit_copies == 0 || num < rsp->sign_bit_copies)
1632 : 18527336 : rsp->sign_bit_copies = num;
1633 : : }
1634 : 22316088 : }
1635 : :
1636 : : /* Called via note_stores. If X is a pseudo that is narrower than
1637 : : HOST_BITS_PER_WIDE_INT and is being set, record what bits are known zero.
1638 : :
1639 : : If we are setting only a portion of X and we can't figure out what
1640 : : portion, assume all bits will be used since we don't know what will
1641 : : be happening.
1642 : :
1643 : : Similarly, set how many bits of X are known to be copies of the sign bit
1644 : : at all locations in the function. This is the smallest number implied
1645 : : by any set of X. */
1646 : :
1647 : : static void
1648 : 68646990 : set_nonzero_bits_and_sign_copies (rtx x, const_rtx set, void *data)
1649 : : {
1650 : 68646990 : rtx_insn *insn = (rtx_insn *) data;
1651 : 68646990 : scalar_int_mode mode;
1652 : :
1653 : 68646990 : if (REG_P (x)
1654 : 55641044 : && REGNO (x) >= FIRST_PSEUDO_REGISTER
1655 : : /* If this register is undefined at the start of the file, we can't
1656 : : say what its contents were. */
1657 : 55445436 : && ! REGNO_REG_SET_P
1658 : : (DF_LR_IN (ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb), REGNO (x))
1659 : 27603147 : && is_a <scalar_int_mode> (GET_MODE (x), &mode)
1660 : 91888153 : && HWI_COMPUTABLE_MODE_P (mode))
1661 : : {
1662 : 22523628 : reg_stat_type *rsp = ®_stat[REGNO (x)];
1663 : :
1664 : 22523628 : if (set == 0 || GET_CODE (set) == CLOBBER)
1665 : : {
1666 : 21654 : rsp->nonzero_bits = GET_MODE_MASK (mode);
1667 : 21654 : rsp->sign_bit_copies = 1;
1668 : 21654 : return;
1669 : : }
1670 : :
1671 : : /* If this register is being initialized using itself, and the
1672 : : register is uninitialized in this basic block, and there are
1673 : : no LOG_LINKS which set the register, then part of the
1674 : : register is uninitialized. In that case we can't assume
1675 : : anything about the number of nonzero bits.
1676 : :
1677 : : ??? We could do better if we checked this in
1678 : : reg_{nonzero_bits,num_sign_bit_copies}_for_combine. Then we
1679 : : could avoid making assumptions about the insn which initially
1680 : : sets the register, while still using the information in other
1681 : : insns. We would have to be careful to check every insn
1682 : : involved in the combination. */
1683 : :
1684 : 22501974 : if (insn
1685 : 21211680 : && reg_referenced_p (x, PATTERN (insn))
1686 : 24981086 : && !REGNO_REG_SET_P (DF_LR_IN (BLOCK_FOR_INSN (insn)),
1687 : : REGNO (x)))
1688 : : {
1689 : 282027 : struct insn_link *link;
1690 : :
1691 : 441608 : FOR_EACH_LOG_LINK (link, insn)
1692 : 351616 : if (dead_or_set_p (link->insn, x))
1693 : : break;
1694 : 282027 : if (!link)
1695 : : {
1696 : 89992 : rsp->nonzero_bits = GET_MODE_MASK (mode);
1697 : 89992 : rsp->sign_bit_copies = 1;
1698 : 89992 : return;
1699 : : }
1700 : : }
1701 : :
1702 : : /* If this is a complex assignment, see if we can convert it into a
1703 : : simple assignment. */
1704 : 22411982 : set = expand_field_assignment (set);
1705 : :
1706 : : /* If this is a simple assignment, or we have a paradoxical SUBREG,
1707 : : set what we know about X. */
1708 : :
1709 : 22411982 : if (SET_DEST (set) == x
1710 : 22411982 : || (paradoxical_subreg_p (SET_DEST (set))
1711 : 3210 : && SUBREG_REG (SET_DEST (set)) == x))
1712 : 22316088 : update_rsp_from_reg_equal (rsp, insn, set, x);
1713 : : else
1714 : : {
1715 : 95894 : rsp->nonzero_bits = GET_MODE_MASK (mode);
1716 : 95894 : rsp->sign_bit_copies = 1;
1717 : : }
1718 : : }
1719 : : }
1720 : :
1721 : : /* See if INSN can be combined into I3. PRED, PRED2, SUCC and SUCC2 are
1722 : : optionally insns that were previously combined into I3 or that will be
1723 : : combined into the merger of INSN and I3. The order is PRED, PRED2,
1724 : : INSN, SUCC, SUCC2, I3.
1725 : :
1726 : : Return false if the combination is not allowed for any reason.
1727 : :
1728 : : If the combination is allowed, *PDEST will be set to the single
1729 : : destination of INSN and *PSRC to the single source, and this function
1730 : : will return true. */
1731 : :
1732 : : static bool
1733 : 57059594 : can_combine_p (rtx_insn *insn, rtx_insn *i3, rtx_insn *pred ATTRIBUTE_UNUSED,
1734 : : rtx_insn *pred2 ATTRIBUTE_UNUSED, rtx_insn *succ, rtx_insn *succ2,
1735 : : rtx *pdest, rtx *psrc)
1736 : : {
1737 : 57059594 : int i;
1738 : 57059594 : const_rtx set = 0;
1739 : 57059594 : rtx src, dest;
1740 : 57059594 : rtx_insn *p;
1741 : 57059594 : rtx link;
1742 : 57059594 : bool all_adjacent = true;
1743 : 57059594 : bool (*is_volatile_p) (const_rtx);
1744 : :
1745 : 57059594 : if (succ)
1746 : : {
1747 : 12825854 : if (succ2)
1748 : : {
1749 : 1620587 : if (next_active_insn (succ2) != i3)
1750 : 169667 : all_adjacent = false;
1751 : 1620587 : if (next_active_insn (succ) != succ2)
1752 : 1835204 : all_adjacent = false;
1753 : : }
1754 : 11205267 : else if (next_active_insn (succ) != i3)
1755 : 1835204 : all_adjacent = false;
1756 : 12825854 : if (next_active_insn (insn) != succ)
1757 : 16333648 : all_adjacent = false;
1758 : : }
1759 : 44233740 : else if (next_active_insn (insn) != i3)
1760 : 16333648 : all_adjacent = false;
1761 : :
1762 : : /* Can combine only if previous insn is a SET of a REG or a SUBREG,
1763 : : or a PARALLEL consisting of such a SET and CLOBBERs.
1764 : :
1765 : : If INSN has CLOBBER parallel parts, ignore them for our processing.
1766 : : By definition, these happen during the execution of the insn. When it
1767 : : is merged with another insn, all bets are off. If they are, in fact,
1768 : : needed and aren't also supplied in I3, they may be added by
1769 : : recog_for_combine. Otherwise, it won't match.
1770 : :
1771 : : We can also ignore a SET whose SET_DEST is mentioned in a REG_UNUSED
1772 : : note.
1773 : :
1774 : : Get the source and destination of INSN. If more than one, can't
1775 : : combine. */
1776 : :
1777 : 57059594 : if (GET_CODE (PATTERN (insn)) == SET)
1778 : : set = PATTERN (insn);
1779 : 15127103 : else if (GET_CODE (PATTERN (insn)) == PARALLEL
1780 : 15127103 : && GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == SET)
1781 : : {
1782 : 45343000 : for (i = 0; i < XVECLEN (PATTERN (insn), 0); i++)
1783 : : {
1784 : 30687050 : rtx elt = XVECEXP (PATTERN (insn), 0, i);
1785 : :
1786 : 30687050 : switch (GET_CODE (elt))
1787 : : {
1788 : : /* This is important to combine floating point insns
1789 : : for the SH4 port. */
1790 : 107926 : case USE:
1791 : : /* Combining an isolated USE doesn't make sense.
1792 : : We depend here on combinable_i3pat to reject them. */
1793 : : /* The code below this loop only verifies that the inputs of
1794 : : the SET in INSN do not change. We call reg_set_between_p
1795 : : to verify that the REG in the USE does not change between
1796 : : I3 and INSN.
1797 : : If the USE in INSN was for a pseudo register, the matching
1798 : : insn pattern will likely match any register; combining this
1799 : : with any other USE would only be safe if we knew that the
1800 : : used registers have identical values, or if there was
1801 : : something to tell them apart, e.g. different modes. For
1802 : : now, we forgo such complicated tests and simply disallow
1803 : : combining of USES of pseudo registers with any other USE. */
1804 : 107926 : if (REG_P (XEXP (elt, 0))
1805 : 107926 : && GET_CODE (PATTERN (i3)) == PARALLEL)
1806 : : {
1807 : 224 : rtx i3pat = PATTERN (i3);
1808 : 224 : int i = XVECLEN (i3pat, 0) - 1;
1809 : 224 : unsigned int regno = REGNO (XEXP (elt, 0));
1810 : :
1811 : 458 : do
1812 : : {
1813 : 458 : rtx i3elt = XVECEXP (i3pat, 0, i);
1814 : :
1815 : 458 : if (GET_CODE (i3elt) == USE
1816 : 208 : && REG_P (XEXP (i3elt, 0))
1817 : 692 : && (REGNO (XEXP (i3elt, 0)) == regno
1818 : 182 : ? reg_set_between_p (XEXP (elt, 0),
1819 : 26 : PREV_INSN (insn), i3)
1820 : : : regno >= FIRST_PSEUDO_REGISTER))
1821 : 182 : return false;
1822 : : }
1823 : 276 : while (--i >= 0);
1824 : : }
1825 : : break;
1826 : :
1827 : : /* We can ignore CLOBBERs. */
1828 : : case CLOBBER:
1829 : : break;
1830 : :
1831 : 15690386 : case SET:
1832 : : /* Ignore SETs whose result isn't used but not those that
1833 : : have side-effects. */
1834 : 15690386 : if (find_reg_note (insn, REG_UNUSED, SET_DEST (elt))
1835 : 191857 : && insn_nothrow_p (insn)
1836 : 15868541 : && !side_effects_p (elt))
1837 : : break;
1838 : :
1839 : : /* If we have already found a SET, this is a second one and
1840 : : so we cannot combine with this insn. */
1841 : 15589406 : if (set)
1842 : : return false;
1843 : :
1844 : : set = elt;
1845 : : break;
1846 : :
1847 : : default:
1848 : : /* Anything else means we can't combine. */
1849 : : return false;
1850 : : }
1851 : : }
1852 : :
1853 : 14655950 : if (set == 0
1854 : : /* If SET_SRC is an ASM_OPERANDS we can't throw away these CLOBBERs,
1855 : : so don't do anything with it. */
1856 : 14655950 : || GET_CODE (SET_SRC (set)) == ASM_OPERANDS)
1857 : : return false;
1858 : : }
1859 : : else
1860 : : return false;
1861 : :
1862 : : if (set == 0)
1863 : : return false;
1864 : :
1865 : : /* The simplification in expand_field_assignment may call back to
1866 : : get_last_value, so set safe guard here. */
1867 : 56570176 : subst_low_luid = DF_INSN_LUID (insn);
1868 : :
1869 : 56570176 : set = expand_field_assignment (set);
1870 : 56570176 : src = SET_SRC (set), dest = SET_DEST (set);
1871 : :
1872 : : /* Do not eliminate user-specified register if it is in an
1873 : : asm input because we may break the register asm usage defined
1874 : : in GCC manual if allow to do so.
1875 : : Be aware that this may cover more cases than we expect but this
1876 : : should be harmless. */
1877 : 56037174 : if (REG_P (dest) && REG_USERVAR_P (dest) && HARD_REGISTER_P (dest)
1878 : 56570178 : && extract_asm_operands (PATTERN (i3)))
1879 : : return false;
1880 : :
1881 : : /* Don't eliminate a store in the stack pointer. */
1882 : 56570176 : if (dest == stack_pointer_rtx
1883 : : /* Don't combine with an insn that sets a register to itself if it has
1884 : : a REG_EQUAL note. This may be part of a LIBCALL sequence. */
1885 : 54656091 : || (rtx_equal_p (src, dest) && find_reg_note (insn, REG_EQUAL, NULL_RTX))
1886 : : /* Can't merge an ASM_OPERANDS. */
1887 : 54656091 : || GET_CODE (src) == ASM_OPERANDS
1888 : : /* Can't merge a function call. */
1889 : 54652809 : || GET_CODE (src) == CALL
1890 : : /* Don't eliminate a function call argument. */
1891 : 54652809 : || (CALL_P (i3)
1892 : 8737487 : && (find_reg_fusage (i3, USE, dest)
1893 : 186502 : || (REG_P (dest)
1894 : 186502 : && REGNO (dest) < FIRST_PSEUDO_REGISTER
1895 : 162 : && global_regs[REGNO (dest)])))
1896 : : /* Don't substitute into an incremented register. */
1897 : : || FIND_REG_INC_NOTE (i3, dest)
1898 : : || (succ && FIND_REG_INC_NOTE (succ, dest))
1899 : 54652809 : || (succ2 && FIND_REG_INC_NOTE (succ2, dest))
1900 : : /* Don't substitute into a non-local goto, this confuses CFG. */
1901 : 46101822 : || (JUMP_P (i3) && find_reg_note (i3, REG_NON_LOCAL_GOTO, NULL_RTX))
1902 : : /* Make sure that DEST is not used after INSN but before SUCC, or
1903 : : after SUCC and before SUCC2, or after SUCC2 but before I3. */
1904 : 46101101 : || (!all_adjacent
1905 : 11475012 : && ((succ2
1906 : 737038 : && (reg_used_between_p (dest, succ2, i3)
1907 : 717999 : || reg_used_between_p (dest, succ, succ2)))
1908 : 11437203 : || (!succ2 && succ && reg_used_between_p (dest, succ, i3))
1909 : 11205724 : || (!succ2 && !succ && reg_used_between_p (dest, insn, i3))
1910 : 11205724 : || (succ
1911 : : /* SUCC and SUCC2 can be split halves from a PARALLEL; in
1912 : : that case SUCC is not in the insn stream, so use SUCC2
1913 : : instead for this test. */
1914 : 9019410 : && reg_used_between_p (dest, insn,
1915 : : succ2
1916 : 699229 : && INSN_UID (succ) == INSN_UID (succ2)
1917 : : ? succ2 : succ))))
1918 : : /* Make sure that the value that is to be substituted for the register
1919 : : does not use any registers whose values alter in between. However,
1920 : : If the insns are adjacent, a use can't cross a set even though we
1921 : : think it might (this can happen for a sequence of insns each setting
1922 : : the same destination; last_set of that register might point to
1923 : : a NOTE). If INSN has a REG_EQUIV note, the register is always
1924 : : equivalent to the memory so the substitution is valid even if there
1925 : : are intervening stores. Also, don't move a volatile asm or
1926 : : UNSPEC_VOLATILE across any other insns. */
1927 : : || (! all_adjacent
1928 : 11205724 : && (((!MEM_P (src)
1929 : 3103089 : || ! find_reg_note (insn, REG_EQUIV, src))
1930 : 11091851 : && modified_between_p (src, insn, i3))
1931 : 10308844 : || (GET_CODE (src) == ASM_OPERANDS && MEM_VOLATILE_P (src))
1932 : 10308844 : || GET_CODE (src) == UNSPEC_VOLATILE))
1933 : : /* Don't combine across a CALL_INSN, because that would possibly
1934 : : change whether the life span of some REGs crosses calls or not,
1935 : : and it is a pain to update that information.
1936 : : Exception: if source is a constant, moving it later can't hurt.
1937 : : Accept that as a special case. */
1938 : 101494511 : || (DF_INSN_LUID (insn) < last_call_luid && ! CONSTANT_P (src)))
1939 : 11962421 : return false;
1940 : :
1941 : : /* DEST must be a REG. */
1942 : 44607755 : if (REG_P (dest))
1943 : : {
1944 : : /* If register alignment is being enforced for multi-word items in all
1945 : : cases except for parameters, it is possible to have a register copy
1946 : : insn referencing a hard register that is not allowed to contain the
1947 : : mode being copied and which would not be valid as an operand of most
1948 : : insns. Eliminate this problem by not combining with such an insn.
1949 : :
1950 : : Also, on some machines we don't want to extend the life of a hard
1951 : : register. */
1952 : :
1953 : 44080020 : if (REG_P (src)
1954 : 44080020 : && ((REGNO (dest) < FIRST_PSEUDO_REGISTER
1955 : 28783 : && !targetm.hard_regno_mode_ok (REGNO (dest), GET_MODE (dest)))
1956 : : /* Don't extend the life of a hard register unless it is
1957 : : user variable (if we have few registers) or it can't
1958 : : fit into the desired register (meaning something special
1959 : : is going on).
1960 : : Also avoid substituting a return register into I3, because
1961 : : reload can't handle a conflict with constraints of other
1962 : : inputs. */
1963 : 2508250 : || (REGNO (src) < FIRST_PSEUDO_REGISTER
1964 : 36666 : && !targetm.hard_regno_mode_ok (REGNO (src),
1965 : 36666 : GET_MODE (src)))))
1966 : 0 : return false;
1967 : : }
1968 : : else
1969 : : return false;
1970 : :
1971 : :
1972 : 44080020 : if (GET_CODE (PATTERN (i3)) == PARALLEL)
1973 : 34607555 : for (i = XVECLEN (PATTERN (i3), 0) - 1; i >= 0; i--)
1974 : 23453308 : if (GET_CODE (XVECEXP (PATTERN (i3), 0, i)) == CLOBBER)
1975 : : {
1976 : 10756665 : rtx reg = XEXP (XVECEXP (PATTERN (i3), 0, i), 0);
1977 : :
1978 : : /* If the clobber represents an earlyclobber operand, we must not
1979 : : substitute an expression containing the clobbered register.
1980 : : As we do not analyze the constraint strings here, we have to
1981 : : make the conservative assumption. However, if the register is
1982 : : a fixed hard reg, the clobber cannot represent any operand;
1983 : : we leave it up to the machine description to either accept or
1984 : : reject use-and-clobber patterns. */
1985 : 10756665 : if (!REG_P (reg)
1986 : 10387382 : || REGNO (reg) >= FIRST_PSEUDO_REGISTER
1987 : 21096737 : || !fixed_regs[REGNO (reg)])
1988 : 454981 : if (reg_overlap_mentioned_p (reg, src))
1989 : : return false;
1990 : : }
1991 : :
1992 : : /* If INSN contains anything volatile, or is an `asm' (whether volatile
1993 : : or not), reject, unless nothing volatile comes between it and I3 */
1994 : :
1995 : 44079373 : if (GET_CODE (src) == ASM_OPERANDS || volatile_refs_p (src))
1996 : : {
1997 : : /* Make sure neither succ nor succ2 contains a volatile reference. */
1998 : 683054 : if (succ2 != 0 && volatile_refs_p (PATTERN (succ2)))
1999 : : return false;
2000 : 683050 : if (succ != 0 && volatile_refs_p (PATTERN (succ)))
2001 : : return false;
2002 : : /* We'll check insns between INSN and I3 below. */
2003 : : }
2004 : :
2005 : : /* If INSN is an asm, and DEST is a hard register, reject, since it has
2006 : : to be an explicit register variable, and was chosen for a reason. */
2007 : :
2008 : 44032031 : if (GET_CODE (src) == ASM_OPERANDS
2009 : 44032031 : && REG_P (dest) && REGNO (dest) < FIRST_PSEUDO_REGISTER)
2010 : : return false;
2011 : :
2012 : : /* If INSN contains volatile references (specifically volatile MEMs),
2013 : : we cannot combine across any other volatile references.
2014 : : Even if INSN doesn't contain volatile references, any intervening
2015 : : volatile insn might affect machine state. */
2016 : :
2017 : 44032031 : is_volatile_p = volatile_refs_p (PATTERN (insn))
2018 : 44032031 : ? volatile_refs_p
2019 : : : volatile_insn_p;
2020 : :
2021 : 196580752 : for (p = NEXT_INSN (insn); p != i3; p = NEXT_INSN (p))
2022 : 108730745 : if (INSN_P (p) && p != succ && p != succ2 && is_volatile_p (PATTERN (p)))
2023 : : return false;
2024 : :
2025 : : /* If INSN contains an autoincrement or autodecrement, make sure that
2026 : : register is not used between there and I3, and not already used in
2027 : : I3 either. Neither must it be used in PRED or SUCC, if they exist.
2028 : : Also insist that I3 not be a jump if using LRA; if it were one
2029 : : and the incremented register were spilled, we would lose.
2030 : : Reload handles this correctly. */
2031 : :
2032 : 43817976 : if (AUTO_INC_DEC)
2033 : : for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
2034 : : if (REG_NOTE_KIND (link) == REG_INC
2035 : : && ((JUMP_P (i3) && targetm.lra_p ())
2036 : : || reg_used_between_p (XEXP (link, 0), insn, i3)
2037 : : || (pred != NULL_RTX
2038 : : && reg_overlap_mentioned_p (XEXP (link, 0), PATTERN (pred)))
2039 : : || (pred2 != NULL_RTX
2040 : : && reg_overlap_mentioned_p (XEXP (link, 0), PATTERN (pred2)))
2041 : : || (succ != NULL_RTX
2042 : : && reg_overlap_mentioned_p (XEXP (link, 0), PATTERN (succ)))
2043 : : || (succ2 != NULL_RTX
2044 : : && reg_overlap_mentioned_p (XEXP (link, 0), PATTERN (succ2)))
2045 : : || reg_overlap_mentioned_p (XEXP (link, 0), PATTERN (i3))))
2046 : : return false;
2047 : :
2048 : : /* If we get here, we have passed all the tests and the combination is
2049 : : to be allowed. */
2050 : :
2051 : 43817976 : *pdest = dest;
2052 : 43817976 : *psrc = src;
2053 : :
2054 : 43817976 : return true;
2055 : : }
2056 : :
2057 : : /* LOC is the location within I3 that contains its pattern or the component
2058 : : of a PARALLEL of the pattern. We validate that it is valid for combining.
2059 : :
2060 : : One problem is if I3 modifies its output, as opposed to replacing it
2061 : : entirely, we can't allow the output to contain I2DEST, I1DEST or I0DEST as
2062 : : doing so would produce an insn that is not equivalent to the original insns.
2063 : :
2064 : : Consider:
2065 : :
2066 : : (set (reg:DI 101) (reg:DI 100))
2067 : : (set (subreg:SI (reg:DI 101) 0) <foo>)
2068 : :
2069 : : This is NOT equivalent to:
2070 : :
2071 : : (parallel [(set (subreg:SI (reg:DI 100) 0) <foo>)
2072 : : (set (reg:DI 101) (reg:DI 100))])
2073 : :
2074 : : Not only does this modify 100 (in which case it might still be valid
2075 : : if 100 were dead in I2), it sets 101 to the ORIGINAL value of 100.
2076 : :
2077 : : We can also run into a problem if I2 sets a register that I1
2078 : : uses and I1 gets directly substituted into I3 (not via I2). In that
2079 : : case, we would be getting the wrong value of I2DEST into I3, so we
2080 : : must reject the combination. This case occurs when I2 and I1 both
2081 : : feed into I3, rather than when I1 feeds into I2, which feeds into I3.
2082 : : If I1_NOT_IN_SRC is nonzero, it means that finding I1 in the source
2083 : : of a SET must prevent combination from occurring. The same situation
2084 : : can occur for I0, in which case I0_NOT_IN_SRC is set.
2085 : :
2086 : : Before doing the above check, we first try to expand a field assignment
2087 : : into a set of logical operations.
2088 : :
2089 : : If PI3_DEST_KILLED is nonzero, it is a pointer to a location in which
2090 : : we place a register that is both set and used within I3. If more than one
2091 : : such register is detected, we fail.
2092 : :
2093 : : Return true if the combination is valid, false otherwise. */
2094 : :
2095 : : static bool
2096 : 63927167 : combinable_i3pat (rtx_insn *i3, rtx *loc, rtx i2dest, rtx i1dest, rtx i0dest,
2097 : : bool i1_not_in_src, bool i0_not_in_src, rtx *pi3dest_killed)
2098 : : {
2099 : 63927167 : rtx x = *loc;
2100 : :
2101 : 63927167 : if (GET_CODE (x) == SET)
2102 : : {
2103 : 42662380 : rtx set = x ;
2104 : 42662380 : rtx dest = SET_DEST (set);
2105 : 42662380 : rtx src = SET_SRC (set);
2106 : 42662380 : rtx inner_dest = dest;
2107 : 42662380 : rtx subdest;
2108 : :
2109 : 42662380 : while (GET_CODE (inner_dest) == STRICT_LOW_PART
2110 : 43129342 : || GET_CODE (inner_dest) == SUBREG
2111 : 43129342 : || GET_CODE (inner_dest) == ZERO_EXTRACT)
2112 : 466962 : inner_dest = XEXP (inner_dest, 0);
2113 : :
2114 : : /* Check for the case where I3 modifies its output, as discussed
2115 : : above. We don't want to prevent pseudos from being combined
2116 : : into the address of a MEM, so only prevent the combination if
2117 : : i1 or i2 set the same MEM. */
2118 : 445470 : if ((inner_dest != dest &&
2119 : : (!MEM_P (inner_dest)
2120 : 632 : || rtx_equal_p (i2dest, inner_dest)
2121 : 632 : || (i1dest && rtx_equal_p (i1dest, inner_dest))
2122 : 632 : || (i0dest && rtx_equal_p (i0dest, inner_dest)))
2123 : 444838 : && (reg_overlap_mentioned_p (i2dest, inner_dest)
2124 : 321261 : || (i1dest && reg_overlap_mentioned_p (i1dest, inner_dest))
2125 : 319882 : || (i0dest && reg_overlap_mentioned_p (i0dest, inner_dest))))
2126 : :
2127 : : /* This is the same test done in can_combine_p except we can't test
2128 : : all_adjacent; we don't have to, since this instruction will stay
2129 : : in place, thus we are not considering increasing the lifetime of
2130 : : INNER_DEST.
2131 : :
2132 : : Also, if this insn sets a function argument, combining it with
2133 : : something that might need a spill could clobber a previous
2134 : : function argument; the all_adjacent test in can_combine_p also
2135 : : checks this; here, we do a more specific test for this case. */
2136 : :
2137 : 42537313 : || (REG_P (inner_dest)
2138 : 27787763 : && REGNO (inner_dest) < FIRST_PSEUDO_REGISTER
2139 : 6708825 : && !targetm.hard_regno_mode_ok (REGNO (inner_dest),
2140 : 6708825 : GET_MODE (inner_dest)))
2141 : 42537313 : || (i1_not_in_src && reg_overlap_mentioned_p (i1dest, src))
2142 : 85194503 : || (i0_not_in_src && reg_overlap_mentioned_p (i0dest, src)))
2143 : 153357 : return false;
2144 : :
2145 : : /* If DEST is used in I3, it is being killed in this insn, so
2146 : : record that for later. We have to consider paradoxical
2147 : : subregs here, since they kill the whole register, but we
2148 : : ignore partial subregs, STRICT_LOW_PART, etc.
2149 : : Never add REG_DEAD notes for the FRAME_POINTER_REGNUM or the
2150 : : STACK_POINTER_REGNUM, since these are always considered to be
2151 : : live. Similarly for ARG_POINTER_REGNUM if it is fixed. */
2152 : 42509023 : subdest = dest;
2153 : 42509023 : if (GET_CODE (subdest) == SUBREG && !partial_subreg_p (subdest))
2154 : 227877 : subdest = SUBREG_REG (subdest);
2155 : 42509023 : if (pi3dest_killed
2156 : 31291962 : && REG_P (subdest)
2157 : 20022629 : && reg_referenced_p (subdest, PATTERN (i3))
2158 : 1258641 : && REGNO (subdest) != FRAME_POINTER_REGNUM
2159 : 1258641 : && (HARD_FRAME_POINTER_IS_FRAME_POINTER
2160 : 1258641 : || REGNO (subdest) != HARD_FRAME_POINTER_REGNUM)
2161 : 1258641 : && (FRAME_POINTER_REGNUM == ARG_POINTER_REGNUM
2162 : 1258641 : || (REGNO (subdest) != ARG_POINTER_REGNUM
2163 : 0 : || ! fixed_regs [REGNO (subdest)]))
2164 : 43767664 : && REGNO (subdest) != STACK_POINTER_REGNUM)
2165 : : {
2166 : 1222747 : if (*pi3dest_killed)
2167 : : return false;
2168 : :
2169 : 1158658 : *pi3dest_killed = subdest;
2170 : : }
2171 : : }
2172 : :
2173 : 21264787 : else if (GET_CODE (x) == PARALLEL)
2174 : : {
2175 : : int i;
2176 : :
2177 : 32428993 : for (i = 0; i < XVECLEN (x, 0); i++)
2178 : 21944952 : if (! combinable_i3pat (i3, &XVECEXP (x, 0, i), i2dest, i1dest, i0dest,
2179 : : i1_not_in_src, i0_not_in_src, pi3dest_killed))
2180 : : return false;
2181 : : }
2182 : :
2183 : : return true;
2184 : : }
2185 : :
2186 : : /* Return true if X is an arithmetic expression that contains a multiplication
2187 : : and division. We don't count multiplications by powers of two here. */
2188 : :
2189 : : static bool
2190 : 16009457 : contains_muldiv (rtx x)
2191 : : {
2192 : 16646070 : switch (GET_CODE (x))
2193 : : {
2194 : : case MOD: case DIV: case UMOD: case UDIV:
2195 : : return true;
2196 : :
2197 : 473564 : case MULT:
2198 : 473564 : return ! (CONST_INT_P (XEXP (x, 1))
2199 : 151295 : && pow2p_hwi (UINTVAL (XEXP (x, 1))));
2200 : 16023256 : default:
2201 : 16023256 : if (BINARY_P (x))
2202 : 5571960 : return contains_muldiv (XEXP (x, 0))
2203 : 5571960 : || contains_muldiv (XEXP (x, 1));
2204 : :
2205 : 10451296 : if (UNARY_P (x))
2206 : 636613 : return contains_muldiv (XEXP (x, 0));
2207 : :
2208 : : return false;
2209 : : }
2210 : : }
2211 : :
2212 : : /* Determine whether INSN can be used in a combination. Return true if
2213 : : not. This is used in try_combine to detect early some cases where we
2214 : : can't perform combinations. */
2215 : :
2216 : : static bool
2217 : 155939087 : cant_combine_insn_p (rtx_insn *insn)
2218 : : {
2219 : 155939087 : rtx set;
2220 : 155939087 : rtx src, dest;
2221 : :
2222 : : /* If this isn't really an insn, we can't do anything.
2223 : : This can occur when flow deletes an insn that it has merged into an
2224 : : auto-increment address. */
2225 : 155939087 : if (!NONDEBUG_INSN_P (insn))
2226 : : return true;
2227 : :
2228 : : /* Never combine loads and stores involving hard regs that are likely
2229 : : to be spilled. The register allocator can usually handle such
2230 : : reg-reg moves by tying. If we allow the combiner to make
2231 : : substitutions of likely-spilled regs, reload might die.
2232 : : As an exception, we allow combinations involving fixed regs; these are
2233 : : not available to the register allocator so there's no risk involved. */
2234 : :
2235 : 155938693 : set = single_set (insn);
2236 : 155938693 : if (! set)
2237 : : return false;
2238 : 143348631 : src = SET_SRC (set);
2239 : 143348631 : dest = SET_DEST (set);
2240 : 143348631 : if (GET_CODE (src) == SUBREG)
2241 : 987444 : src = SUBREG_REG (src);
2242 : 143348631 : if (GET_CODE (dest) == SUBREG)
2243 : 1602636 : dest = SUBREG_REG (dest);
2244 : 39338919 : if (REG_P (src) && REG_P (dest)
2245 : 176249350 : && ((HARD_REGISTER_P (src)
2246 : 6494458 : && ! TEST_HARD_REG_BIT (fixed_reg_set, REGNO (src))
2247 : : #ifdef LEAF_REGISTERS
2248 : : && ! LEAF_REGISTERS [REGNO (src)])
2249 : : #else
2250 : : )
2251 : : #endif
2252 : 26693589 : || (HARD_REGISTER_P (dest)
2253 : 18784244 : && ! TEST_HARD_REG_BIT (fixed_reg_set, REGNO (dest))
2254 : 18531898 : && targetm.class_likely_spilled_p (REGNO_REG_CLASS (REGNO (dest))))))
2255 : 23108431 : return true;
2256 : :
2257 : : return false;
2258 : : }
2259 : :
2260 : : struct likely_spilled_retval_info
2261 : : {
2262 : : unsigned regno, nregs;
2263 : : unsigned mask;
2264 : : };
2265 : :
2266 : : /* Called via note_stores by likely_spilled_retval_p. Remove from info->mask
2267 : : hard registers that are known to be written to / clobbered in full. */
2268 : : static void
2269 : 193215 : likely_spilled_retval_1 (rtx x, const_rtx set, void *data)
2270 : : {
2271 : 193215 : struct likely_spilled_retval_info *const info =
2272 : : (struct likely_spilled_retval_info *) data;
2273 : 193215 : unsigned regno, nregs;
2274 : 193215 : unsigned new_mask;
2275 : :
2276 : 193215 : if (!REG_P (XEXP (set, 0)))
2277 : : return;
2278 : 193215 : regno = REGNO (x);
2279 : 193215 : if (regno >= info->regno + info->nregs)
2280 : : return;
2281 : 193215 : nregs = REG_NREGS (x);
2282 : 193215 : if (regno + nregs <= info->regno)
2283 : : return;
2284 : 193215 : new_mask = (2U << (nregs - 1)) - 1;
2285 : 193215 : if (regno < info->regno)
2286 : 0 : new_mask >>= info->regno - regno;
2287 : : else
2288 : 193215 : new_mask <<= regno - info->regno;
2289 : 193215 : info->mask &= ~new_mask;
2290 : : }
2291 : :
2292 : : /* Return true iff part of the return value is live during INSN, and
2293 : : it is likely spilled. This can happen when more than one insn is needed
2294 : : to copy the return value, e.g. when we consider to combine into the
2295 : : second copy insn for a complex value. */
2296 : :
2297 : : static bool
2298 : 44550741 : likely_spilled_retval_p (rtx_insn *insn)
2299 : : {
2300 : 44550741 : rtx_insn *use = BB_END (this_basic_block);
2301 : 44550741 : rtx reg;
2302 : 44550741 : rtx_insn *p;
2303 : 44550741 : unsigned regno, nregs;
2304 : : /* We assume here that no machine mode needs more than
2305 : : 32 hard registers when the value overlaps with a register
2306 : : for which TARGET_FUNCTION_VALUE_REGNO_P is true. */
2307 : 44550741 : unsigned mask;
2308 : 44550741 : struct likely_spilled_retval_info info;
2309 : :
2310 : 44550741 : if (!NONJUMP_INSN_P (use) || GET_CODE (PATTERN (use)) != USE || insn == use)
2311 : : return false;
2312 : 3132024 : reg = XEXP (PATTERN (use), 0);
2313 : 3132024 : if (!REG_P (reg) || !targetm.calls.function_value_regno_p (REGNO (reg)))
2314 : 0 : return false;
2315 : 3132024 : regno = REGNO (reg);
2316 : 3132024 : nregs = REG_NREGS (reg);
2317 : 3132024 : if (nregs == 1)
2318 : : return false;
2319 : 190697 : mask = (2U << (nregs - 1)) - 1;
2320 : :
2321 : : /* Disregard parts of the return value that are set later. */
2322 : 190697 : info.regno = regno;
2323 : 190697 : info.nregs = nregs;
2324 : 190697 : info.mask = mask;
2325 : 663598 : for (p = PREV_INSN (use); info.mask && p != insn; p = PREV_INSN (p))
2326 : 282204 : if (INSN_P (p))
2327 : 282204 : note_stores (p, likely_spilled_retval_1, &info);
2328 : 381392 : mask = info.mask;
2329 : :
2330 : : /* Check if any of the (probably) live return value registers is
2331 : : likely spilled. */
2332 : : nregs --;
2333 : 381392 : do
2334 : : {
2335 : 381392 : if ((mask & 1 << nregs)
2336 : 381392 : && targetm.class_likely_spilled_p (REGNO_REG_CLASS (regno + nregs)))
2337 : : return true;
2338 : 381380 : } while (nregs--);
2339 : : return false;
2340 : : }
2341 : :
2342 : : /* Adjust INSN after we made a change to its destination.
2343 : :
2344 : : Changing the destination can invalidate notes that say something about
2345 : : the results of the insn and a LOG_LINK pointing to the insn. */
2346 : :
2347 : : static void
2348 : 16157 : adjust_for_new_dest (rtx_insn *insn)
2349 : : {
2350 : : /* For notes, be conservative and simply remove them. */
2351 : 16157 : remove_reg_equal_equiv_notes (insn, true);
2352 : :
2353 : : /* The new insn will have a destination that was previously the destination
2354 : : of an insn just above it. Call distribute_links to make a LOG_LINK from
2355 : : the next use of that destination. */
2356 : :
2357 : 16157 : rtx set = single_set (insn);
2358 : 16157 : gcc_assert (set);
2359 : :
2360 : 16157 : rtx reg = SET_DEST (set);
2361 : :
2362 : 16157 : while (GET_CODE (reg) == ZERO_EXTRACT
2363 : 16157 : || GET_CODE (reg) == STRICT_LOW_PART
2364 : 32314 : || GET_CODE (reg) == SUBREG)
2365 : 0 : reg = XEXP (reg, 0);
2366 : 16157 : gcc_assert (REG_P (reg));
2367 : :
2368 : 16157 : distribute_links (alloc_insn_link (insn, REGNO (reg), NULL));
2369 : :
2370 : 16157 : df_insn_rescan (insn);
2371 : 16157 : }
2372 : :
2373 : : /* Return TRUE if combine can reuse reg X in mode MODE.
2374 : : ADDED_SETS is trueif the original set is still required. */
2375 : : static bool
2376 : 2447031 : can_change_dest_mode (rtx x, bool added_sets, machine_mode mode)
2377 : : {
2378 : 2447031 : unsigned int regno;
2379 : :
2380 : 2447031 : if (!REG_P (x))
2381 : : return false;
2382 : :
2383 : : /* Don't change between modes with different underlying register sizes,
2384 : : since this could lead to invalid subregs. */
2385 : 2447031 : if (maybe_ne (REGMODE_NATURAL_SIZE (mode),
2386 : 2447031 : REGMODE_NATURAL_SIZE (GET_MODE (x))))
2387 : : return false;
2388 : :
2389 : 2447031 : regno = REGNO (x);
2390 : : /* Allow hard registers if the new mode is legal, and occupies no more
2391 : : registers than the old mode. */
2392 : 2447031 : if (regno < FIRST_PSEUDO_REGISTER)
2393 : 1036149 : return (targetm.hard_regno_mode_ok (regno, mode)
2394 : 1036149 : && REG_NREGS (x) >= hard_regno_nregs (regno, mode));
2395 : :
2396 : : /* Or a pseudo that is only used once. */
2397 : 1410882 : return (regno < reg_n_sets_max
2398 : 1410882 : && REG_N_SETS (regno) == 1
2399 : 1371211 : && !added_sets
2400 : 2782093 : && !REG_USERVAR_P (x));
2401 : : }
2402 : :
2403 : :
2404 : : /* Check whether X, the destination of a set, refers to part of
2405 : : the register specified by REG. */
2406 : :
2407 : : static bool
2408 : 16389 : reg_subword_p (rtx x, rtx reg)
2409 : : {
2410 : : /* Check that reg is an integer mode register. */
2411 : 16389 : if (!REG_P (reg) || GET_MODE_CLASS (GET_MODE (reg)) != MODE_INT)
2412 : : return false;
2413 : :
2414 : 15939 : if (GET_CODE (x) == STRICT_LOW_PART
2415 : 15502 : || GET_CODE (x) == ZERO_EXTRACT)
2416 : 464 : x = XEXP (x, 0);
2417 : :
2418 : 15939 : return GET_CODE (x) == SUBREG
2419 : 15754 : && !paradoxical_subreg_p (x)
2420 : 15754 : && SUBREG_REG (x) == reg
2421 : 31693 : && GET_MODE_CLASS (GET_MODE (x)) == MODE_INT;
2422 : : }
2423 : :
2424 : : /* Return whether PAT is a PARALLEL of exactly N register SETs followed
2425 : : by an arbitrary number of CLOBBERs. */
2426 : : static bool
2427 : 95480303 : is_parallel_of_n_reg_sets (rtx pat, int n)
2428 : : {
2429 : 95480303 : if (GET_CODE (pat) != PARALLEL)
2430 : : return false;
2431 : :
2432 : 25387344 : int len = XVECLEN (pat, 0);
2433 : 25387344 : if (len < n)
2434 : : return false;
2435 : :
2436 : : int i;
2437 : 50779472 : for (i = 0; i < n; i++)
2438 : 48137603 : if (GET_CODE (XVECEXP (pat, 0, i)) != SET
2439 : 28316368 : || !REG_P (SET_DEST (XVECEXP (pat, 0, i))))
2440 : : return false;
2441 : 2985209 : for ( ; i < len; i++)
2442 : 937573 : switch (GET_CODE (XVECEXP (pat, 0, i)))
2443 : : {
2444 : 343340 : case CLOBBER:
2445 : 343340 : if (XEXP (XVECEXP (pat, 0, i), 0) == const0_rtx)
2446 : : return false;
2447 : 343340 : break;
2448 : : default:
2449 : : return false;
2450 : : }
2451 : : return true;
2452 : : }
2453 : :
2454 : : /* Return whether INSN, a PARALLEL of N register SETs (and maybe some
2455 : : CLOBBERs), can be split into individual SETs in that order, without
2456 : : changing semantics. */
2457 : : static bool
2458 : 355605 : can_split_parallel_of_n_reg_sets (rtx_insn *insn, int n)
2459 : : {
2460 : 355605 : if (!insn_nothrow_p (insn))
2461 : : return false;
2462 : :
2463 : 354106 : rtx pat = PATTERN (insn);
2464 : :
2465 : 354106 : int i, j;
2466 : 951458 : for (i = 0; i < n; i++)
2467 : : {
2468 : 652782 : if (side_effects_p (SET_SRC (XVECEXP (pat, 0, i))))
2469 : : return false;
2470 : :
2471 : 649838 : rtx reg = SET_DEST (XVECEXP (pat, 0, i));
2472 : :
2473 : 948514 : for (j = i + 1; j < n; j++)
2474 : 351162 : if (reg_referenced_p (reg, XVECEXP (pat, 0, j)))
2475 : : return false;
2476 : : }
2477 : :
2478 : : return true;
2479 : : }
2480 : :
2481 : : /* Return whether X is just a single_set, with the source
2482 : : a general_operand. */
2483 : : static bool
2484 : 61983816 : is_just_move (rtx_insn *x)
2485 : : {
2486 : 61983816 : rtx set = single_set (x);
2487 : 61983816 : if (!set)
2488 : : return false;
2489 : :
2490 : 61572512 : return general_operand (SET_SRC (set), VOIDmode);
2491 : : }
2492 : :
2493 : : /* Callback function to count autoincs. */
2494 : :
2495 : : static int
2496 : 1013093 : count_auto_inc (rtx, rtx, rtx, rtx, rtx, void *arg)
2497 : : {
2498 : 1013093 : (*((int *) arg))++;
2499 : :
2500 : 1013093 : return 0;
2501 : : }
2502 : :
2503 : : /* Try to combine the insns I0, I1 and I2 into I3.
2504 : : Here I0, I1 and I2 appear earlier than I3.
2505 : : I0 and I1 can be zero; then we combine just I2 into I3, or I1 and I2 into
2506 : : I3.
2507 : :
2508 : : If we are combining more than two insns and the resulting insn is not
2509 : : recognized, try splitting it into two insns. If that happens, I2 and I3
2510 : : are retained and I1/I0 are pseudo-deleted by turning them into a NOTE.
2511 : : Otherwise, I0, I1 and I2 are pseudo-deleted.
2512 : :
2513 : : Return 0 if the combination does not work. Then nothing is changed.
2514 : : If we did the combination, return the insn at which combine should
2515 : : resume scanning.
2516 : :
2517 : : Set NEW_DIRECT_JUMP_P to true if try_combine creates a
2518 : : new direct jump instruction.
2519 : :
2520 : : LAST_COMBINED_INSN is either I3, or some insn after I3 that has
2521 : : been I3 passed to an earlier try_combine within the same basic
2522 : : block. */
2523 : :
2524 : : static rtx_insn *
2525 : 92009093 : try_combine (rtx_insn *i3, rtx_insn *i2, rtx_insn *i1, rtx_insn *i0,
2526 : : bool *new_direct_jump_p, rtx_insn *last_combined_insn)
2527 : : {
2528 : : /* New patterns for I3 and I2, respectively. */
2529 : 92009093 : rtx newpat, newi2pat = 0;
2530 : 92009093 : rtvec newpat_vec_with_clobbers = 0;
2531 : 92009093 : bool substed_i2 = false, substed_i1 = false, substed_i0 = false;
2532 : : /* Indicates need to preserve SET in I0, I1 or I2 in I3 if it is not
2533 : : dead. */
2534 : 92009093 : bool added_sets_0, added_sets_1, added_sets_2;
2535 : : /* Total number of SETs to put into I3. */
2536 : 92009093 : int total_sets;
2537 : : /* Nonzero if I2's or I1's body now appears in I3. */
2538 : 92009093 : int i2_is_used = 0, i1_is_used = 0;
2539 : : /* INSN_CODEs for new I3, new I2, and user of condition code. */
2540 : 92009093 : int insn_code_number, i2_code_number = 0, other_code_number = 0;
2541 : : /* Contains I3 if the destination of I3 is used in its source, which means
2542 : : that the old life of I3 is being killed. If that usage is placed into
2543 : : I2 and not in I3, a REG_DEAD note must be made. */
2544 : 92009093 : rtx i3dest_killed = 0;
2545 : : /* SET_DEST and SET_SRC of I2, I1 and I0. */
2546 : 92009093 : rtx i2dest = 0, i2src = 0, i1dest = 0, i1src = 0, i0dest = 0, i0src = 0;
2547 : : /* Copy of SET_SRC of I1 and I0, if needed. */
2548 : 92009093 : rtx i1src_copy = 0, i0src_copy = 0, i0src_copy2 = 0;
2549 : : /* Set if I2DEST was reused as a scratch register. */
2550 : 92009093 : bool i2scratch = false;
2551 : : /* The PATTERNs of I0, I1, and I2, or a copy of them in certain cases. */
2552 : 92009093 : rtx i0pat = 0, i1pat = 0, i2pat = 0;
2553 : : /* Indicates if I2DEST or I1DEST is in I2SRC or I1_SRC. */
2554 : 92009093 : bool i2dest_in_i2src = false, i1dest_in_i1src = false;
2555 : 92009093 : bool i2dest_in_i1src = false, i0dest_in_i0src = false;
2556 : 92009093 : bool i1dest_in_i0src = false, i2dest_in_i0src = false;;
2557 : 92009093 : bool i2dest_killed = false, i1dest_killed = false, i0dest_killed = false;
2558 : 92009093 : bool i1_feeds_i2_n = false, i0_feeds_i2_n = false, i0_feeds_i1_n = false;
2559 : : /* Notes that must be added to REG_NOTES in I3 and I2. */
2560 : 92009093 : rtx new_i3_notes, new_i2_notes;
2561 : : /* Notes that we substituted I3 into I2 instead of the normal case. */
2562 : 92009093 : bool i3_subst_into_i2 = false;
2563 : : /* Notes that I1, I2 or I3 is a MULT operation. */
2564 : 92009093 : bool have_mult = false;
2565 : 92009093 : bool swap_i2i3 = false;
2566 : 92009093 : bool split_i2i3 = false;
2567 : 92009093 : bool changed_i3_dest = false;
2568 : 92009093 : bool i2_was_move = false, i3_was_move = false;
2569 : 92009093 : int n_auto_inc = 0;
2570 : :
2571 : 92009093 : int maxreg;
2572 : 92009093 : rtx_insn *temp_insn;
2573 : 92009093 : rtx temp_expr;
2574 : 92009093 : struct insn_link *link;
2575 : 92009093 : rtx other_pat = 0;
2576 : 92009093 : rtx new_other_notes;
2577 : 92009093 : int i;
2578 : 92009093 : scalar_int_mode dest_mode, temp_mode;
2579 : 92009093 : bool has_non_call_exception = false;
2580 : :
2581 : : /* Immediately return if any of I0,I1,I2 are the same insn (I3 can
2582 : : never be). */
2583 : 92009093 : if (i1 == i2 || i0 == i2 || (i0 && i0 == i1))
2584 : : return 0;
2585 : :
2586 : : /* Only try four-insn combinations when there's high likelihood of
2587 : : success. Look for simple insns, such as loads of constants or
2588 : : binary operations involving a constant. */
2589 : 21481612 : if (i0)
2590 : : {
2591 : 21481612 : int i;
2592 : 21481612 : int ngood = 0;
2593 : 21481612 : int nshift = 0;
2594 : 21481612 : rtx set0, set3;
2595 : :
2596 : 21481612 : if (!flag_expensive_optimizations)
2597 : : return 0;
2598 : :
2599 : 84623650 : for (i = 0; i < 4; i++)
2600 : : {
2601 : 69160953 : rtx_insn *insn = i == 0 ? i0 : i == 1 ? i1 : i == 2 ? i2 : i3;
2602 : 69160953 : rtx set = single_set (insn);
2603 : 69160953 : rtx src;
2604 : 69160953 : if (!set)
2605 : 2285674 : continue;
2606 : 66875279 : src = SET_SRC (set);
2607 : 66875279 : if (CONSTANT_P (src))
2608 : : {
2609 : 4448832 : ngood += 2;
2610 : 4448832 : break;
2611 : : }
2612 : 62426447 : else if (BINARY_P (src) && CONSTANT_P (XEXP (src, 1)))
2613 : 7828079 : ngood++;
2614 : 54598368 : else if (GET_CODE (src) == ASHIFT || GET_CODE (src) == ASHIFTRT
2615 : 54509634 : || GET_CODE (src) == LSHIFTRT)
2616 : 115391 : nshift++;
2617 : : }
2618 : :
2619 : : /* If I0 loads a memory and I3 sets the same memory, then I1 and I2
2620 : : are likely manipulating its value. Ideally we'll be able to combine
2621 : : all four insns into a bitfield insertion of some kind.
2622 : :
2623 : : Note the source in I0 might be inside a sign/zero extension and the
2624 : : memory modes in I0 and I3 might be different. So extract the address
2625 : : from the destination of I3 and search for it in the source of I0.
2626 : :
2627 : : In the event that there's a match but the source/dest do not actually
2628 : : refer to the same memory, the worst that happens is we try some
2629 : : combinations that we wouldn't have otherwise. */
2630 : 19911529 : if ((set0 = single_set (i0))
2631 : : /* Ensure the source of SET0 is a MEM, possibly buried inside
2632 : : an extension. */
2633 : 19789250 : && (GET_CODE (SET_SRC (set0)) == MEM
2634 : 16574921 : || ((GET_CODE (SET_SRC (set0)) == ZERO_EXTEND
2635 : 16574921 : || GET_CODE (SET_SRC (set0)) == SIGN_EXTEND)
2636 : 462319 : && GET_CODE (XEXP (SET_SRC (set0), 0)) == MEM))
2637 : 3300097 : && (set3 = single_set (i3))
2638 : : /* Ensure the destination of SET3 is a MEM. */
2639 : 2863708 : && GET_CODE (SET_DEST (set3)) == MEM
2640 : : /* Would it be better to extract the base address for the MEM
2641 : : in SET3 and look for that? I don't have cases where it matters
2642 : : but I could envision such cases. */
2643 : 20264036 : && rtx_referenced_p (XEXP (SET_DEST (set3), 0), SET_SRC (set0)))
2644 : 22690 : ngood += 2;
2645 : :
2646 : 19911529 : if (ngood < 2 && nshift < 2)
2647 : : return 0;
2648 : : }
2649 : :
2650 : : /* Exit early if one of the insns involved can't be used for
2651 : : combinations. */
2652 : 76063771 : if (CALL_P (i2)
2653 : 71100273 : || (i1 && CALL_P (i1))
2654 : 67728882 : || (i0 && CALL_P (i0))
2655 : 67659566 : || cant_combine_insn_p (i3)
2656 : 64532923 : || cant_combine_insn_p (i2)
2657 : 49500098 : || (i1 && cant_combine_insn_p (i1))
2658 : 44630854 : || (i0 && cant_combine_insn_p (i0))
2659 : 120614512 : || likely_spilled_retval_p (i3))
2660 : 31513042 : return 0;
2661 : :
2662 : 44550729 : combine_attempts++;
2663 : 44550729 : undobuf.other_insn = 0;
2664 : :
2665 : : /* Reset the hard register usage information. */
2666 : 44550729 : CLEAR_HARD_REG_SET (newpat_used_regs);
2667 : :
2668 : 44550729 : if (dump_file && (dump_flags & TDF_DETAILS))
2669 : : {
2670 : 174 : if (i0)
2671 : 20 : fprintf (dump_file, "\nTrying %d, %d, %d -> %d:\n",
2672 : 20 : INSN_UID (i0), INSN_UID (i1), INSN_UID (i2), INSN_UID (i3));
2673 : 154 : else if (i1)
2674 : 26 : fprintf (dump_file, "\nTrying %d, %d -> %d:\n",
2675 : 26 : INSN_UID (i1), INSN_UID (i2), INSN_UID (i3));
2676 : : else
2677 : 128 : fprintf (dump_file, "\nTrying %d -> %d:\n",
2678 : 128 : INSN_UID (i2), INSN_UID (i3));
2679 : :
2680 : 174 : if (i0)
2681 : 20 : dump_insn_slim (dump_file, i0);
2682 : 174 : if (i1)
2683 : 46 : dump_insn_slim (dump_file, i1);
2684 : 174 : dump_insn_slim (dump_file, i2);
2685 : 174 : dump_insn_slim (dump_file, i3);
2686 : : }
2687 : :
2688 : : /* If multiple insns feed into one of I2 or I3, they can be in any
2689 : : order. To simplify the code below, reorder them in sequence. */
2690 : 44550729 : if (i0 && DF_INSN_LUID (i0) > DF_INSN_LUID (i2))
2691 : : std::swap (i0, i2);
2692 : 44550729 : if (i0 && DF_INSN_LUID (i0) > DF_INSN_LUID (i1))
2693 : : std::swap (i0, i1);
2694 : 44550729 : if (i1 && DF_INSN_LUID (i1) > DF_INSN_LUID (i2))
2695 : : std::swap (i1, i2);
2696 : :
2697 : 44550729 : added_links_insn = 0;
2698 : 44550729 : added_notes_insn = 0;
2699 : :
2700 : : /* First check for one important special case that the code below will
2701 : : not handle. Namely, the case where I1 is zero, I2 is a PARALLEL
2702 : : and I3 is a SET whose SET_SRC is a SET_DEST in I2. In that case,
2703 : : we may be able to replace that destination with the destination of I3.
2704 : : This occurs in the common code where we compute both a quotient and
2705 : : remainder into a structure, in which case we want to do the computation
2706 : : directly into the structure to avoid register-register copies.
2707 : :
2708 : : Note that this case handles both multiple sets in I2 and also cases
2709 : : where I2 has a number of CLOBBERs inside the PARALLEL.
2710 : :
2711 : : We make very conservative checks below and only try to handle the
2712 : : most common cases of this. For example, we only handle the case
2713 : : where I2 and I3 are adjacent to avoid making difficult register
2714 : : usage tests. */
2715 : :
2716 : 27888583 : if (i1 == 0 && NONJUMP_INSN_P (i3) && GET_CODE (PATTERN (i3)) == SET
2717 : 14481146 : && REG_P (SET_SRC (PATTERN (i3)))
2718 : 4930730 : && REGNO (SET_SRC (PATTERN (i3))) >= FIRST_PSEUDO_REGISTER
2719 : 4736408 : && find_reg_note (i3, REG_DEAD, SET_SRC (PATTERN (i3)))
2720 : 3912486 : && GET_CODE (PATTERN (i2)) == PARALLEL
2721 : 1027697 : && ! side_effects_p (SET_DEST (PATTERN (i3)))
2722 : : /* If the dest of I3 is a ZERO_EXTRACT or STRICT_LOW_PART, the code
2723 : : below would need to check what is inside (and reg_overlap_mentioned_p
2724 : : doesn't support those codes anyway). Don't allow those destinations;
2725 : : the resulting insn isn't likely to be recognized anyway. */
2726 : 539755 : && GET_CODE (SET_DEST (PATTERN (i3))) != ZERO_EXTRACT
2727 : 539685 : && GET_CODE (SET_DEST (PATTERN (i3))) != STRICT_LOW_PART
2728 : 538828 : && ! reg_overlap_mentioned_p (SET_SRC (PATTERN (i3)),
2729 : 538828 : SET_DEST (PATTERN (i3)))
2730 : 45089469 : && next_active_insn (i2) == i3)
2731 : : {
2732 : 333682 : rtx p2 = PATTERN (i2);
2733 : :
2734 : : /* Make sure that the destination of I3,
2735 : : which we are going to substitute into one output of I2,
2736 : : is not used within another output of I2. We must avoid making this:
2737 : : (parallel [(set (mem (reg 69)) ...)
2738 : : (set (reg 69) ...)])
2739 : : which is not well-defined as to order of actions.
2740 : : (Besides, reload can't handle output reloads for this.)
2741 : :
2742 : : The problem can also happen if the dest of I3 is a memory ref,
2743 : : if another dest in I2 is an indirect memory ref.
2744 : :
2745 : : Neither can this PARALLEL be an asm. We do not allow combining
2746 : : that usually (see can_combine_p), so do not here either. */
2747 : 333682 : bool ok = true;
2748 : 1012841 : for (i = 0; ok && i < XVECLEN (p2, 0); i++)
2749 : : {
2750 : 679159 : if ((GET_CODE (XVECEXP (p2, 0, i)) == SET
2751 : 333181 : || GET_CODE (XVECEXP (p2, 0, i)) == CLOBBER)
2752 : 1356968 : && reg_overlap_mentioned_p (SET_DEST (PATTERN (i3)),
2753 : 677809 : SET_DEST (XVECEXP (p2, 0, i))))
2754 : : ok = false;
2755 : 678333 : else if (GET_CODE (XVECEXP (p2, 0, i)) == SET
2756 : 345154 : && GET_CODE (SET_SRC (XVECEXP (p2, 0, i))) == ASM_OPERANDS)
2757 : 1926 : ok = false;
2758 : : }
2759 : :
2760 : 333682 : if (ok)
2761 : 395618 : for (i = 0; i < XVECLEN (p2, 0); i++)
2762 : 365109 : if (GET_CODE (XVECEXP (p2, 0, i)) == SET
2763 : 365109 : && SET_DEST (XVECEXP (p2, 0, i)) == SET_SRC (PATTERN (i3)))
2764 : : {
2765 : 301247 : combine_merges++;
2766 : :
2767 : 301247 : subst_insn = i3;
2768 : 301247 : subst_low_luid = DF_INSN_LUID (i2);
2769 : :
2770 : 301247 : added_sets_2 = added_sets_1 = added_sets_0 = false;
2771 : 301247 : i2src = SET_SRC (XVECEXP (p2, 0, i));
2772 : 301247 : i2dest = SET_DEST (XVECEXP (p2, 0, i));
2773 : 301247 : i2dest_killed = dead_or_set_p (i2, i2dest);
2774 : :
2775 : : /* Replace the dest in I2 with our dest and make the resulting
2776 : : insn the new pattern for I3. Then skip to where we validate
2777 : : the pattern. Everything was set up above. */
2778 : 301247 : SUBST (SET_DEST (XVECEXP (p2, 0, i)), SET_DEST (PATTERN (i3)));
2779 : 301247 : newpat = p2;
2780 : 301247 : i3_subst_into_i2 = true;
2781 : 301247 : goto validate_replacement;
2782 : : }
2783 : : }
2784 : :
2785 : : /* If I2 is setting a pseudo to a constant and I3 is setting some
2786 : : sub-part of it to another constant, merge them by making a new
2787 : : constant. */
2788 : 44249482 : if (i1 == 0
2789 : 27587336 : && (temp_expr = single_set (i2)) != 0
2790 : 27328914 : && is_a <scalar_int_mode> (GET_MODE (SET_DEST (temp_expr)), &temp_mode)
2791 : 18259158 : && CONST_SCALAR_INT_P (SET_SRC (temp_expr))
2792 : 2756744 : && GET_CODE (PATTERN (i3)) == SET
2793 : 1346322 : && CONST_SCALAR_INT_P (SET_SRC (PATTERN (i3)))
2794 : 44265871 : && reg_subword_p (SET_DEST (PATTERN (i3)), SET_DEST (temp_expr)))
2795 : : {
2796 : 15754 : rtx dest = SET_DEST (PATTERN (i3));
2797 : 15754 : rtx temp_dest = SET_DEST (temp_expr);
2798 : 15754 : int offset = -1;
2799 : 15754 : int width = 0;
2800 : :
2801 : 15754 : if (GET_CODE (dest) == ZERO_EXTRACT)
2802 : : {
2803 : 1 : if (CONST_INT_P (XEXP (dest, 1))
2804 : 1 : && CONST_INT_P (XEXP (dest, 2))
2805 : 2 : && is_a <scalar_int_mode> (GET_MODE (XEXP (dest, 0)),
2806 : : &dest_mode))
2807 : : {
2808 : 1 : width = INTVAL (XEXP (dest, 1));
2809 : 1 : offset = INTVAL (XEXP (dest, 2));
2810 : 1 : dest = XEXP (dest, 0);
2811 : 1 : if (BITS_BIG_ENDIAN)
2812 : : offset = GET_MODE_PRECISION (dest_mode) - width - offset;
2813 : : }
2814 : : }
2815 : : else
2816 : : {
2817 : 15753 : if (GET_CODE (dest) == STRICT_LOW_PART)
2818 : 437 : dest = XEXP (dest, 0);
2819 : 15753 : if (is_a <scalar_int_mode> (GET_MODE (dest), &dest_mode))
2820 : : {
2821 : 15753 : width = GET_MODE_PRECISION (dest_mode);
2822 : 15753 : offset = 0;
2823 : : }
2824 : : }
2825 : :
2826 : 15754 : if (offset >= 0)
2827 : : {
2828 : : /* If this is the low part, we're done. */
2829 : 15754 : if (subreg_lowpart_p (dest))
2830 : : ;
2831 : : /* Handle the case where inner is twice the size of outer. */
2832 : 4147 : else if (GET_MODE_PRECISION (temp_mode)
2833 : 4147 : == 2 * GET_MODE_PRECISION (dest_mode))
2834 : 4135 : offset += GET_MODE_PRECISION (dest_mode);
2835 : : /* Otherwise give up for now. */
2836 : : else
2837 : : offset = -1;
2838 : : }
2839 : :
2840 : 15742 : if (offset >= 0)
2841 : : {
2842 : 15742 : rtx inner = SET_SRC (PATTERN (i3));
2843 : 15742 : rtx outer = SET_SRC (temp_expr);
2844 : :
2845 : 31484 : wide_int o = wi::insert (rtx_mode_t (outer, temp_mode),
2846 : 15742 : rtx_mode_t (inner, dest_mode),
2847 : 31484 : offset, width);
2848 : :
2849 : 15742 : combine_merges++;
2850 : 15742 : subst_insn = i3;
2851 : 15742 : subst_low_luid = DF_INSN_LUID (i2);
2852 : 15742 : added_sets_2 = added_sets_1 = added_sets_0 = false;
2853 : 15742 : i2dest = temp_dest;
2854 : 15742 : i2dest_killed = dead_or_set_p (i2, i2dest);
2855 : :
2856 : : /* Replace the source in I2 with the new constant and make the
2857 : : resulting insn the new pattern for I3. Then skip to where we
2858 : : validate the pattern. Everything was set up above. */
2859 : 15742 : SUBST (SET_SRC (temp_expr),
2860 : : immed_wide_int_const (o, temp_mode));
2861 : :
2862 : 15742 : newpat = PATTERN (i2);
2863 : :
2864 : : /* The dest of I3 has been replaced with the dest of I2. */
2865 : 15742 : changed_i3_dest = true;
2866 : 15742 : goto validate_replacement;
2867 : 15742 : }
2868 : : }
2869 : :
2870 : : /* If we have no I1 and I2 looks like:
2871 : : (parallel [(set (reg:CC X) (compare:CC OP (const_int 0)))
2872 : : (set Y OP)])
2873 : : make up a dummy I1 that is
2874 : : (set Y OP)
2875 : : and change I2 to be
2876 : : (set (reg:CC X) (compare:CC Y (const_int 0)))
2877 : :
2878 : : (We can ignore any trailing CLOBBERs.)
2879 : :
2880 : : This undoes a previous combination and allows us to match a branch-and-
2881 : : decrement insn. */
2882 : :
2883 : 44233740 : if (i1 == 0
2884 : 27571594 : && is_parallel_of_n_reg_sets (PATTERN (i2), 2)
2885 : 211606 : && (GET_MODE_CLASS (GET_MODE (SET_DEST (XVECEXP (PATTERN (i2), 0, 0))))
2886 : : == MODE_CC)
2887 : 134914 : && GET_CODE (SET_SRC (XVECEXP (PATTERN (i2), 0, 0))) == COMPARE
2888 : 109334 : && XEXP (SET_SRC (XVECEXP (PATTERN (i2), 0, 0)), 1) == const0_rtx
2889 : 70773 : && rtx_equal_p (XEXP (SET_SRC (XVECEXP (PATTERN (i2), 0, 0)), 0),
2890 : 70773 : SET_SRC (XVECEXP (PATTERN (i2), 0, 1)))
2891 : 63428 : && !reg_used_between_p (SET_DEST (XVECEXP (PATTERN (i2), 0, 0)), i2, i3)
2892 : 44297168 : && !reg_used_between_p (SET_DEST (XVECEXP (PATTERN (i2), 0, 1)), i2, i3))
2893 : : {
2894 : : /* We make I1 with the same INSN_UID as I2. This gives it
2895 : : the same DF_INSN_LUID for value tracking. Our fake I1 will
2896 : : never appear in the insn stream so giving it the same INSN_UID
2897 : : as I2 will not cause a problem. */
2898 : :
2899 : 126856 : i1 = gen_rtx_INSN (VOIDmode, NULL, i2, BLOCK_FOR_INSN (i2),
2900 : 63428 : XVECEXP (PATTERN (i2), 0, 1), INSN_LOCATION (i2),
2901 : : -1, NULL_RTX);
2902 : 63428 : INSN_UID (i1) = INSN_UID (i2);
2903 : :
2904 : 63428 : SUBST (PATTERN (i2), XVECEXP (PATTERN (i2), 0, 0));
2905 : 63428 : SUBST (XEXP (SET_SRC (PATTERN (i2)), 0),
2906 : : SET_DEST (PATTERN (i1)));
2907 : 63428 : unsigned int regno = REGNO (SET_DEST (PATTERN (i1)));
2908 : 63428 : SUBST_LINK (LOG_LINKS (i2),
2909 : : alloc_insn_link (i1, regno, LOG_LINKS (i2)));
2910 : : }
2911 : :
2912 : : /* If I2 is a PARALLEL of two SETs of REGs (and perhaps some CLOBBERs),
2913 : : make those two SETs separate I1 and I2 insns, and make an I0 that is
2914 : : the original I1. */
2915 : 44233740 : if (i0 == 0
2916 : 42178762 : && is_parallel_of_n_reg_sets (PATTERN (i2), 2)
2917 : 355605 : && can_split_parallel_of_n_reg_sets (i2, 2)
2918 : 298676 : && !reg_used_between_p (SET_DEST (XVECEXP (PATTERN (i2), 0, 0)), i2, i3)
2919 : 261911 : && !reg_used_between_p (SET_DEST (XVECEXP (PATTERN (i2), 0, 1)), i2, i3)
2920 : 245128 : && !reg_set_between_p (SET_DEST (XVECEXP (PATTERN (i2), 0, 0)), i2, i3)
2921 : 44478864 : && !reg_set_between_p (SET_DEST (XVECEXP (PATTERN (i2), 0, 1)), i2, i3))
2922 : : {
2923 : : /* If there is no I1, there is no I0 either. */
2924 : 245124 : i0 = i1;
2925 : :
2926 : : /* We make I1 with the same INSN_UID as I2. This gives it
2927 : : the same DF_INSN_LUID for value tracking. Our fake I1 will
2928 : : never appear in the insn stream so giving it the same INSN_UID
2929 : : as I2 will not cause a problem. */
2930 : :
2931 : 490248 : i1 = gen_rtx_INSN (VOIDmode, NULL, i2, BLOCK_FOR_INSN (i2),
2932 : 245124 : XVECEXP (PATTERN (i2), 0, 0), INSN_LOCATION (i2),
2933 : : -1, NULL_RTX);
2934 : 245124 : INSN_UID (i1) = INSN_UID (i2);
2935 : :
2936 : 245124 : SUBST (PATTERN (i2), XVECEXP (PATTERN (i2), 0, 1));
2937 : : }
2938 : :
2939 : : /* Verify that I2 and maybe I1 and I0 can be combined into I3. */
2940 : 44233740 : if (!can_combine_p (i2, i3, i0, i1, NULL, NULL, &i2dest, &i2src))
2941 : : {
2942 : 11778543 : if (dump_file && (dump_flags & TDF_DETAILS))
2943 : 8 : fprintf (dump_file, "Can't combine i2 into i3\n");
2944 : 11778543 : undo_all ();
2945 : 11778543 : return 0;
2946 : : }
2947 : 32455197 : if (i1 && !can_combine_p (i1, i3, i0, NULL, i2, NULL, &i1dest, &i1src))
2948 : : {
2949 : 1304343 : if (dump_file && (dump_flags & TDF_DETAILS))
2950 : 0 : fprintf (dump_file, "Can't combine i1 into i3\n");
2951 : 1304343 : undo_all ();
2952 : 1304343 : return 0;
2953 : : }
2954 : 31150854 : if (i0 && !can_combine_p (i0, i3, NULL, NULL, i1, i2, &i0dest, &i0src))
2955 : : {
2956 : 158732 : if (dump_file && (dump_flags & TDF_DETAILS))
2957 : 0 : fprintf (dump_file, "Can't combine i0 into i3\n");
2958 : 158732 : undo_all ();
2959 : 158732 : return 0;
2960 : : }
2961 : :
2962 : : /* With non-call exceptions we can end up trying to combine multiple
2963 : : insns with possible EH side effects. Make sure we can combine
2964 : : that to a single insn which means there must be at most one insn
2965 : : in the combination with an EH side effect. */
2966 : 30992122 : if (cfun->can_throw_non_call_exceptions)
2967 : : {
2968 : 6125125 : if (find_reg_note (i3, REG_EH_REGION, NULL_RTX)
2969 : 6100442 : || find_reg_note (i2, REG_EH_REGION, NULL_RTX)
2970 : 6100249 : || (i1 && find_reg_note (i1, REG_EH_REGION, NULL_RTX))
2971 : 12225324 : || (i0 && find_reg_note (i0, REG_EH_REGION, NULL_RTX)))
2972 : : {
2973 : 24926 : has_non_call_exception = true;
2974 : 24926 : if (insn_could_throw_p (i3)
2975 : 24926 : + insn_could_throw_p (i2)
2976 : 24926 : + (i1 ? insn_could_throw_p (i1) : 0)
2977 : 24926 : + (i0 ? insn_could_throw_p (i0) : 0) > 1)
2978 : : {
2979 : 214 : if (dump_file && (dump_flags & TDF_DETAILS))
2980 : 0 : fprintf (dump_file, "Can't combine multiple insns with EH "
2981 : : "side-effects\n");
2982 : 214 : undo_all ();
2983 : 214 : return 0;
2984 : : }
2985 : : }
2986 : : }
2987 : :
2988 : : /* Record whether i2 and i3 are trivial moves. */
2989 : 30991908 : i2_was_move = is_just_move (i2);
2990 : 30991908 : i3_was_move = is_just_move (i3);
2991 : :
2992 : : /* Record whether I2DEST is used in I2SRC and similarly for the other
2993 : : cases. Knowing this will help in register status updating below. */
2994 : 30991908 : i2dest_in_i2src = reg_overlap_mentioned_p (i2dest, i2src);
2995 : 30991908 : i1dest_in_i1src = i1 && reg_overlap_mentioned_p (i1dest, i1src);
2996 : 9742131 : i2dest_in_i1src = i1 && reg_overlap_mentioned_p (i2dest, i1src);
2997 : 30991908 : i0dest_in_i0src = i0 && reg_overlap_mentioned_p (i0dest, i0src);
2998 : 1461855 : i1dest_in_i0src = i0 && reg_overlap_mentioned_p (i1dest, i0src);
2999 : 1461855 : i2dest_in_i0src = i0 && reg_overlap_mentioned_p (i2dest, i0src);
3000 : 30991908 : i2dest_killed = dead_or_set_p (i2, i2dest);
3001 : 30991908 : i1dest_killed = i1 && dead_or_set_p (i1, i1dest);
3002 : 30991908 : i0dest_killed = i0 && dead_or_set_p (i0, i0dest);
3003 : :
3004 : : /* For the earlier insns, determine which of the subsequent ones they
3005 : : feed. */
3006 : 30991908 : i1_feeds_i2_n = i1 && insn_a_feeds_b (i1, i2);
3007 : 30991908 : i0_feeds_i1_n = i0 && insn_a_feeds_b (i0, i1);
3008 : 2563938 : i0_feeds_i2_n = (i0 && (!i0_feeds_i1_n ? insn_a_feeds_b (i0, i2)
3009 : 1102083 : : (!reg_overlap_mentioned_p (i1dest, i0dest)
3010 : 1078330 : && reg_overlap_mentioned_p (i0dest, i2src))));
3011 : :
3012 : : /* Ensure that I3's pattern can be the destination of combines. */
3013 : 30991908 : if (! combinable_i3pat (i3, &PATTERN (i3), i2dest, i1dest, i0dest,
3014 : 30991908 : i1 && i2dest_in_i1src && !i1_feeds_i2_n,
3015 : 1461855 : i0 && ((i2dest_in_i0src && !i0_feeds_i2_n)
3016 : 1438094 : || (i1dest_in_i0src && !i0_feeds_i1_n)),
3017 : : &i3dest_killed))
3018 : : {
3019 : 217411 : undo_all ();
3020 : 217411 : return 0;
3021 : : }
3022 : :
3023 : : /* See if any of the insns is a MULT operation. Unless one is, we will
3024 : : reject a combination that is, since it must be slower. Be conservative
3025 : : here. */
3026 : 30774497 : if (GET_CODE (i2src) == MULT
3027 : 29991534 : || (i1 != 0 && GET_CODE (i1src) == MULT)
3028 : 29659550 : || (i0 != 0 && GET_CODE (i0src) == MULT)
3029 : 60389043 : || (GET_CODE (PATTERN (i3)) == SET
3030 : 23092030 : && GET_CODE (SET_SRC (PATTERN (i3))) == MULT))
3031 : : have_mult = true;
3032 : :
3033 : : /* If I3 has an inc, then give up if I1 or I2 uses the reg that is inc'd.
3034 : : We used to do this EXCEPT in one case: I3 has a post-inc in an
3035 : : output operand. However, that exception can give rise to insns like
3036 : : mov r3,(r3)+
3037 : : which is a famous insn on the PDP-11 where the value of r3 used as the
3038 : : source was model-dependent. Avoid this sort of thing. */
3039 : :
3040 : : #if 0
3041 : : if (!(GET_CODE (PATTERN (i3)) == SET
3042 : : && REG_P (SET_SRC (PATTERN (i3)))
3043 : : && MEM_P (SET_DEST (PATTERN (i3)))
3044 : : && (GET_CODE (XEXP (SET_DEST (PATTERN (i3)), 0)) == POST_INC
3045 : : || GET_CODE (XEXP (SET_DEST (PATTERN (i3)), 0)) == POST_DEC)))
3046 : : /* It's not the exception. */
3047 : : #endif
3048 : 30774497 : if (AUTO_INC_DEC)
3049 : : {
3050 : : rtx link;
3051 : : for (link = REG_NOTES (i3); link; link = XEXP (link, 1))
3052 : : if (REG_NOTE_KIND (link) == REG_INC
3053 : : && (reg_overlap_mentioned_p (XEXP (link, 0), PATTERN (i2))
3054 : : || (i1 != 0
3055 : : && reg_overlap_mentioned_p (XEXP (link, 0), PATTERN (i1)))))
3056 : : {
3057 : : undo_all ();
3058 : : return 0;
3059 : : }
3060 : : }
3061 : :
3062 : : /* See if the SETs in I1 or I2 need to be kept around in the merged
3063 : : instruction: whenever the value set there is still needed past I3.
3064 : : For the SET in I2, this is easy: we see if I2DEST dies or is set in I3.
3065 : :
3066 : : For the SET in I1, we have two cases: if I1 and I2 independently feed
3067 : : into I3, the set in I1 needs to be kept around unless I1DEST dies
3068 : : or is set in I3. Otherwise (if I1 feeds I2 which feeds I3), the set
3069 : : in I1 needs to be kept around unless I1DEST dies or is set in either
3070 : : I2 or I3. The same considerations apply to I0. */
3071 : :
3072 : 30774497 : added_sets_2 = !dead_or_set_p (i3, i2dest);
3073 : :
3074 : 30774497 : if (i1)
3075 : 9667709 : added_sets_1 = !(dead_or_set_p (i3, i1dest)
3076 : 7404295 : || (i1_feeds_i2_n && dead_or_set_p (i2, i1dest)));
3077 : : else
3078 : : added_sets_1 = false;
3079 : :
3080 : 30774497 : if (i0)
3081 : 2085417 : added_sets_0 = !(dead_or_set_p (i3, i0dest)
3082 : 1293123 : || (i0_feeds_i1_n && dead_or_set_p (i1, i0dest))
3083 : 240510 : || ((i0_feeds_i2_n || (i0_feeds_i1_n && i1_feeds_i2_n))
3084 : 601630 : && dead_or_set_p (i2, i0dest)));
3085 : : else
3086 : : added_sets_0 = false;
3087 : :
3088 : : /* We are about to copy insns for the case where they need to be kept
3089 : : around. Check that they can be copied in the merged instruction. */
3090 : :
3091 : 30774497 : if (targetm.cannot_copy_insn_p
3092 : 30774497 : && ((added_sets_2 && targetm.cannot_copy_insn_p (i2))
3093 : 0 : || (i1 && added_sets_1 && targetm.cannot_copy_insn_p (i1))
3094 : 0 : || (i0 && added_sets_0 && targetm.cannot_copy_insn_p (i0))))
3095 : : {
3096 : 0 : undo_all ();
3097 : 0 : return 0;
3098 : : }
3099 : :
3100 : : /* We cannot safely duplicate volatile references in any case. */
3101 : :
3102 : 6869652 : if ((added_sets_2 && volatile_refs_p (PATTERN (i2)))
3103 : 30749594 : || (added_sets_1 && volatile_refs_p (PATTERN (i1)))
3104 : 61501241 : || (added_sets_0 && volatile_refs_p (PATTERN (i0))))
3105 : : {
3106 : 49901 : undo_all ();
3107 : 49901 : return 0;
3108 : : }
3109 : :
3110 : : /* Count how many auto_inc expressions there were in the original insns;
3111 : : we need to have the same number in the resulting patterns. */
3112 : :
3113 : 30724596 : if (i0)
3114 : 1429762 : for_each_inc_dec (PATTERN (i0), count_auto_inc, &n_auto_inc);
3115 : 30724596 : if (i1)
3116 : 9642373 : for_each_inc_dec (PATTERN (i1), count_auto_inc, &n_auto_inc);
3117 : 30724596 : for_each_inc_dec (PATTERN (i2), count_auto_inc, &n_auto_inc);
3118 : 30724596 : for_each_inc_dec (PATTERN (i3), count_auto_inc, &n_auto_inc);
3119 : :
3120 : : /* If the set in I2 needs to be kept around, we must make a copy of
3121 : : PATTERN (I2), so that when we substitute I1SRC for I1DEST in
3122 : : PATTERN (I2), we are only substituting for the original I1DEST, not into
3123 : : an already-substituted copy. This also prevents making self-referential
3124 : : rtx. If I2 is a PARALLEL, we just need the piece that assigns I2SRC to
3125 : : I2DEST. */
3126 : :
3127 : 30724596 : if (added_sets_2)
3128 : : {
3129 : 6841940 : if (GET_CODE (PATTERN (i2)) == PARALLEL)
3130 : 2141628 : i2pat = gen_rtx_SET (i2dest, copy_rtx (i2src));
3131 : : else
3132 : 4700312 : i2pat = copy_rtx (PATTERN (i2));
3133 : : }
3134 : :
3135 : 30724596 : if (added_sets_1)
3136 : : {
3137 : 3570810 : if (GET_CODE (PATTERN (i1)) == PARALLEL)
3138 : 1158935 : i1pat = gen_rtx_SET (i1dest, copy_rtx (i1src));
3139 : : else
3140 : 2411875 : i1pat = copy_rtx (PATTERN (i1));
3141 : : }
3142 : :
3143 : 30724596 : if (added_sets_0)
3144 : : {
3145 : 334989 : if (GET_CODE (PATTERN (i0)) == PARALLEL)
3146 : 157140 : i0pat = gen_rtx_SET (i0dest, copy_rtx (i0src));
3147 : : else
3148 : 177849 : i0pat = copy_rtx (PATTERN (i0));
3149 : : }
3150 : :
3151 : 30724596 : combine_merges++;
3152 : :
3153 : : /* Substitute in the latest insn for the regs set by the earlier ones. */
3154 : :
3155 : 30724596 : maxreg = max_reg_num ();
3156 : :
3157 : 30724596 : subst_insn = i3;
3158 : :
3159 : : /* Many machines have insns that can both perform an
3160 : : arithmetic operation and set the condition code. These operations will
3161 : : be represented as a PARALLEL with the first element of the vector
3162 : : being a COMPARE of an arithmetic operation with the constant zero.
3163 : : The second element of the vector will set some pseudo to the result
3164 : : of the same arithmetic operation. If we simplify the COMPARE, we won't
3165 : : match such a pattern and so will generate an extra insn. Here we test
3166 : : for this case, where both the comparison and the operation result are
3167 : : needed, and make the PARALLEL by just replacing I2DEST in I3SRC with
3168 : : I2SRC. Later we will make the PARALLEL that contains I2. */
3169 : :
3170 : 21082223 : if (i1 == 0 && added_sets_2 && GET_CODE (PATTERN (i3)) == SET
3171 : 3955596 : && GET_CODE (SET_SRC (PATTERN (i3))) == COMPARE
3172 : 1664580 : && CONST_INT_P (XEXP (SET_SRC (PATTERN (i3)), 1))
3173 : 31552924 : && rtx_equal_p (XEXP (SET_SRC (PATTERN (i3)), 0), i2dest))
3174 : : {
3175 : 759434 : rtx newpat_dest;
3176 : 759434 : rtx *cc_use_loc = NULL;
3177 : 759434 : rtx_insn *cc_use_insn = NULL;
3178 : 759434 : rtx op0 = i2src, op1 = XEXP (SET_SRC (PATTERN (i3)), 1);
3179 : 759434 : machine_mode compare_mode, orig_compare_mode;
3180 : 759434 : enum rtx_code compare_code = UNKNOWN, orig_compare_code = UNKNOWN;
3181 : 759434 : scalar_int_mode mode;
3182 : :
3183 : 759434 : newpat = PATTERN (i3);
3184 : 759434 : newpat_dest = SET_DEST (newpat);
3185 : 759434 : compare_mode = orig_compare_mode = GET_MODE (newpat_dest);
3186 : :
3187 : 759434 : if (undobuf.other_insn == 0
3188 : 759434 : && (cc_use_loc = find_single_use (SET_DEST (newpat), i3,
3189 : : &cc_use_insn)))
3190 : : {
3191 : 753005 : compare_code = orig_compare_code = GET_CODE (*cc_use_loc);
3192 : 753005 : if (is_a <scalar_int_mode> (GET_MODE (i2dest), &mode))
3193 : 753005 : compare_code = simplify_compare_const (compare_code, mode,
3194 : : &op0, &op1);
3195 : 753005 : target_canonicalize_comparison (&compare_code, &op0, &op1, 1);
3196 : : }
3197 : :
3198 : : /* Do the rest only if op1 is const0_rtx, which may be the
3199 : : result of simplification. */
3200 : 759434 : if (op1 == const0_rtx)
3201 : : {
3202 : : /* If a single use of the CC is found, prepare to modify it
3203 : : when SELECT_CC_MODE returns a new CC-class mode, or when
3204 : : the above simplify_compare_const() returned a new comparison
3205 : : operator. undobuf.other_insn is assigned the CC use insn
3206 : : when modifying it. */
3207 : 479285 : if (cc_use_loc)
3208 : : {
3209 : : #ifdef SELECT_CC_MODE
3210 : 476778 : machine_mode new_mode
3211 : 476778 : = SELECT_CC_MODE (compare_code, op0, op1);
3212 : 476778 : if (new_mode != orig_compare_mode
3213 : 476778 : && can_change_dest_mode (SET_DEST (newpat),
3214 : : added_sets_2, new_mode))
3215 : : {
3216 : 330 : unsigned int regno = REGNO (newpat_dest);
3217 : 330 : compare_mode = new_mode;
3218 : 330 : if (regno < FIRST_PSEUDO_REGISTER)
3219 : 330 : newpat_dest = gen_rtx_REG (compare_mode, regno);
3220 : : else
3221 : : {
3222 : 0 : subst_mode (regno, compare_mode);
3223 : 0 : newpat_dest = regno_reg_rtx[regno];
3224 : : }
3225 : : }
3226 : : #endif
3227 : : /* Cases for modifying the CC-using comparison. */
3228 : 476778 : if (compare_code != orig_compare_code
3229 : 344 : && COMPARISON_P (*cc_use_loc))
3230 : : {
3231 : : /* Replace cc_use_loc with entire new RTX. */
3232 : 344 : SUBST (*cc_use_loc,
3233 : : gen_rtx_fmt_ee (compare_code, GET_MODE (*cc_use_loc),
3234 : : newpat_dest, const0_rtx));
3235 : 344 : undobuf.other_insn = cc_use_insn;
3236 : : }
3237 : 476434 : else if (compare_mode != orig_compare_mode)
3238 : : {
3239 : 1 : subrtx_ptr_iterator::array_type array;
3240 : :
3241 : : /* Just replace the CC reg with a new mode. */
3242 : 4 : FOR_EACH_SUBRTX_PTR (iter, array, cc_use_loc, NONCONST)
3243 : : {
3244 : 3 : rtx *loc = *iter;
3245 : 3 : if (REG_P (*loc)
3246 : 3 : && REGNO (*loc) == REGNO (newpat_dest))
3247 : : {
3248 : 1 : SUBST (*loc, newpat_dest);
3249 : 1 : iter.skip_subrtxes ();
3250 : : }
3251 : : }
3252 : 1 : undobuf.other_insn = cc_use_insn;
3253 : 1 : }
3254 : : }
3255 : :
3256 : : /* Now we modify the current newpat:
3257 : : First, SET_DEST(newpat) is updated if the CC mode has been
3258 : : altered. For targets without SELECT_CC_MODE, this should be
3259 : : optimized away. */
3260 : 479285 : if (compare_mode != orig_compare_mode)
3261 : 330 : SUBST (SET_DEST (newpat), newpat_dest);
3262 : : /* This is always done to propagate i2src into newpat. */
3263 : 479285 : SUBST (SET_SRC (newpat),
3264 : : gen_rtx_COMPARE (compare_mode, op0, op1));
3265 : : /* Create new version of i2pat if needed; the below PARALLEL
3266 : : creation needs this to work correctly. */
3267 : 479285 : if (! rtx_equal_p (i2src, op0))
3268 : 29 : i2pat = gen_rtx_SET (i2dest, op0);
3269 : 479285 : i2_is_used = 1;
3270 : : }
3271 : : }
3272 : :
3273 : 759434 : if (i2_is_used == 0)
3274 : : {
3275 : : /* It is possible that the source of I2 or I1 may be performing
3276 : : an unneeded operation, such as a ZERO_EXTEND of something
3277 : : that is known to have the high part zero. Handle that case
3278 : : by letting subst look at the inner insns.
3279 : :
3280 : : Another way to do this would be to have a function that tries
3281 : : to simplify a single insn instead of merging two or more
3282 : : insns. We don't do this because of the potential of infinite
3283 : : loops and because of the potential extra memory required.
3284 : : However, doing it the way we are is a bit of a kludge and
3285 : : doesn't catch all cases.
3286 : :
3287 : : But only do this if -fexpensive-optimizations since it slows
3288 : : things down and doesn't usually win.
3289 : :
3290 : : This is not done in the COMPARE case above because the
3291 : : unmodified I2PAT is used in the PARALLEL and so a pattern
3292 : : with a modified I2SRC would not match. */
3293 : :
3294 : 30245311 : if (flag_expensive_optimizations)
3295 : : {
3296 : : /* Pass pc_rtx so no substitutions are done, just
3297 : : simplifications. */
3298 : 28323915 : if (i1)
3299 : : {
3300 : 9075573 : subst_low_luid = DF_INSN_LUID (i1);
3301 : 9075573 : i1src = subst (i1src, pc_rtx, pc_rtx, false, false, false);
3302 : : }
3303 : :
3304 : 28323915 : subst_low_luid = DF_INSN_LUID (i2);
3305 : 28323915 : i2src = subst (i2src, pc_rtx, pc_rtx, false, false, false);
3306 : : }
3307 : :
3308 : 30245311 : n_occurrences = 0; /* `subst' counts here */
3309 : 30245311 : subst_low_luid = DF_INSN_LUID (i2);
3310 : :
3311 : : /* If I1 feeds into I2 and I1DEST is in I1SRC, we need to make a unique
3312 : : copy of I2SRC each time we substitute it, in order to avoid creating
3313 : : self-referential RTL when we will be substituting I1SRC for I1DEST
3314 : : later. Likewise if I0 feeds into I2, either directly or indirectly
3315 : : through I1, and I0DEST is in I0SRC. */
3316 : 30245311 : newpat = subst (PATTERN (i3), i2dest, i2src, false, false,
3317 : 30245311 : (i1_feeds_i2_n && i1dest_in_i1src)
3318 : 30245311 : || ((i0_feeds_i2_n || (i0_feeds_i1_n && i1_feeds_i2_n))
3319 : : && i0dest_in_i0src));
3320 : 30245311 : substed_i2 = true;
3321 : :
3322 : : /* Record whether I2's body now appears within I3's body. */
3323 : 30245311 : i2_is_used = n_occurrences;
3324 : : }
3325 : :
3326 : : /* If we already got a failure, don't try to do more. Otherwise, try to
3327 : : substitute I1 if we have it. */
3328 : :
3329 : 30724596 : if (i1 && GET_CODE (newpat) != CLOBBER)
3330 : : {
3331 : : /* Before we can do this substitution, we must redo the test done
3332 : : above (see detailed comments there) that ensures I1DEST isn't
3333 : : mentioned in any SETs in NEWPAT that are field assignments. */
3334 : 9574677 : if (!combinable_i3pat (NULL, &newpat, i1dest, NULL_RTX, NULL_RTX,
3335 : : false, false, 0))
3336 : : {
3337 : 33 : undo_all ();
3338 : 33 : return 0;
3339 : : }
3340 : :
3341 : 9574644 : n_occurrences = 0;
3342 : 9574644 : subst_low_luid = DF_INSN_LUID (i1);
3343 : :
3344 : : /* If the following substitution will modify I1SRC, make a copy of it
3345 : : for the case where it is substituted for I1DEST in I2PAT later. */
3346 : 9574644 : if (added_sets_2 && i1_feeds_i2_n)
3347 : 1400888 : i1src_copy = copy_rtx (i1src);
3348 : :
3349 : : /* If I0 feeds into I1 and I0DEST is in I0SRC, we need to make a unique
3350 : : copy of I1SRC each time we substitute it, in order to avoid creating
3351 : : self-referential RTL when we will be substituting I0SRC for I0DEST
3352 : : later. */
3353 : 19149288 : newpat = subst (newpat, i1dest, i1src, false, false,
3354 : 9574644 : i0_feeds_i1_n && i0dest_in_i0src);
3355 : 9574644 : substed_i1 = true;
3356 : :
3357 : : /* Record whether I1's body now appears within I3's body. */
3358 : 9574644 : i1_is_used = n_occurrences;
3359 : : }
3360 : :
3361 : : /* Likewise for I0 if we have it. */
3362 : :
3363 : 30724563 : if (i0 && GET_CODE (newpat) != CLOBBER)
3364 : : {
3365 : 1415630 : if (!combinable_i3pat (NULL, &newpat, i0dest, NULL_RTX, NULL_RTX,
3366 : : false, false, 0))
3367 : : {
3368 : 2 : undo_all ();
3369 : 2 : return 0;
3370 : : }
3371 : :
3372 : : /* If the following substitution will modify I0SRC, make a copy of it
3373 : : for the case where it is substituted for I0DEST in I1PAT later. */
3374 : 1415628 : if (added_sets_1 && i0_feeds_i1_n)
3375 : 220312 : i0src_copy = copy_rtx (i0src);
3376 : : /* And a copy for I0DEST in I2PAT substitution. */
3377 : 1415628 : if (added_sets_2 && ((i0_feeds_i1_n && i1_feeds_i2_n)
3378 : 184520 : || (i0_feeds_i2_n)))
3379 : 298551 : i0src_copy2 = copy_rtx (i0src);
3380 : :
3381 : 1415628 : n_occurrences = 0;
3382 : 1415628 : subst_low_luid = DF_INSN_LUID (i0);
3383 : 1415628 : newpat = subst (newpat, i0dest, i0src, false, false, false);
3384 : 1415628 : substed_i0 = true;
3385 : : }
3386 : :
3387 : 30724561 : if (n_auto_inc)
3388 : : {
3389 : 507107 : int new_n_auto_inc = 0;
3390 : 507107 : for_each_inc_dec (newpat, count_auto_inc, &new_n_auto_inc);
3391 : :
3392 : 507107 : if (n_auto_inc != new_n_auto_inc)
3393 : : {
3394 : 1125 : if (dump_file && (dump_flags & TDF_DETAILS))
3395 : 0 : fprintf (dump_file, "Number of auto_inc expressions changed\n");
3396 : 1125 : undo_all ();
3397 : 1125 : return 0;
3398 : : }
3399 : : }
3400 : :
3401 : : /* Fail if an autoincrement side-effect has been duplicated. Be careful
3402 : : to count all the ways that I2SRC and I1SRC can be used. */
3403 : 30723436 : if ((FIND_REG_INC_NOTE (i2, NULL_RTX) != 0
3404 : : && i2_is_used + added_sets_2 > 1)
3405 : : || (i1 != 0 && FIND_REG_INC_NOTE (i1, NULL_RTX) != 0
3406 : : && (i1_is_used + added_sets_1 + (added_sets_2 && i1_feeds_i2_n) > 1))
3407 : : || (i0 != 0 && FIND_REG_INC_NOTE (i0, NULL_RTX) != 0
3408 : : && (n_occurrences + added_sets_0
3409 : : + (added_sets_1 && i0_feeds_i1_n)
3410 : : + (added_sets_2 && i0_feeds_i2_n) > 1))
3411 : : /* Fail if we tried to make a new register. */
3412 : 30723436 : || max_reg_num () != maxreg
3413 : : /* Fail if we couldn't do something and have a CLOBBER. */
3414 : 30723436 : || GET_CODE (newpat) == CLOBBER
3415 : : /* Fail if this new pattern is a MULT and we didn't have one before
3416 : : at the outer level. */
3417 : 61085003 : || (GET_CODE (newpat) == SET && GET_CODE (SET_SRC (newpat)) == MULT
3418 : 256190 : && ! have_mult))
3419 : : {
3420 : 378713 : undo_all ();
3421 : 378713 : return 0;
3422 : : }
3423 : :
3424 : : /* If the actions of the earlier insns must be kept
3425 : : in addition to substituting them into the latest one,
3426 : : we must make a new PARALLEL for the latest insn
3427 : : to hold additional the SETs. */
3428 : :
3429 : 30344723 : if (added_sets_0 || added_sets_1 || added_sets_2)
3430 : : {
3431 : 9860910 : int extra_sets = added_sets_0 + added_sets_1 + added_sets_2;
3432 : 9860910 : combine_extras++;
3433 : :
3434 : 9860910 : if (GET_CODE (newpat) == PARALLEL)
3435 : : {
3436 : 2057101 : rtvec old = XVEC (newpat, 0);
3437 : 2057101 : total_sets = XVECLEN (newpat, 0) + extra_sets;
3438 : 2057101 : newpat = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (total_sets));
3439 : 2057101 : memcpy (XVEC (newpat, 0)->elem, &old->elem[0],
3440 : 2057101 : sizeof (old->elem[0]) * old->num_elem);
3441 : : }
3442 : : else
3443 : : {
3444 : 7803809 : rtx old = newpat;
3445 : 7803809 : total_sets = 1 + extra_sets;
3446 : 7803809 : newpat = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (total_sets));
3447 : 7803809 : XVECEXP (newpat, 0, 0) = old;
3448 : : }
3449 : :
3450 : 9860910 : if (added_sets_0)
3451 : 324621 : XVECEXP (newpat, 0, --total_sets) = i0pat;
3452 : :
3453 : 9860910 : if (added_sets_1)
3454 : : {
3455 : 3525019 : rtx t = i1pat;
3456 : 3525019 : if (i0_feeds_i1_n)
3457 : 219943 : t = subst (t, i0dest, i0src_copy ? i0src_copy : i0src,
3458 : : false, false, false);
3459 : :
3460 : 3525019 : XVECEXP (newpat, 0, --total_sets) = t;
3461 : : }
3462 : 9860910 : if (added_sets_2)
3463 : : {
3464 : 6778338 : rtx t = i2pat;
3465 : 6778338 : if (i1_feeds_i2_n)
3466 : 1373297 : t = subst (t, i1dest, i1src_copy ? i1src_copy : i1src, false, false,
3467 : 1373297 : i0_feeds_i1_n && i0dest_in_i0src);
3468 : 6778338 : if ((i0_feeds_i1_n && i1_feeds_i2_n) || i0_feeds_i2_n)
3469 : 297335 : t = subst (t, i0dest, i0src_copy2 ? i0src_copy2 : i0src,
3470 : : false, false, false);
3471 : :
3472 : 6778338 : XVECEXP (newpat, 0, --total_sets) = t;
3473 : : }
3474 : : }
3475 : :
3476 : 23566385 : validate_replacement:
3477 : :
3478 : : /* Note which hard regs this insn has as inputs. */
3479 : 30661712 : mark_used_regs_combine (newpat);
3480 : :
3481 : : /* If recog_for_combine fails, it strips existing clobbers. If we'll
3482 : : consider splitting this pattern, we might need these clobbers. */
3483 : 30661712 : if (i1 && GET_CODE (newpat) == PARALLEL
3484 : 6606274 : && GET_CODE (XVECEXP (newpat, 0, XVECLEN (newpat, 0) - 1)) == CLOBBER)
3485 : : {
3486 : 1608081 : int len = XVECLEN (newpat, 0);
3487 : :
3488 : 1608081 : newpat_vec_with_clobbers = rtvec_alloc (len);
3489 : 6481819 : for (i = 0; i < len; i++)
3490 : 3265657 : RTVEC_ELT (newpat_vec_with_clobbers, i) = XVECEXP (newpat, 0, i);
3491 : : }
3492 : :
3493 : : /* We have recognized nothing yet. */
3494 : 30661712 : insn_code_number = -1;
3495 : :
3496 : : /* See if this is a PARALLEL of two SETs where one SET's destination is
3497 : : a register that is unused and this isn't marked as an instruction that
3498 : : might trap in an EH region. In that case, we just need the other SET.
3499 : : We prefer this over the PARALLEL.
3500 : :
3501 : : This can occur when simplifying a divmod insn. We *must* test for this
3502 : : case here because the code below that splits two independent SETs doesn't
3503 : : handle this case correctly when it updates the register status.
3504 : :
3505 : : It's pointless doing this if we originally had two sets, one from
3506 : : i3, and one from i2. Combining then splitting the parallel results
3507 : : in the original i2 again plus an invalid insn (which we delete).
3508 : : The net effect is only to move instructions around, which makes
3509 : : debug info less accurate.
3510 : :
3511 : : If the remaining SET came from I2 its destination should not be used
3512 : : between I2 and I3. See PR82024. */
3513 : :
3514 : 6778338 : if (!(added_sets_2 && i1 == 0)
3515 : 25729947 : && is_parallel_of_n_reg_sets (newpat, 2)
3516 : 32142137 : && asm_noperands (newpat) < 0)
3517 : : {
3518 : 1479618 : rtx set0 = XVECEXP (newpat, 0, 0);
3519 : 1479618 : rtx set1 = XVECEXP (newpat, 0, 1);
3520 : 1479618 : rtx oldpat = newpat;
3521 : :
3522 : 1479618 : if (((REG_P (SET_DEST (set1))
3523 : 1479618 : && find_reg_note (i3, REG_UNUSED, SET_DEST (set1)))
3524 : 1439744 : || (GET_CODE (SET_DEST (set1)) == SUBREG
3525 : 0 : && find_reg_note (i3, REG_UNUSED, SUBREG_REG (SET_DEST (set1)))))
3526 : 39874 : && insn_nothrow_p (i3)
3527 : 1518255 : && !side_effects_p (SET_SRC (set1)))
3528 : : {
3529 : 38445 : newpat = set0;
3530 : 38445 : insn_code_number = recog_for_combine (&newpat, i3, &new_i3_notes);
3531 : : }
3532 : :
3533 : 1441173 : else if (((REG_P (SET_DEST (set0))
3534 : 1441173 : && find_reg_note (i3, REG_UNUSED, SET_DEST (set0)))
3535 : 1417577 : || (GET_CODE (SET_DEST (set0)) == SUBREG
3536 : 0 : && find_reg_note (i3, REG_UNUSED,
3537 : 0 : SUBREG_REG (SET_DEST (set0)))))
3538 : 23596 : && insn_nothrow_p (i3)
3539 : 1463969 : && !side_effects_p (SET_SRC (set0)))
3540 : : {
3541 : 22755 : rtx dest = SET_DEST (set1);
3542 : 22755 : if (GET_CODE (dest) == SUBREG)
3543 : 0 : dest = SUBREG_REG (dest);
3544 : 22755 : if (!reg_used_between_p (dest, i2, i3))
3545 : : {
3546 : 22754 : newpat = set1;
3547 : 22754 : insn_code_number = recog_for_combine (&newpat, i3, &new_i3_notes);
3548 : :
3549 : 22754 : if (insn_code_number >= 0)
3550 : : changed_i3_dest = true;
3551 : : }
3552 : : }
3553 : :
3554 : 38445 : if (insn_code_number < 0)
3555 : 1474394 : newpat = oldpat;
3556 : : }
3557 : :
3558 : : /* Is the result of combination a valid instruction? */
3559 : 1474394 : if (insn_code_number < 0)
3560 : 30656488 : insn_code_number = recog_for_combine (&newpat, i3, &new_i3_notes);
3561 : :
3562 : : /* If we were combining three insns and the result is a simple SET
3563 : : with no ASM_OPERANDS that wasn't recognized, try to split it into two
3564 : : insns. There are two ways to do this. It can be split using a
3565 : : machine-specific method (like when you have an addition of a large
3566 : : constant) or by combine in the function find_split_point. */
3567 : :
3568 : 9423617 : if (i1 && insn_code_number < 0 && GET_CODE (newpat) == SET
3569 : 35008930 : && asm_noperands (newpat) < 0)
3570 : : {
3571 : 4346776 : rtx parallel, *split;
3572 : 4346776 : rtx_insn *m_split_insn;
3573 : 4346776 : unsigned int old_nregs, new_nregs;
3574 : :
3575 : : /* See if the MD file can split NEWPAT. If it can't, see if letting it
3576 : : use I2DEST as a scratch register will help. In the latter case,
3577 : : convert I2DEST to the mode of the source of NEWPAT if we can. */
3578 : :
3579 : 4346776 : m_split_insn = combine_split_insns (newpat, i3, &old_nregs, &new_nregs);
3580 : :
3581 : : /* We can only use I2DEST as a scratch reg if it doesn't overlap any
3582 : : inputs of NEWPAT. */
3583 : :
3584 : : /* ??? If I2DEST is not safe, and I1DEST exists, then it would be
3585 : : possible to try that as a scratch reg. This would require adding
3586 : : more code to make it work though. */
3587 : :
3588 : 4346776 : if (m_split_insn == 0 && ! reg_overlap_mentioned_p (i2dest, newpat))
3589 : : {
3590 : 4220453 : machine_mode new_mode = GET_MODE (SET_DEST (newpat));
3591 : :
3592 : : /* ??? Reusing i2dest without resetting the reg_stat entry for it
3593 : : (temporarily, until we are committed to this instruction
3594 : : combination) does not work: for example, any call to nonzero_bits
3595 : : on the register (from a splitter in the MD file, for example)
3596 : : will get the old information, which is invalid.
3597 : :
3598 : : Since nowadays we can create registers during combine just fine,
3599 : : we should just create a new one here, not reuse i2dest. */
3600 : :
3601 : : /* First try to split using the original register as a
3602 : : scratch register. */
3603 : 4220453 : parallel = gen_rtx_PARALLEL (VOIDmode,
3604 : : gen_rtvec (2, newpat,
3605 : : gen_rtx_CLOBBER (VOIDmode,
3606 : : i2dest)));
3607 : 4220453 : m_split_insn = combine_split_insns (parallel, i3, &old_nregs, &new_nregs);
3608 : :
3609 : : /* If that didn't work, try changing the mode of I2DEST if
3610 : : we can. */
3611 : 4220453 : if (m_split_insn == 0
3612 : 4220453 : && new_mode != GET_MODE (i2dest)
3613 : 1559209 : && new_mode != VOIDmode
3614 : 5287490 : && can_change_dest_mode (i2dest, added_sets_2, new_mode))
3615 : : {
3616 : 807354 : machine_mode old_mode = GET_MODE (i2dest);
3617 : 807354 : rtx ni2dest;
3618 : :
3619 : 807354 : if (REGNO (i2dest) < FIRST_PSEUDO_REGISTER)
3620 : 10110 : ni2dest = gen_rtx_REG (new_mode, REGNO (i2dest));
3621 : : else
3622 : : {
3623 : 797244 : subst_mode (REGNO (i2dest), new_mode);
3624 : 797244 : ni2dest = regno_reg_rtx[REGNO (i2dest)];
3625 : : }
3626 : :
3627 : 807354 : parallel = (gen_rtx_PARALLEL
3628 : : (VOIDmode,
3629 : : gen_rtvec (2, newpat,
3630 : : gen_rtx_CLOBBER (VOIDmode,
3631 : : ni2dest))));
3632 : 807354 : m_split_insn = combine_split_insns (parallel, i3, &old_nregs, &new_nregs);
3633 : :
3634 : 807354 : if (m_split_insn == 0
3635 : 807354 : && REGNO (i2dest) >= FIRST_PSEUDO_REGISTER)
3636 : : {
3637 : 797244 : struct undo *buf;
3638 : :
3639 : 797244 : adjust_reg_mode (regno_reg_rtx[REGNO (i2dest)], old_mode);
3640 : 797244 : buf = undobuf.undos;
3641 : 797244 : undobuf.undos = buf->next;
3642 : 797244 : buf->next = undobuf.frees;
3643 : 797244 : undobuf.frees = buf;
3644 : : }
3645 : : }
3646 : :
3647 : 4220453 : i2scratch = m_split_insn != 0;
3648 : : }
3649 : :
3650 : : /* If recog_for_combine has discarded clobbers, try to use them
3651 : : again for the split. */
3652 : 4346776 : if (m_split_insn == 0 && newpat_vec_with_clobbers)
3653 : : {
3654 : 1559067 : parallel = gen_rtx_PARALLEL (VOIDmode, newpat_vec_with_clobbers);
3655 : 1559067 : m_split_insn = combine_split_insns (parallel, i3, &old_nregs, &new_nregs);
3656 : : }
3657 : :
3658 : 4358389 : if (m_split_insn && NEXT_INSN (m_split_insn) == NULL_RTX)
3659 : : {
3660 : 1367 : rtx m_split_pat = PATTERN (m_split_insn);
3661 : 1367 : insn_code_number = recog_for_combine (&m_split_pat, i3, &new_i3_notes,
3662 : : old_nregs, new_nregs);
3663 : 1367 : if (insn_code_number >= 0)
3664 : 163 : newpat = m_split_pat;
3665 : : }
3666 : 10246 : else if (m_split_insn && NEXT_INSN (NEXT_INSN (m_split_insn)) == NULL_RTX
3667 : 4355655 : && (next_nonnote_nondebug_insn (i2) == i3
3668 : 22 : || !modified_between_p (PATTERN (m_split_insn), i2, i3)))
3669 : : {
3670 : 10246 : rtx i2set, i3set;
3671 : 10246 : rtx newi3pat = PATTERN (NEXT_INSN (m_split_insn));
3672 : 10246 : newi2pat = PATTERN (m_split_insn);
3673 : :
3674 : 10246 : i3set = single_set (NEXT_INSN (m_split_insn));
3675 : 10246 : i2set = single_set (m_split_insn);
3676 : :
3677 : 10246 : i2_code_number = recog_for_combine (&newi2pat, i2, &new_i2_notes);
3678 : :
3679 : : /* If I2 or I3 has multiple SETs, we won't know how to track
3680 : : register status, so don't use these insns. If I2's destination
3681 : : is used between I2 and I3, we also can't use these insns. */
3682 : :
3683 : 10246 : if (i2_code_number >= 0 && i2set && i3set
3684 : 20492 : && (next_nonnote_nondebug_insn (i2) == i3
3685 : 22 : || ! reg_used_between_p (SET_DEST (i2set), i2, i3)))
3686 : 10246 : insn_code_number = recog_for_combine (&newi3pat, i3,
3687 : : &new_i3_notes,
3688 : : old_nregs, new_nregs);
3689 : 10246 : if (insn_code_number >= 0)
3690 : 10244 : newpat = newi3pat;
3691 : :
3692 : : /* It is possible that both insns now set the destination of I3.
3693 : : If so, we must show an extra use of it. */
3694 : :
3695 : 10246 : if (insn_code_number >= 0)
3696 : : {
3697 : 10244 : rtx new_i3_dest = SET_DEST (i3set);
3698 : 10244 : rtx new_i2_dest = SET_DEST (i2set);
3699 : :
3700 : 10244 : while (GET_CODE (new_i3_dest) == ZERO_EXTRACT
3701 : 10282 : || GET_CODE (new_i3_dest) == STRICT_LOW_PART
3702 : 20546 : || GET_CODE (new_i3_dest) == SUBREG)
3703 : 38 : new_i3_dest = XEXP (new_i3_dest, 0);
3704 : :
3705 : 10244 : while (GET_CODE (new_i2_dest) == ZERO_EXTRACT
3706 : 10244 : || GET_CODE (new_i2_dest) == STRICT_LOW_PART
3707 : 20488 : || GET_CODE (new_i2_dest) == SUBREG)
3708 : 0 : new_i2_dest = XEXP (new_i2_dest, 0);
3709 : :
3710 : 10244 : if (REG_P (new_i3_dest)
3711 : 6532 : && REG_P (new_i2_dest)
3712 : 6532 : && REGNO (new_i3_dest) == REGNO (new_i2_dest)
3713 : 10244 : && REGNO (new_i2_dest) < reg_n_sets_max)
3714 : 0 : INC_REG_N_SETS (REGNO (new_i2_dest), 1);
3715 : : }
3716 : : }
3717 : :
3718 : : /* If we can split it and use I2DEST, go ahead and see if that
3719 : : helps things be recognized. Verify that none of the registers
3720 : : are set between I2 and I3. */
3721 : 1206 : if (insn_code_number < 0
3722 : 4336369 : && (split = find_split_point (&newpat, i3, false)) != 0
3723 : : /* We need I2DEST in the proper mode. If it is a hard register
3724 : : or the only use of a pseudo, we can change its mode.
3725 : : Make sure we don't change a hard register to have a mode that
3726 : : isn't valid for it, or change the number of registers. */
3727 : 4131109 : && (GET_MODE (*split) == GET_MODE (i2dest)
3728 : 1512969 : || GET_MODE (*split) == VOIDmode
3729 : 1161177 : || can_change_dest_mode (i2dest, added_sets_2,
3730 : : GET_MODE (*split)))
3731 : 3537504 : && (next_nonnote_nondebug_insn (i2) == i3
3732 : 558807 : || !modified_between_p (*split, i2, i3))
3733 : : /* We can't overwrite I2DEST if its value is still used by
3734 : : NEWPAT. */
3735 : 3508739 : && ! reg_referenced_p (i2dest, newpat)
3736 : : /* We should not split a possibly trapping part when we
3737 : : care about non-call EH and have REG_EH_REGION notes
3738 : : to distribute. */
3739 : 7788398 : && ! (cfun->can_throw_non_call_exceptions
3740 : 373006 : && has_non_call_exception
3741 : 127 : && may_trap_p (*split)))
3742 : : {
3743 : 3442701 : rtx newdest = i2dest;
3744 : 3442701 : enum rtx_code split_code = GET_CODE (*split);
3745 : 3442701 : machine_mode split_mode = GET_MODE (*split);
3746 : 3442701 : bool subst_done = false;
3747 : 3442701 : newi2pat = NULL_RTX;
3748 : :
3749 : 3442701 : i2scratch = true;
3750 : :
3751 : : /* *SPLIT may be part of I2SRC, so make sure we have the
3752 : : original expression around for later debug processing.
3753 : : We should not need I2SRC any more in other cases. */
3754 : 3442701 : if (MAY_HAVE_DEBUG_BIND_INSNS)
3755 : 1657916 : i2src = copy_rtx (i2src);
3756 : : else
3757 : 1784785 : i2src = NULL;
3758 : :
3759 : : /* Get NEWDEST as a register in the proper mode. We have already
3760 : : validated that we can do this. */
3761 : 3442701 : if (GET_MODE (i2dest) != split_mode && split_mode != VOIDmode)
3762 : : {
3763 : 565534 : if (REGNO (i2dest) < FIRST_PSEUDO_REGISTER)
3764 : 0 : newdest = gen_rtx_REG (split_mode, REGNO (i2dest));
3765 : : else
3766 : : {
3767 : 565534 : subst_mode (REGNO (i2dest), split_mode);
3768 : 565534 : newdest = regno_reg_rtx[REGNO (i2dest)];
3769 : : }
3770 : : }
3771 : :
3772 : : /* If *SPLIT is a (mult FOO (const_int pow2)), convert it to
3773 : : an ASHIFT. This can occur if it was inside a PLUS and hence
3774 : : appeared to be a memory address. This is a kludge. */
3775 : 3442701 : if (split_code == MULT
3776 : 207298 : && CONST_INT_P (XEXP (*split, 1))
3777 : 110640 : && INTVAL (XEXP (*split, 1)) > 0
3778 : 3548542 : && (i = exact_log2 (UINTVAL (XEXP (*split, 1)))) >= 0)
3779 : : {
3780 : 72839 : rtx i_rtx = gen_int_shift_amount (split_mode, i);
3781 : 72839 : SUBST (*split, gen_rtx_ASHIFT (split_mode,
3782 : : XEXP (*split, 0), i_rtx));
3783 : : /* Update split_code because we may not have a multiply
3784 : : anymore. */
3785 : 72839 : split_code = GET_CODE (*split);
3786 : : }
3787 : :
3788 : : /* Similarly for (plus (mult FOO (const_int pow2))). */
3789 : 3442701 : if (split_code == PLUS
3790 : 655613 : && GET_CODE (XEXP (*split, 0)) == MULT
3791 : 110384 : && CONST_INT_P (XEXP (XEXP (*split, 0), 1))
3792 : 40229 : && INTVAL (XEXP (XEXP (*split, 0), 1)) > 0
3793 : 3478524 : && (i = exact_log2 (UINTVAL (XEXP (XEXP (*split, 0), 1)))) >= 0)
3794 : : {
3795 : 6864 : rtx nsplit = XEXP (*split, 0);
3796 : 6864 : rtx i_rtx = gen_int_shift_amount (GET_MODE (nsplit), i);
3797 : 6864 : SUBST (XEXP (*split, 0), gen_rtx_ASHIFT (GET_MODE (nsplit),
3798 : : XEXP (nsplit, 0),
3799 : : i_rtx));
3800 : : /* Update split_code because we may not have a multiply
3801 : : anymore. */
3802 : 6864 : split_code = GET_CODE (*split);
3803 : : }
3804 : :
3805 : : #ifdef INSN_SCHEDULING
3806 : : /* If *SPLIT is a paradoxical SUBREG, when we split it, it should
3807 : : be written as a ZERO_EXTEND. */
3808 : 3442701 : if (split_code == SUBREG && MEM_P (SUBREG_REG (*split)))
3809 : : {
3810 : : /* Or as a SIGN_EXTEND if LOAD_EXTEND_OP says that that's
3811 : : what it really is. */
3812 : 9772 : if (load_extend_op (GET_MODE (SUBREG_REG (*split)))
3813 : : == SIGN_EXTEND)
3814 : : SUBST (*split, gen_rtx_SIGN_EXTEND (split_mode,
3815 : : SUBREG_REG (*split)));
3816 : : else
3817 : 9772 : SUBST (*split, gen_rtx_ZERO_EXTEND (split_mode,
3818 : : SUBREG_REG (*split)));
3819 : : }
3820 : : #endif
3821 : :
3822 : : /* Attempt to split binary operators using arithmetic identities. */
3823 : 3442701 : if (BINARY_P (SET_SRC (newpat))
3824 : 2871044 : && split_mode == GET_MODE (SET_SRC (newpat))
3825 : 5401153 : && ! side_effects_p (SET_SRC (newpat)))
3826 : : {
3827 : 1941460 : rtx setsrc = SET_SRC (newpat);
3828 : 1941460 : machine_mode mode = GET_MODE (setsrc);
3829 : 1941460 : enum rtx_code code = GET_CODE (setsrc);
3830 : 1941460 : rtx src_op0 = XEXP (setsrc, 0);
3831 : 1941460 : rtx src_op1 = XEXP (setsrc, 1);
3832 : :
3833 : : /* Split "X = Y op Y" as "Z = Y; X = Z op Z". */
3834 : 1941460 : if (rtx_equal_p (src_op0, src_op1))
3835 : : {
3836 : 1435 : newi2pat = gen_rtx_SET (newdest, src_op0);
3837 : 1435 : SUBST (XEXP (setsrc, 0), newdest);
3838 : 1435 : SUBST (XEXP (setsrc, 1), newdest);
3839 : 1435 : subst_done = true;
3840 : : }
3841 : : /* Split "((P op Q) op R) op S" where op is PLUS or MULT. */
3842 : 1940025 : else if ((code == PLUS || code == MULT)
3843 : 937743 : && GET_CODE (src_op0) == code
3844 : 401174 : && GET_CODE (XEXP (src_op0, 0)) == code
3845 : 172187 : && (INTEGRAL_MODE_P (mode)
3846 : : || (FLOAT_MODE_P (mode)
3847 : 98888 : && flag_unsafe_math_optimizations)))
3848 : : {
3849 : 77452 : rtx p = XEXP (XEXP (src_op0, 0), 0);
3850 : 77452 : rtx q = XEXP (XEXP (src_op0, 0), 1);
3851 : 77452 : rtx r = XEXP (src_op0, 1);
3852 : 77452 : rtx s = src_op1;
3853 : :
3854 : : /* Split both "((X op Y) op X) op Y" and
3855 : : "((X op Y) op Y) op X" as "T op T" where T is
3856 : : "X op Y". */
3857 : 77660 : if ((rtx_equal_p (p,r) && rtx_equal_p (q,s))
3858 : 77591 : || (rtx_equal_p (p,s) && rtx_equal_p (q,r)))
3859 : : {
3860 : 69 : newi2pat = gen_rtx_SET (newdest, XEXP (src_op0, 0));
3861 : 69 : SUBST (XEXP (setsrc, 0), newdest);
3862 : 69 : SUBST (XEXP (setsrc, 1), newdest);
3863 : 69 : subst_done = true;
3864 : : }
3865 : : /* Split "((X op X) op Y) op Y)" as "T op T" where
3866 : : T is "X op Y". */
3867 : 77383 : else if (rtx_equal_p (p,q) && rtx_equal_p (r,s))
3868 : : {
3869 : 25 : rtx tmp = simplify_gen_binary (code, mode, p, r);
3870 : 25 : newi2pat = gen_rtx_SET (newdest, tmp);
3871 : 25 : SUBST (XEXP (setsrc, 0), newdest);
3872 : 25 : SUBST (XEXP (setsrc, 1), newdest);
3873 : 25 : subst_done = true;
3874 : : }
3875 : : }
3876 : : }
3877 : :
3878 : 1529 : if (!subst_done)
3879 : : {
3880 : 3441172 : newi2pat = gen_rtx_SET (newdest, *split);
3881 : 3441172 : SUBST (*split, newdest);
3882 : : }
3883 : :
3884 : 3442701 : i2_code_number = recog_for_combine (&newi2pat, i2, &new_i2_notes);
3885 : :
3886 : : /* recog_for_combine might have added CLOBBERs to newi2pat.
3887 : : Make sure NEWPAT does not depend on the clobbered regs. */
3888 : 3442701 : if (GET_CODE (newi2pat) == PARALLEL)
3889 : 2431253 : for (i = XVECLEN (newi2pat, 0) - 1; i >= 0; i--)
3890 : 1631701 : if (GET_CODE (XVECEXP (newi2pat, 0, i)) == CLOBBER)
3891 : : {
3892 : 832149 : rtx reg = XEXP (XVECEXP (newi2pat, 0, i), 0);
3893 : 832149 : if (reg_overlap_mentioned_p (reg, newpat))
3894 : : {
3895 : 16532 : undo_all ();
3896 : 16532 : return 0;
3897 : : }
3898 : : }
3899 : :
3900 : : /* If the split point was a MULT and we didn't have one before,
3901 : : don't use one now. */
3902 : 3426169 : if (i2_code_number >= 0 && ! (split_code == MULT && ! have_mult))
3903 : 2081122 : insn_code_number = recog_for_combine (&newpat, i3, &new_i3_notes);
3904 : : }
3905 : : }
3906 : :
3907 : : /* Check for a case where we loaded from memory in a narrow mode and
3908 : : then sign extended it, but we need both registers. In that case,
3909 : : we have a PARALLEL with both loads from the same memory location.
3910 : : We can split this into a load from memory followed by a register-register
3911 : : copy. This saves at least one insn, more if register allocation can
3912 : : eliminate the copy.
3913 : :
3914 : : We cannot do this if the involved modes have more than one elements,
3915 : : like for vector or complex modes.
3916 : :
3917 : : We cannot do this if the destination of the first assignment is a
3918 : : condition code register. We eliminate this case by making sure
3919 : : the SET_DEST and SET_SRC have the same mode.
3920 : :
3921 : : We cannot do this if the destination of the second assignment is
3922 : : a register that we have already assumed is zero-extended. Similarly
3923 : : for a SUBREG of such a register. */
3924 : :
3925 : 5076841 : else if (i1 && insn_code_number < 0 && asm_noperands (newpat) < 0
3926 : 5027316 : && GET_CODE (newpat) == PARALLEL
3927 : 5025231 : && XVECLEN (newpat, 0) == 2
3928 : 4206190 : && GET_CODE (XVECEXP (newpat, 0, 0)) == SET
3929 : 4205826 : && GET_CODE (SET_SRC (XVECEXP (newpat, 0, 0))) == SIGN_EXTEND
3930 : 23832 : && (GET_MODE (SET_DEST (XVECEXP (newpat, 0, 0)))
3931 : 23832 : == GET_MODE (SET_SRC (XVECEXP (newpat, 0, 0))))
3932 : 23832 : && ! VECTOR_MODE_P (GET_MODE (SET_DEST (XVECEXP (newpat, 0, 0))))
3933 : : && ! COMPLEX_MODE_P (GET_MODE (SET_DEST (XVECEXP (newpat, 0, 0))))
3934 : 22528 : && GET_CODE (XVECEXP (newpat, 0, 1)) == SET
3935 : 22528 : && rtx_equal_p (SET_SRC (XVECEXP (newpat, 0, 1)),
3936 : 22528 : XEXP (SET_SRC (XVECEXP (newpat, 0, 0)), 0))
3937 : 5965 : && !modified_between_p (SET_SRC (XVECEXP (newpat, 0, 1)), i2, i3)
3938 : 5965 : && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) != ZERO_EXTRACT
3939 : 5965 : && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) != STRICT_LOW_PART
3940 : 5965 : && ! (temp_expr = SET_DEST (XVECEXP (newpat, 0, 1)),
3941 : : (REG_P (temp_expr)
3942 : 5965 : && reg_stat[REGNO (temp_expr)].nonzero_bits != 0
3943 : 6053 : && known_lt (GET_MODE_PRECISION (GET_MODE (temp_expr)),
3944 : : BITS_PER_WORD)
3945 : 5836 : && known_lt (GET_MODE_PRECISION (GET_MODE (temp_expr)),
3946 : : HOST_BITS_PER_INT)
3947 : 1089 : && (reg_stat[REGNO (temp_expr)].nonzero_bits
3948 : 1089 : != GET_MODE_MASK (word_mode))))
3949 : 5949 : && ! (GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) == SUBREG
3950 : 0 : && (temp_expr = SUBREG_REG (SET_DEST (XVECEXP (newpat, 0, 1))),
3951 : 0 : (REG_P (temp_expr)
3952 : 0 : && reg_stat[REGNO (temp_expr)].nonzero_bits != 0
3953 : 0 : && known_lt (GET_MODE_PRECISION (GET_MODE (temp_expr)),
3954 : : BITS_PER_WORD)
3955 : 0 : && known_lt (GET_MODE_PRECISION (GET_MODE (temp_expr)),
3956 : : HOST_BITS_PER_INT)
3957 : 0 : && (reg_stat[REGNO (temp_expr)].nonzero_bits
3958 : 0 : != GET_MODE_MASK (word_mode)))))
3959 : 5949 : && ! reg_overlap_mentioned_p (SET_DEST (XVECEXP (newpat, 0, 1)),
3960 : 5949 : SET_SRC (XVECEXP (newpat, 0, 1)))
3961 : 26320796 : && ! find_reg_note (i3, REG_UNUSED,
3962 : 5860 : SET_DEST (XVECEXP (newpat, 0, 0))))
3963 : : {
3964 : 5860 : rtx ni2dest;
3965 : :
3966 : 5860 : newi2pat = XVECEXP (newpat, 0, 0);
3967 : 5860 : ni2dest = SET_DEST (XVECEXP (newpat, 0, 0));
3968 : 5860 : newpat = XVECEXP (newpat, 0, 1);
3969 : 5860 : SUBST (SET_SRC (newpat),
3970 : : gen_lowpart (GET_MODE (SET_SRC (newpat)), ni2dest));
3971 : 5860 : i2_code_number = recog_for_combine (&newi2pat, i2, &new_i2_notes);
3972 : :
3973 : 5860 : if (i2_code_number >= 0)
3974 : 0 : insn_code_number = recog_for_combine (&newpat, i3, &new_i3_notes);
3975 : :
3976 : 5860 : if (insn_code_number >= 0)
3977 : : swap_i2i3 = 1;
3978 : : }
3979 : :
3980 : : /* Similarly, check for a case where we have a PARALLEL of two independent
3981 : : SETs but we started with three insns. In this case, we can do the sets
3982 : : as two separate insns. This case occurs when some SET allows two
3983 : : other insns to combine, but the destination of that SET is still live.
3984 : :
3985 : : Also do this if we started with two insns and (at least) one of the
3986 : : resulting sets is a noop; this noop will be deleted later.
3987 : :
3988 : : Also do this if we started with two insns neither of which was a simple
3989 : : move. */
3990 : :
3991 : 22559628 : else if (insn_code_number < 0 && asm_noperands (newpat) < 0
3992 : 22542868 : && GET_CODE (newpat) == PARALLEL
3993 : 10084911 : && XVECLEN (newpat, 0) == 2
3994 : 9154852 : && GET_CODE (XVECEXP (newpat, 0, 0)) == SET
3995 : 9054345 : && GET_CODE (XVECEXP (newpat, 0, 1)) == SET
3996 : 9003961 : && (i1
3997 : 4821018 : || set_noop_p (XVECEXP (newpat, 0, 0))
3998 : 4820662 : || set_noop_p (XVECEXP (newpat, 0, 1))
3999 : 4820654 : || (!i2_was_move && !i3_was_move))
4000 : 5899878 : && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 0))) != ZERO_EXTRACT
4001 : 5899095 : && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 0))) != STRICT_LOW_PART
4002 : 5898840 : && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) != ZERO_EXTRACT
4003 : 5898274 : && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) != STRICT_LOW_PART
4004 : 5898264 : && ! reg_referenced_p (SET_DEST (XVECEXP (newpat, 0, 1)),
4005 : : XVECEXP (newpat, 0, 0))
4006 : 4877327 : && ! reg_referenced_p (SET_DEST (XVECEXP (newpat, 0, 0)),
4007 : 4877327 : XVECEXP (newpat, 0, 1))
4008 : 31470357 : && ! (contains_muldiv (SET_SRC (XVECEXP (newpat, 0, 0)))
4009 : 408400 : && contains_muldiv (SET_SRC (XVECEXP (newpat, 0, 1)))))
4010 : : {
4011 : 4538467 : rtx set0 = XVECEXP (newpat, 0, 0);
4012 : 4538467 : rtx set1 = XVECEXP (newpat, 0, 1);
4013 : :
4014 : : /* Normally, it doesn't matter which of the two is done first, but
4015 : : one which uses any regs/memory set in between i2 and i3 can't
4016 : : be first. The PARALLEL might also have been pre-existing in i3,
4017 : : so we need to make sure that we won't wrongly hoist a SET to i2
4018 : : that would conflict with a death note present in there, or would
4019 : : have its dest modified between i2 and i3. */
4020 : 4538467 : if (!modified_between_p (SET_SRC (set1), i2, i3)
4021 : 9046119 : && !(REG_P (SET_DEST (set1))
4022 : 4515880 : && find_reg_note (i2, REG_DEAD, SET_DEST (set1)))
4023 : 4544095 : && !(GET_CODE (SET_DEST (set1)) == SUBREG
4024 : 14359 : && find_reg_note (i2, REG_DEAD,
4025 : 14359 : SUBREG_REG (SET_DEST (set1))))
4026 : 4529736 : && !modified_between_p (SET_DEST (set1), i2, i3)
4027 : : /* If I3 is a jump, ensure that set0 is a jump so that
4028 : : we do not create invalid RTL. */
4029 : 9068203 : && (!JUMP_P (i3) || SET_DEST (set0) == pc_rtx)
4030 : : )
4031 : : {
4032 : 4529736 : newi2pat = set1;
4033 : 4529736 : newpat = set0;
4034 : : }
4035 : 8731 : else if (!modified_between_p (SET_SRC (set0), i2, i3)
4036 : 1006 : && !(REG_P (SET_DEST (set0))
4037 : 503 : && find_reg_note (i2, REG_DEAD, SET_DEST (set0)))
4038 : 503 : && !(GET_CODE (SET_DEST (set0)) == SUBREG
4039 : 0 : && find_reg_note (i2, REG_DEAD,
4040 : 0 : SUBREG_REG (SET_DEST (set0))))
4041 : 503 : && !modified_between_p (SET_DEST (set0), i2, i3)
4042 : : /* If I3 is a jump, ensure that set1 is a jump so that
4043 : : we do not create invalid RTL. */
4044 : 9233 : && (!JUMP_P (i3) || SET_DEST (set1) == pc_rtx)
4045 : : )
4046 : : {
4047 : 502 : newi2pat = set0;
4048 : 502 : newpat = set1;
4049 : : }
4050 : : else
4051 : : {
4052 : 8229 : undo_all ();
4053 : 8229 : return 0;
4054 : : }
4055 : :
4056 : 4530238 : i2_code_number = recog_for_combine (&newi2pat, i2, &new_i2_notes);
4057 : :
4058 : 4530238 : if (i2_code_number >= 0)
4059 : : {
4060 : : /* recog_for_combine might have added CLOBBERs to newi2pat.
4061 : : Make sure NEWPAT does not depend on the clobbered regs. */
4062 : 3328382 : if (GET_CODE (newi2pat) == PARALLEL)
4063 : : {
4064 : 1244307 : for (i = XVECLEN (newi2pat, 0) - 1; i >= 0; i--)
4065 : 833072 : if (GET_CODE (XVECEXP (newi2pat, 0, i)) == CLOBBER)
4066 : : {
4067 : 421837 : rtx reg = XEXP (XVECEXP (newi2pat, 0, i), 0);
4068 : 421837 : if (reg_overlap_mentioned_p (reg, newpat))
4069 : : {
4070 : 2450 : undo_all ();
4071 : 2450 : return 0;
4072 : : }
4073 : : }
4074 : : }
4075 : :
4076 : 3325932 : insn_code_number = recog_for_combine (&newpat, i3, &new_i3_notes);
4077 : :
4078 : : /* Likewise, recog_for_combine might have added clobbers to NEWPAT.
4079 : : Checking that the SET0's SET_DEST and SET1's SET_DEST aren't
4080 : : mentioned/clobbered, ensures NEWI2PAT's SET_DEST is live. */
4081 : 3325932 : if (insn_code_number >= 0 && GET_CODE (newpat) == PARALLEL)
4082 : : {
4083 : 66915 : for (i = XVECLEN (newpat, 0) - 1; i >= 0; i--)
4084 : 44620 : if (GET_CODE (XVECEXP (newpat, 0, i)) == CLOBBER)
4085 : : {
4086 : 22325 : rtx reg = XEXP (XVECEXP (newpat, 0, i), 0);
4087 : 22325 : if (reg_overlap_mentioned_p (reg, SET_DEST (set0))
4088 : 22325 : || reg_overlap_mentioned_p (reg, SET_DEST (set1)))
4089 : : {
4090 : 0 : undo_all ();
4091 : 0 : return 0;
4092 : : }
4093 : : }
4094 : : }
4095 : :
4096 : : if (insn_code_number >= 0)
4097 : : split_i2i3 = true;
4098 : : }
4099 : : }
4100 : :
4101 : : /* If it still isn't recognized, fail and change things back the way they
4102 : : were. */
4103 : 27302709 : if ((insn_code_number < 0
4104 : : /* Is the result a reasonable ASM_OPERANDS? */
4105 : 30486583 : && (! check_asm_operands (newpat) || added_sets_1 || added_sets_2)))
4106 : : {
4107 : 26680753 : undo_all ();
4108 : 26680753 : return 0;
4109 : : }
4110 : :
4111 : : /* If we had to change another insn, make sure it is valid also. */
4112 : 3953748 : if (undobuf.other_insn)
4113 : : {
4114 : 209937 : CLEAR_HARD_REG_SET (newpat_used_regs);
4115 : :
4116 : 209937 : other_pat = PATTERN (undobuf.other_insn);
4117 : 209937 : other_code_number = recog_for_combine (&other_pat, undobuf.other_insn,
4118 : : &new_other_notes);
4119 : :
4120 : 209937 : if (other_code_number < 0 && ! check_asm_operands (other_pat))
4121 : : {
4122 : 7937 : undo_all ();
4123 : 7937 : return 0;
4124 : : }
4125 : : }
4126 : :
4127 : : /* Only allow this combination if insn_cost reports that the
4128 : : replacement instructions are cheaper than the originals. */
4129 : 3945811 : if (!combine_validate_cost (i0, i1, i2, i3, newpat, newi2pat, other_pat))
4130 : : {
4131 : 206246 : undo_all ();
4132 : 206246 : return 0;
4133 : : }
4134 : :
4135 : 3739565 : if (MAY_HAVE_DEBUG_BIND_INSNS)
4136 : : {
4137 : 2103636 : struct undo *undo;
4138 : :
4139 : 6353487 : for (undo = undobuf.undos; undo; undo = undo->next)
4140 : 4249851 : if (undo->kind == UNDO_MODE)
4141 : : {
4142 : 2953 : rtx reg = regno_reg_rtx[undo->where.regno];
4143 : 2953 : machine_mode new_mode = GET_MODE (reg);
4144 : 2953 : machine_mode old_mode = undo->old_contents.m;
4145 : :
4146 : : /* Temporarily revert mode back. */
4147 : 2953 : adjust_reg_mode (reg, old_mode);
4148 : :
4149 : 2953 : if (reg == i2dest && i2scratch)
4150 : : {
4151 : : /* If we used i2dest as a scratch register with a
4152 : : different mode, substitute it for the original
4153 : : i2src while its original mode is temporarily
4154 : : restored, and then clear i2scratch so that we don't
4155 : : do it again later. */
4156 : 2953 : propagate_for_debug (i2, last_combined_insn, reg, i2src,
4157 : : this_basic_block);
4158 : 2953 : i2scratch = false;
4159 : : /* Put back the new mode. */
4160 : 2953 : adjust_reg_mode (reg, new_mode);
4161 : : }
4162 : : else
4163 : : {
4164 : 0 : rtx tempreg = gen_raw_REG (old_mode, REGNO (reg));
4165 : 0 : rtx_insn *first, *last;
4166 : :
4167 : 0 : if (reg == i2dest)
4168 : : {
4169 : : first = i2;
4170 : : last = last_combined_insn;
4171 : : }
4172 : : else
4173 : : {
4174 : 0 : first = i3;
4175 : 0 : last = undobuf.other_insn;
4176 : 0 : gcc_assert (last);
4177 : 0 : if (DF_INSN_LUID (last)
4178 : 0 : < DF_INSN_LUID (last_combined_insn))
4179 : 0 : last = last_combined_insn;
4180 : : }
4181 : :
4182 : : /* We're dealing with a reg that changed mode but not
4183 : : meaning, so we want to turn it into a subreg for
4184 : : the new mode. However, because of REG sharing and
4185 : : because its mode had already changed, we have to do
4186 : : it in two steps. First, replace any debug uses of
4187 : : reg, with its original mode temporarily restored,
4188 : : with this copy we have created; then, replace the
4189 : : copy with the SUBREG of the original shared reg,
4190 : : once again changed to the new mode. */
4191 : 0 : propagate_for_debug (first, last, reg, tempreg,
4192 : : this_basic_block);
4193 : 0 : adjust_reg_mode (reg, new_mode);
4194 : 0 : propagate_for_debug (first, last, tempreg,
4195 : : lowpart_subreg (old_mode, reg, new_mode),
4196 : : this_basic_block);
4197 : : }
4198 : : }
4199 : : }
4200 : :
4201 : : /* If we will be able to accept this, we have made a
4202 : : change to the destination of I3. This requires us to
4203 : : do a few adjustments. */
4204 : :
4205 : 3739565 : if (changed_i3_dest)
4206 : : {
4207 : 16157 : PATTERN (i3) = newpat;
4208 : 16157 : adjust_for_new_dest (i3);
4209 : : }
4210 : :
4211 : : /* If I2 didn't change, this is not a combination (but a simplification or
4212 : : canonicalisation with context), which should not be done here. Doing
4213 : : it here explodes the algorithm. Don't. */
4214 : 3739565 : if (rtx_equal_p (newi2pat, PATTERN (i2)))
4215 : : {
4216 : 28061 : if (dump_file)
4217 : 1 : fprintf (dump_file, "i2 didn't change, not doing this\n");
4218 : 28061 : undo_all ();
4219 : 28061 : return 0;
4220 : : }
4221 : :
4222 : : /* We now know that we can do this combination. Merge the insns and
4223 : : update the status of registers and LOG_LINKS. */
4224 : :
4225 : 3711504 : if (undobuf.other_insn)
4226 : : {
4227 : 201634 : rtx note, next;
4228 : :
4229 : 201634 : PATTERN (undobuf.other_insn) = other_pat;
4230 : :
4231 : : /* If any of the notes in OTHER_INSN were REG_DEAD or REG_UNUSED,
4232 : : ensure that they are still valid. Then add any non-duplicate
4233 : : notes added by recog_for_combine. */
4234 : 602358 : for (note = REG_NOTES (undobuf.other_insn); note; note = next)
4235 : : {
4236 : 400724 : next = XEXP (note, 1);
4237 : :
4238 : 400724 : if ((REG_NOTE_KIND (note) == REG_DEAD
4239 : 204256 : && !reg_referenced_p (XEXP (note, 0),
4240 : 204256 : PATTERN (undobuf.other_insn)))
4241 : 396754 : ||(REG_NOTE_KIND (note) == REG_UNUSED
4242 : 28 : && !reg_set_p (XEXP (note, 0),
4243 : 28 : PATTERN (undobuf.other_insn)))
4244 : : /* Simply drop equal note since it may be no longer valid
4245 : : for other_insn. It may be possible to record that CC
4246 : : register is changed and only discard those notes, but
4247 : : in practice it's unnecessary complication and doesn't
4248 : : give any meaningful improvement.
4249 : :
4250 : : See PR78559. */
4251 : 396754 : || REG_NOTE_KIND (note) == REG_EQUAL
4252 : 797338 : || REG_NOTE_KIND (note) == REG_EQUIV)
4253 : 4110 : remove_note (undobuf.other_insn, note);
4254 : : }
4255 : :
4256 : 201634 : distribute_notes (new_other_notes, undobuf.other_insn,
4257 : : undobuf.other_insn, NULL, NULL_RTX, NULL_RTX,
4258 : : NULL_RTX);
4259 : : }
4260 : :
4261 : 3711504 : if (swap_i2i3)
4262 : : {
4263 : : /* I3 now uses what used to be its destination and which is now
4264 : : I2's destination. This requires us to do a few adjustments. */
4265 : 0 : PATTERN (i3) = newpat;
4266 : 0 : adjust_for_new_dest (i3);
4267 : : }
4268 : :
4269 : 3711504 : if (swap_i2i3 || split_i2i3)
4270 : : {
4271 : : /* We might need a LOG_LINK from I3 to I2. But then we used to
4272 : : have one, so we still will.
4273 : :
4274 : : However, some later insn might be using I2's dest and have
4275 : : a LOG_LINK pointing at I3. We should change it to point at
4276 : : I2 instead. */
4277 : :
4278 : : /* newi2pat is usually a SET here; however, recog_for_combine might
4279 : : have added some clobbers. */
4280 : 25574 : rtx x = newi2pat;
4281 : 25574 : if (GET_CODE (x) == PARALLEL)
4282 : 1048 : x = XVECEXP (newi2pat, 0, 0);
4283 : :
4284 : 25574 : if (REG_P (SET_DEST (x))
4285 : 0 : || (GET_CODE (SET_DEST (x)) == SUBREG
4286 : 0 : && REG_P (SUBREG_REG (SET_DEST (x)))))
4287 : : {
4288 : 25574 : unsigned int regno = reg_or_subregno (SET_DEST (x));
4289 : :
4290 : 25574 : bool done = false;
4291 : 470539 : for (rtx_insn *insn = NEXT_INSN (i3);
4292 : 470539 : !done
4293 : 470539 : && insn
4294 : 469195 : && INSN_P (insn)
4295 : 915504 : && BLOCK_FOR_INSN (insn) == this_basic_block;
4296 : 444965 : insn = NEXT_INSN (insn))
4297 : : {
4298 : 444965 : if (DEBUG_INSN_P (insn))
4299 : 155853 : continue;
4300 : 289112 : struct insn_link *link;
4301 : 539873 : FOR_EACH_LOG_LINK (link, insn)
4302 : 250771 : if (link->insn == i3 && link->regno == regno)
4303 : : {
4304 : 10 : link->insn = i2;
4305 : 10 : done = true;
4306 : 10 : break;
4307 : : }
4308 : : }
4309 : : }
4310 : : }
4311 : :
4312 : 3711504 : {
4313 : 3711504 : rtx i3notes, i2notes, i1notes = 0, i0notes = 0;
4314 : 3711504 : struct insn_link *i3links, *i2links, *i1links = 0, *i0links = 0;
4315 : 3711504 : rtx midnotes = 0;
4316 : 3711504 : int from_luid;
4317 : : /* Compute which registers we expect to eliminate. newi2pat may be setting
4318 : : either i3dest or i2dest, so we must check it. */
4319 : 67672 : rtx elim_i2 = ((newi2pat && reg_set_p (i2dest, newi2pat))
4320 : 3656851 : || i2dest_in_i2src || i2dest_in_i1src || i2dest_in_i0src
4321 : 3573635 : || !i2dest_killed
4322 : 7284091 : ? 0 : i2dest);
4323 : : /* For i1, we need to compute both local elimination and global
4324 : : elimination information with respect to newi2pat because i1dest
4325 : : may be the same as i3dest, in which case newi2pat may be setting
4326 : : i1dest. Global information is used when distributing REG_DEAD
4327 : : note for i2 and i3, in which case it does matter if newi2pat sets
4328 : : i1dest or not.
4329 : :
4330 : : Local information is used when distributing REG_DEAD note for i1,
4331 : : in which case it doesn't matter if newi2pat sets i1dest or not.
4332 : : See PR62151, if we have four insns combination:
4333 : : i0: r0 <- i0src
4334 : : i1: r1 <- i1src (using r0)
4335 : : REG_DEAD (r0)
4336 : : i2: r0 <- i2src (using r1)
4337 : : i3: r3 <- i3src (using r0)
4338 : : ix: using r0
4339 : : From i1's point of view, r0 is eliminated, no matter if it is set
4340 : : by newi2pat or not. In other words, REG_DEAD info for r0 in i1
4341 : : should be discarded.
4342 : :
4343 : : Note local information only affects cases in forms like "I1->I2->I3",
4344 : : "I0->I1->I2->I3" or "I0&I1->I2, I2->I3". For other cases like
4345 : : "I0->I1, I1&I2->I3" or "I1&I2->I3", newi2pat won't set i1dest or
4346 : : i0dest anyway. */
4347 : 93824 : rtx local_elim_i1 = (i1 == 0 || i1dest_in_i1src || i1dest_in_i0src
4348 : 93762 : || !i1dest_killed
4349 : 3711504 : ? 0 : i1dest);
4350 : 93762 : rtx elim_i1 = (local_elim_i1 == 0
4351 : 93762 : || (newi2pat && reg_set_p (i1dest, newi2pat))
4352 : 93762 : ? 0 : i1dest);
4353 : : /* Same case as i1. */
4354 : 5242 : rtx local_elim_i0 = (i0 == 0 || i0dest_in_i0src || !i0dest_killed
4355 : 3711504 : ? 0 : i0dest);
4356 : 5224 : rtx elim_i0 = (local_elim_i0 == 0
4357 : 5224 : || (newi2pat && reg_set_p (i0dest, newi2pat))
4358 : 5224 : ? 0 : i0dest);
4359 : :
4360 : : /* Get the old REG_NOTES and LOG_LINKS from all our insns and
4361 : : clear them. */
4362 : 3711504 : i3notes = REG_NOTES (i3), i3links = LOG_LINKS (i3);
4363 : 3711504 : i2notes = REG_NOTES (i2), i2links = LOG_LINKS (i2);
4364 : 3711504 : if (i1)
4365 : 93824 : i1notes = REG_NOTES (i1), i1links = LOG_LINKS (i1);
4366 : 3711504 : if (i0)
4367 : 5242 : i0notes = REG_NOTES (i0), i0links = LOG_LINKS (i0);
4368 : :
4369 : : /* Ensure that we do not have something that should not be shared but
4370 : : occurs multiple times in the new insns. Check this by first
4371 : : resetting all the `used' flags and then copying anything is shared. */
4372 : :
4373 : 3711504 : reset_used_flags (i3notes);
4374 : 3711504 : reset_used_flags (i2notes);
4375 : 3711504 : reset_used_flags (i1notes);
4376 : 3711504 : reset_used_flags (i0notes);
4377 : 3711504 : reset_used_flags (newpat);
4378 : 3711504 : reset_used_flags (newi2pat);
4379 : 3711504 : if (undobuf.other_insn)
4380 : 201634 : reset_used_flags (PATTERN (undobuf.other_insn));
4381 : :
4382 : 3711504 : i3notes = copy_rtx_if_shared (i3notes);
4383 : 3711504 : i2notes = copy_rtx_if_shared (i2notes);
4384 : 3711504 : i1notes = copy_rtx_if_shared (i1notes);
4385 : 3711504 : i0notes = copy_rtx_if_shared (i0notes);
4386 : 3711504 : newpat = copy_rtx_if_shared (newpat);
4387 : 3711504 : newi2pat = copy_rtx_if_shared (newi2pat);
4388 : 3711504 : if (undobuf.other_insn)
4389 : 201634 : reset_used_flags (PATTERN (undobuf.other_insn));
4390 : :
4391 : 3711504 : INSN_CODE (i3) = insn_code_number;
4392 : 3711504 : PATTERN (i3) = newpat;
4393 : :
4394 : 3711504 : if (CALL_P (i3) && CALL_INSN_FUNCTION_USAGE (i3))
4395 : : {
4396 : 212374 : for (rtx link = CALL_INSN_FUNCTION_USAGE (i3); link;
4397 : 140670 : link = XEXP (link, 1))
4398 : : {
4399 : 140670 : if (substed_i2)
4400 : : {
4401 : : /* I2SRC must still be meaningful at this point. Some
4402 : : splitting operations can invalidate I2SRC, but those
4403 : : operations do not apply to calls. */
4404 : 140670 : gcc_assert (i2src);
4405 : 140670 : XEXP (link, 0) = simplify_replace_rtx (XEXP (link, 0),
4406 : : i2dest, i2src);
4407 : : }
4408 : 140670 : if (substed_i1)
4409 : 0 : XEXP (link, 0) = simplify_replace_rtx (XEXP (link, 0),
4410 : : i1dest, i1src);
4411 : 140670 : if (substed_i0)
4412 : 0 : XEXP (link, 0) = simplify_replace_rtx (XEXP (link, 0),
4413 : : i0dest, i0src);
4414 : : }
4415 : : }
4416 : :
4417 : 3711504 : if (undobuf.other_insn)
4418 : 201634 : INSN_CODE (undobuf.other_insn) = other_code_number;
4419 : :
4420 : : /* We had one special case above where I2 had more than one set and
4421 : : we replaced a destination of one of those sets with the destination
4422 : : of I3. In that case, we have to update LOG_LINKS of insns later
4423 : : in this basic block. Note that this (expensive) case is rare.
4424 : :
4425 : : Also, in this case, we must pretend that all REG_NOTEs for I2
4426 : : actually came from I3, so that REG_UNUSED notes from I2 will be
4427 : : properly handled. */
4428 : :
4429 : 3711504 : if (i3_subst_into_i2)
4430 : : {
4431 : 150517 : for (i = 0; i < XVECLEN (PATTERN (i2), 0); i++)
4432 : 102547 : if ((GET_CODE (XVECEXP (PATTERN (i2), 0, i)) == SET
4433 : 49217 : || GET_CODE (XVECEXP (PATTERN (i2), 0, i)) == CLOBBER)
4434 : 101765 : && REG_P (SET_DEST (XVECEXP (PATTERN (i2), 0, i)))
4435 : 93115 : && SET_DEST (XVECEXP (PATTERN (i2), 0, i)) != i2dest
4436 : 195662 : && ! find_reg_note (i2, REG_UNUSED,
4437 : 93115 : SET_DEST (XVECEXP (PATTERN (i2), 0, i))))
4438 : 22471604 : for (temp_insn = NEXT_INSN (i2);
4439 : : temp_insn
4440 : 22471604 : && (this_basic_block->next_bb == EXIT_BLOCK_PTR_FOR_FN (cfun)
4441 : 22388566 : || BB_HEAD (this_basic_block) != temp_insn);
4442 : 22430510 : temp_insn = NEXT_INSN (temp_insn))
4443 : 22430510 : if (temp_insn != i3 && NONDEBUG_INSN_P (temp_insn))
4444 : 14667254 : FOR_EACH_LOG_LINK (link, temp_insn)
4445 : 5446194 : if (link->insn == i2)
4446 : 505 : link->insn = i3;
4447 : :
4448 : 47970 : if (i3notes)
4449 : : {
4450 : : rtx link = i3notes;
4451 : 54711 : while (XEXP (link, 1))
4452 : : link = XEXP (link, 1);
4453 : 47970 : XEXP (link, 1) = i2notes;
4454 : : }
4455 : : else
4456 : : i3notes = i2notes;
4457 : : i2notes = 0;
4458 : : }
4459 : :
4460 : 3711504 : LOG_LINKS (i3) = NULL;
4461 : 3711504 : REG_NOTES (i3) = 0;
4462 : 3711504 : LOG_LINKS (i2) = NULL;
4463 : 3711504 : REG_NOTES (i2) = 0;
4464 : :
4465 : 3711504 : if (newi2pat)
4466 : : {
4467 : 67672 : if (MAY_HAVE_DEBUG_BIND_INSNS && i2scratch)
4468 : 11626 : propagate_for_debug (i2, last_combined_insn, i2dest, i2src,
4469 : : this_basic_block);
4470 : 67672 : INSN_CODE (i2) = i2_code_number;
4471 : 67672 : PATTERN (i2) = newi2pat;
4472 : : }
4473 : : else
4474 : : {
4475 : 3643832 : if (MAY_HAVE_DEBUG_BIND_INSNS && i2src)
4476 : 2040285 : propagate_for_debug (i2, last_combined_insn, i2dest, i2src,
4477 : : this_basic_block);
4478 : 3643832 : SET_INSN_DELETED (i2);
4479 : : }
4480 : :
4481 : 3711504 : if (i1)
4482 : : {
4483 : 93824 : LOG_LINKS (i1) = NULL;
4484 : 93824 : REG_NOTES (i1) = 0;
4485 : 93824 : if (MAY_HAVE_DEBUG_BIND_INSNS)
4486 : 50999 : propagate_for_debug (i1, last_combined_insn, i1dest, i1src,
4487 : : this_basic_block);
4488 : 93824 : SET_INSN_DELETED (i1);
4489 : : }
4490 : :
4491 : 3711504 : if (i0)
4492 : : {
4493 : 5242 : LOG_LINKS (i0) = NULL;
4494 : 5242 : REG_NOTES (i0) = 0;
4495 : 5242 : if (MAY_HAVE_DEBUG_BIND_INSNS)
4496 : 4015 : propagate_for_debug (i0, last_combined_insn, i0dest, i0src,
4497 : : this_basic_block);
4498 : 5242 : SET_INSN_DELETED (i0);
4499 : : }
4500 : :
4501 : : /* Get death notes for everything that is now used in either I3 or
4502 : : I2 and used to die in a previous insn. If we built two new
4503 : : patterns, move from I1 to I2 then I2 to I3 so that we get the
4504 : : proper movement on registers that I2 modifies. */
4505 : :
4506 : 3711504 : if (i0)
4507 : 5242 : from_luid = DF_INSN_LUID (i0);
4508 : 3706262 : else if (i1)
4509 : 88582 : from_luid = DF_INSN_LUID (i1);
4510 : : else
4511 : 3617680 : from_luid = DF_INSN_LUID (i2);
4512 : 3711504 : if (newi2pat)
4513 : 67672 : move_deaths (newi2pat, NULL_RTX, from_luid, i2, &midnotes);
4514 : 3711504 : move_deaths (newpat, newi2pat, from_luid, i3, &midnotes);
4515 : :
4516 : : /* Distribute all the LOG_LINKS and REG_NOTES from I1, I2, and I3. */
4517 : 3711504 : if (i3notes)
4518 : 6803626 : distribute_notes (i3notes, i3, i3, newi2pat ? i2 : NULL,
4519 : : elim_i2, elim_i1, elim_i0);
4520 : 3711504 : if (i2notes)
4521 : 5406112 : distribute_notes (i2notes, i2, i3, newi2pat ? i2 : NULL,
4522 : : elim_i2, elim_i1, elim_i0);
4523 : 3711504 : if (i1notes)
4524 : 61125 : distribute_notes (i1notes, i1, i3, newi2pat ? i2 : NULL,
4525 : : elim_i2, local_elim_i1, local_elim_i0);
4526 : 3711504 : if (i0notes)
4527 : 4559 : distribute_notes (i0notes, i0, i3, newi2pat ? i2 : NULL,
4528 : : elim_i2, elim_i1, local_elim_i0);
4529 : 3711504 : if (midnotes)
4530 : 4622451 : distribute_notes (midnotes, NULL, i3, newi2pat ? i2 : NULL,
4531 : : elim_i2, elim_i1, elim_i0);
4532 : :
4533 : : /* Distribute any notes added to I2 or I3 by recog_for_combine. We
4534 : : know these are REG_UNUSED and want them to go to the desired insn,
4535 : : so we always pass it as i3. */
4536 : :
4537 : 3711504 : if (newi2pat && new_i2_notes)
4538 : 21825 : distribute_notes (new_i2_notes, i2, i2, NULL, NULL_RTX, NULL_RTX,
4539 : : NULL_RTX);
4540 : :
4541 : 3711504 : if (new_i3_notes)
4542 : 124785 : distribute_notes (new_i3_notes, i3, i3, NULL, NULL_RTX, NULL_RTX,
4543 : : NULL_RTX);
4544 : :
4545 : : /* If I3DEST was used in I3SRC, it really died in I3. We may need to
4546 : : put a REG_DEAD note for it somewhere. If NEWI2PAT exists and sets
4547 : : I3DEST, the death must be somewhere before I2, not I3. If we passed I3
4548 : : in that case, it might delete I2. Similarly for I2 and I1.
4549 : : Show an additional death due to the REG_DEAD note we make here. If
4550 : : we discard it in distribute_notes, we will decrement it again. */
4551 : :
4552 : 3711504 : if (i3dest_killed)
4553 : : {
4554 : 301415 : rtx new_note = alloc_reg_note (REG_DEAD, i3dest_killed, NULL_RTX);
4555 : 301415 : if (newi2pat && reg_set_p (i3dest_killed, newi2pat))
4556 : 1251 : distribute_notes (new_note, NULL, i2, NULL, elim_i2,
4557 : : elim_i1, elim_i0);
4558 : : else
4559 : 599303 : distribute_notes (new_note, NULL, i3, newi2pat ? i2 : NULL,
4560 : : elim_i2, elim_i1, elim_i0);
4561 : : }
4562 : :
4563 : 3711504 : if (i2dest_in_i2src)
4564 : : {
4565 : 81702 : rtx new_note = alloc_reg_note (REG_DEAD, i2dest, NULL_RTX);
4566 : 81702 : if (newi2pat && reg_set_p (i2dest, newi2pat))
4567 : 102 : distribute_notes (new_note, NULL, i2, NULL, NULL_RTX,
4568 : : NULL_RTX, NULL_RTX);
4569 : : else
4570 : 163180 : distribute_notes (new_note, NULL, i3, newi2pat ? i2 : NULL,
4571 : : NULL_RTX, NULL_RTX, NULL_RTX);
4572 : : }
4573 : :
4574 : 3711504 : if (i1dest_in_i1src)
4575 : : {
4576 : 60 : rtx new_note = alloc_reg_note (REG_DEAD, i1dest, NULL_RTX);
4577 : 60 : if (newi2pat && reg_set_p (i1dest, newi2pat))
4578 : 1 : distribute_notes (new_note, NULL, i2, NULL, NULL_RTX,
4579 : : NULL_RTX, NULL_RTX);
4580 : : else
4581 : 102 : distribute_notes (new_note, NULL, i3, newi2pat ? i2 : NULL,
4582 : : NULL_RTX, NULL_RTX, NULL_RTX);
4583 : : }
4584 : :
4585 : 3711504 : if (i0dest_in_i0src)
4586 : : {
4587 : 18 : rtx new_note = alloc_reg_note (REG_DEAD, i0dest, NULL_RTX);
4588 : 18 : if (newi2pat && reg_set_p (i0dest, newi2pat))
4589 : 0 : distribute_notes (new_note, NULL, i2, NULL, NULL_RTX,
4590 : : NULL_RTX, NULL_RTX);
4591 : : else
4592 : 36 : distribute_notes (new_note, NULL, i3, newi2pat ? i2 : NULL,
4593 : : NULL_RTX, NULL_RTX, NULL_RTX);
4594 : : }
4595 : :
4596 : 3711504 : distribute_links (i3links);
4597 : 3711504 : distribute_links (i2links);
4598 : 3711504 : distribute_links (i1links);
4599 : 3711504 : distribute_links (i0links);
4600 : :
4601 : 3711504 : if (REG_P (i2dest))
4602 : : {
4603 : 3711504 : struct insn_link *link;
4604 : 3711504 : rtx_insn *i2_insn = 0;
4605 : 3711504 : rtx i2_val = 0, set;
4606 : :
4607 : : /* The insn that used to set this register doesn't exist, and
4608 : : this life of the register may not exist either. See if one of
4609 : : I3's links points to an insn that sets I2DEST. If it does,
4610 : : that is now the last known value for I2DEST. If we don't update
4611 : : this and I2 set the register to a value that depended on its old
4612 : : contents, we will get confused. If this insn is used, thing
4613 : : will be set correctly in combine_instructions. */
4614 : 6959446 : FOR_EACH_LOG_LINK (link, i3)
4615 : 3247942 : if ((set = single_set (link->insn)) != 0
4616 : 3247942 : && rtx_equal_p (i2dest, SET_DEST (set)))
4617 : 50177 : i2_insn = link->insn, i2_val = SET_SRC (set);
4618 : :
4619 : 3711504 : record_value_for_reg (i2dest, i2_insn, i2_val);
4620 : :
4621 : : /* If the reg formerly set in I2 died only once and that was in I3,
4622 : : zero its use count so it won't make `reload' do any work. */
4623 : 3711504 : if (! added_sets_2
4624 : 3627580 : && (newi2pat == 0 || ! reg_mentioned_p (i2dest, newi2pat))
4625 : 3590189 : && ! i2dest_in_i2src
4626 : 7237164 : && REGNO (i2dest) < reg_n_sets_max)
4627 : 3525658 : INC_REG_N_SETS (REGNO (i2dest), -1);
4628 : : }
4629 : :
4630 : 3711504 : if (i1 && REG_P (i1dest))
4631 : : {
4632 : 93824 : struct insn_link *link;
4633 : 93824 : rtx_insn *i1_insn = 0;
4634 : 93824 : rtx i1_val = 0, set;
4635 : :
4636 : 166071 : FOR_EACH_LOG_LINK (link, i3)
4637 : 72247 : if ((set = single_set (link->insn)) != 0
4638 : 72247 : && rtx_equal_p (i1dest, SET_DEST (set)))
4639 : 177 : i1_insn = link->insn, i1_val = SET_SRC (set);
4640 : :
4641 : 93824 : record_value_for_reg (i1dest, i1_insn, i1_val);
4642 : :
4643 : 93824 : if (! added_sets_1
4644 : : && ! i1dest_in_i1src
4645 : 93824 : && REGNO (i1dest) < reg_n_sets_max)
4646 : 86907 : INC_REG_N_SETS (REGNO (i1dest), -1);
4647 : : }
4648 : :
4649 : 3711504 : if (i0 && REG_P (i0dest))
4650 : : {
4651 : 5242 : struct insn_link *link;
4652 : 5242 : rtx_insn *i0_insn = 0;
4653 : 5242 : rtx i0_val = 0, set;
4654 : :
4655 : 7302 : FOR_EACH_LOG_LINK (link, i3)
4656 : 2060 : if ((set = single_set (link->insn)) != 0
4657 : 2060 : && rtx_equal_p (i0dest, SET_DEST (set)))
4658 : 0 : i0_insn = link->insn, i0_val = SET_SRC (set);
4659 : :
4660 : 5242 : record_value_for_reg (i0dest, i0_insn, i0_val);
4661 : :
4662 : 5242 : if (! added_sets_0
4663 : : && ! i0dest_in_i0src
4664 : 5242 : && REGNO (i0dest) < reg_n_sets_max)
4665 : 5176 : INC_REG_N_SETS (REGNO (i0dest), -1);
4666 : : }
4667 : :
4668 : : /* Update reg_stat[].nonzero_bits et al for any changes that may have
4669 : : been made to this insn. The order is important, because newi2pat
4670 : : can affect nonzero_bits of newpat. */
4671 : 3711504 : if (newi2pat)
4672 : 67672 : note_pattern_stores (newi2pat, set_nonzero_bits_and_sign_copies, NULL);
4673 : 3711504 : note_pattern_stores (newpat, set_nonzero_bits_and_sign_copies, NULL);
4674 : : }
4675 : :
4676 : 3711504 : if (undobuf.other_insn != NULL_RTX)
4677 : : {
4678 : 201634 : if (dump_file)
4679 : : {
4680 : 12 : fprintf (dump_file, "modifying other_insn ");
4681 : 12 : dump_insn_slim (dump_file, undobuf.other_insn);
4682 : : }
4683 : 201634 : df_insn_rescan (undobuf.other_insn);
4684 : : }
4685 : :
4686 : 3711504 : if (i0 && !(NOTE_P (i0) && (NOTE_KIND (i0) == NOTE_INSN_DELETED)))
4687 : : {
4688 : 0 : if (dump_file)
4689 : : {
4690 : 0 : fprintf (dump_file, "modifying insn i0 ");
4691 : 0 : dump_insn_slim (dump_file, i0);
4692 : : }
4693 : 0 : df_insn_rescan (i0);
4694 : : }
4695 : :
4696 : 3711504 : if (i1 && !(NOTE_P (i1) && (NOTE_KIND (i1) == NOTE_INSN_DELETED)))
4697 : : {
4698 : 0 : if (dump_file)
4699 : : {
4700 : 0 : fprintf (dump_file, "modifying insn i1 ");
4701 : 0 : dump_insn_slim (dump_file, i1);
4702 : : }
4703 : 0 : df_insn_rescan (i1);
4704 : : }
4705 : :
4706 : 3711504 : if (i2 && !(NOTE_P (i2) && (NOTE_KIND (i2) == NOTE_INSN_DELETED)))
4707 : : {
4708 : 67672 : if (dump_file)
4709 : : {
4710 : 14 : fprintf (dump_file, "modifying insn i2 ");
4711 : 14 : dump_insn_slim (dump_file, i2);
4712 : : }
4713 : 67672 : df_insn_rescan (i2);
4714 : : }
4715 : :
4716 : 3711504 : if (i3 && !(NOTE_P (i3) && (NOTE_KIND (i3) == NOTE_INSN_DELETED)))
4717 : : {
4718 : 3711504 : if (dump_file)
4719 : : {
4720 : 239 : fprintf (dump_file, "modifying insn i3 ");
4721 : 239 : dump_insn_slim (dump_file, i3);
4722 : : }
4723 : 3711504 : df_insn_rescan (i3);
4724 : : }
4725 : :
4726 : : /* Set new_direct_jump_p if a new return or simple jump instruction
4727 : : has been created. Adjust the CFG accordingly. */
4728 : 3711504 : if (returnjump_p (i3) || any_uncondjump_p (i3))
4729 : : {
4730 : 277 : *new_direct_jump_p = 1;
4731 : 277 : mark_jump_label (PATTERN (i3), i3, 0);
4732 : 277 : update_cfg_for_uncondjump (i3);
4733 : : }
4734 : :
4735 : 3711504 : if (undobuf.other_insn != NULL_RTX
4736 : 3711504 : && (returnjump_p (undobuf.other_insn)
4737 : 201634 : || any_uncondjump_p (undobuf.other_insn)))
4738 : : {
4739 : 1978 : *new_direct_jump_p = 1;
4740 : 1978 : update_cfg_for_uncondjump (undobuf.other_insn);
4741 : : }
4742 : :
4743 : 3711504 : if (GET_CODE (PATTERN (i3)) == TRAP_IF
4744 : 3711504 : && XEXP (PATTERN (i3), 0) == const1_rtx)
4745 : : {
4746 : 0 : basic_block bb = BLOCK_FOR_INSN (i3);
4747 : 0 : gcc_assert (bb);
4748 : 0 : remove_edge (split_block (bb, i3));
4749 : 0 : emit_barrier_after_bb (bb);
4750 : 0 : *new_direct_jump_p = 1;
4751 : : }
4752 : :
4753 : 3711504 : if (undobuf.other_insn
4754 : 201634 : && GET_CODE (PATTERN (undobuf.other_insn)) == TRAP_IF
4755 : 3711504 : && XEXP (PATTERN (undobuf.other_insn), 0) == const1_rtx)
4756 : : {
4757 : 0 : basic_block bb = BLOCK_FOR_INSN (undobuf.other_insn);
4758 : 0 : gcc_assert (bb);
4759 : 0 : remove_edge (split_block (bb, undobuf.other_insn));
4760 : 0 : emit_barrier_after_bb (bb);
4761 : 0 : *new_direct_jump_p = 1;
4762 : : }
4763 : :
4764 : : /* A noop might also need cleaning up of CFG, if it comes from the
4765 : : simplification of a jump. */
4766 : 3711504 : if (JUMP_P (i3)
4767 : 45196 : && GET_CODE (newpat) == SET
4768 : 33645 : && SET_SRC (newpat) == pc_rtx
4769 : 425 : && SET_DEST (newpat) == pc_rtx)
4770 : : {
4771 : 425 : *new_direct_jump_p = 1;
4772 : 425 : update_cfg_for_uncondjump (i3);
4773 : : }
4774 : :
4775 : 3711504 : if (undobuf.other_insn != NULL_RTX
4776 : 201634 : && JUMP_P (undobuf.other_insn)
4777 : 196371 : && GET_CODE (PATTERN (undobuf.other_insn)) == SET
4778 : 196371 : && SET_SRC (PATTERN (undobuf.other_insn)) == pc_rtx
4779 : 3713152 : && SET_DEST (PATTERN (undobuf.other_insn)) == pc_rtx)
4780 : : {
4781 : 1648 : *new_direct_jump_p = 1;
4782 : 1648 : update_cfg_for_uncondjump (undobuf.other_insn);
4783 : : }
4784 : :
4785 : 3711504 : combine_successes++;
4786 : 3711504 : undo_commit ();
4787 : :
4788 : 3711504 : rtx_insn *ret = newi2pat ? i2 : i3;
4789 : 3711504 : if (added_links_insn && DF_INSN_LUID (added_links_insn) < DF_INSN_LUID (ret))
4790 : : ret = added_links_insn;
4791 : 3711504 : if (added_notes_insn && DF_INSN_LUID (added_notes_insn) < DF_INSN_LUID (ret))
4792 : : ret = added_notes_insn;
4793 : :
4794 : : return ret;
4795 : : }
4796 : :
4797 : : /* Get a marker for undoing to the current state. */
4798 : :
4799 : : static void *
4800 : 34885167 : get_undo_marker (void)
4801 : : {
4802 : 34885167 : return undobuf.undos;
4803 : : }
4804 : :
4805 : : /* Undo the modifications up to the marker. */
4806 : :
4807 : : static void
4808 : 41903848 : undo_to_marker (void *marker)
4809 : : {
4810 : 41903848 : struct undo *undo, *next;
4811 : :
4812 : 130065986 : for (undo = undobuf.undos; undo != marker; undo = next)
4813 : : {
4814 : 88162138 : gcc_assert (undo);
4815 : :
4816 : 88162138 : next = undo->next;
4817 : 88162138 : switch (undo->kind)
4818 : : {
4819 : 81368222 : case UNDO_RTX:
4820 : 81368222 : *undo->where.r = undo->old_contents.r;
4821 : 81368222 : break;
4822 : 6170079 : case UNDO_INT:
4823 : 6170079 : *undo->where.i = undo->old_contents.i;
4824 : 6170079 : break;
4825 : 560734 : case UNDO_MODE:
4826 : 560734 : adjust_reg_mode (regno_reg_rtx[undo->where.regno],
4827 : : undo->old_contents.m);
4828 : 560734 : break;
4829 : 63103 : case UNDO_LINKS:
4830 : 63103 : *undo->where.l = undo->old_contents.l;
4831 : 63103 : break;
4832 : 0 : default:
4833 : 0 : gcc_unreachable ();
4834 : : }
4835 : :
4836 : 88162138 : undo->next = undobuf.frees;
4837 : 88162138 : undobuf.frees = undo;
4838 : : }
4839 : :
4840 : 41903848 : undobuf.undos = (struct undo *) marker;
4841 : 41903848 : }
4842 : :
4843 : : /* Undo all the modifications recorded in undobuf. */
4844 : :
4845 : : static void
4846 : 40839225 : undo_all (void)
4847 : : {
4848 : 40839225 : undo_to_marker (0);
4849 : 0 : }
4850 : :
4851 : : /* We've committed to accepting the changes we made. Move all
4852 : : of the undos to the free list. */
4853 : :
4854 : : static void
4855 : 3711504 : undo_commit (void)
4856 : : {
4857 : 3711504 : struct undo *undo, *next;
4858 : :
4859 : 11009141 : for (undo = undobuf.undos; undo; undo = next)
4860 : : {
4861 : 7297637 : next = undo->next;
4862 : 7297637 : undo->next = undobuf.frees;
4863 : 7297637 : undobuf.frees = undo;
4864 : : }
4865 : 3711504 : undobuf.undos = 0;
4866 : 3711504 : }
4867 : :
4868 : : /* Find the innermost point within the rtx at LOC, possibly LOC itself,
4869 : : where we have an arithmetic expression and return that point. LOC will
4870 : : be inside INSN.
4871 : :
4872 : : try_combine will call this function to see if an insn can be split into
4873 : : two insns. */
4874 : :
4875 : : static rtx *
4876 : 29140060 : find_split_point (rtx *loc, rtx_insn *insn, bool set_src)
4877 : : {
4878 : 29140060 : rtx x = *loc;
4879 : 29140060 : enum rtx_code code = GET_CODE (x);
4880 : 29140060 : rtx *split;
4881 : 29140060 : unsigned HOST_WIDE_INT len = 0;
4882 : 29140060 : HOST_WIDE_INT pos = 0;
4883 : 29140060 : bool unsignedp = false;
4884 : 29140060 : rtx inner = NULL_RTX;
4885 : 29140060 : scalar_int_mode mode, inner_mode;
4886 : :
4887 : : /* First special-case some codes. */
4888 : 29140060 : switch (code)
4889 : : {
4890 : 975244 : case SUBREG:
4891 : : #ifdef INSN_SCHEDULING
4892 : : /* If we are making a paradoxical SUBREG invalid, it becomes a split
4893 : : point. */
4894 : 975244 : if (MEM_P (SUBREG_REG (x)))
4895 : : return loc;
4896 : : #endif
4897 : 963916 : return find_split_point (&SUBREG_REG (x), insn, false);
4898 : :
4899 : 1477626 : case MEM:
4900 : : /* If we have (mem (const ..)) or (mem (symbol_ref ...)), split it
4901 : : using LO_SUM and HIGH. */
4902 : 1477626 : if (HAVE_lo_sum && (GET_CODE (XEXP (x, 0)) == CONST
4903 : : || GET_CODE (XEXP (x, 0)) == SYMBOL_REF))
4904 : : {
4905 : : machine_mode address_mode = get_address_mode (x);
4906 : :
4907 : : SUBST (XEXP (x, 0),
4908 : : gen_rtx_LO_SUM (address_mode,
4909 : : gen_rtx_HIGH (address_mode, XEXP (x, 0)),
4910 : : XEXP (x, 0)));
4911 : : return &XEXP (XEXP (x, 0), 0);
4912 : : }
4913 : :
4914 : : /* If we have a PLUS whose second operand is a constant and the
4915 : : address is not valid, perhaps we can split it up using
4916 : : the machine-specific way to split large constants. We use
4917 : : the first pseudo-reg (one of the virtual regs) as a placeholder;
4918 : : it will not remain in the result. */
4919 : 1477626 : if (GET_CODE (XEXP (x, 0)) == PLUS
4920 : 931436 : && CONST_INT_P (XEXP (XEXP (x, 0), 1))
4921 : 3072905 : && ! memory_address_addr_space_p (GET_MODE (x), XEXP (x, 0),
4922 : 663843 : MEM_ADDR_SPACE (x)))
4923 : : {
4924 : 115140 : rtx reg = regno_reg_rtx[FIRST_PSEUDO_REGISTER];
4925 : 115140 : unsigned int old_nregs, new_nregs;
4926 : 115140 : rtx_insn *seq = combine_split_insns (gen_rtx_SET (reg, XEXP (x, 0)),
4927 : : subst_insn, &old_nregs, &new_nregs);
4928 : :
4929 : : /* This should have produced two insns, each of which sets our
4930 : : placeholder. If the source of the second is a valid address,
4931 : : we can put both sources together and make a split point
4932 : : in the middle. */
4933 : :
4934 : 115140 : if (seq
4935 : 54 : && NEXT_INSN (seq) != NULL_RTX
4936 : 0 : && NEXT_INSN (NEXT_INSN (seq)) == NULL_RTX
4937 : 0 : && NONJUMP_INSN_P (seq)
4938 : 0 : && GET_CODE (PATTERN (seq)) == SET
4939 : 0 : && SET_DEST (PATTERN (seq)) == reg
4940 : 0 : && ! reg_mentioned_p (reg,
4941 : 0 : SET_SRC (PATTERN (seq)))
4942 : 0 : && NONJUMP_INSN_P (NEXT_INSN (seq))
4943 : 0 : && GET_CODE (PATTERN (NEXT_INSN (seq))) == SET
4944 : 0 : && SET_DEST (PATTERN (NEXT_INSN (seq))) == reg
4945 : 115140 : && memory_address_addr_space_p
4946 : 115140 : (GET_MODE (x), SET_SRC (PATTERN (NEXT_INSN (seq))),
4947 : 0 : MEM_ADDR_SPACE (x)))
4948 : : {
4949 : 0 : rtx src1 = SET_SRC (PATTERN (seq));
4950 : 0 : rtx src2 = SET_SRC (PATTERN (NEXT_INSN (seq)));
4951 : :
4952 : : /* Replace the placeholder in SRC2 with SRC1. If we can
4953 : : find where in SRC2 it was placed, that can become our
4954 : : split point and we can replace this address with SRC2.
4955 : : Just try two obvious places. */
4956 : :
4957 : 0 : src2 = replace_rtx (src2, reg, src1);
4958 : 0 : split = 0;
4959 : 0 : if (XEXP (src2, 0) == src1)
4960 : 0 : split = &XEXP (src2, 0);
4961 : 0 : else if (GET_RTX_FORMAT (GET_CODE (XEXP (src2, 0)))[0] == 'e'
4962 : 0 : && XEXP (XEXP (src2, 0), 0) == src1)
4963 : 0 : split = &XEXP (XEXP (src2, 0), 0);
4964 : :
4965 : 0 : if (split)
4966 : : {
4967 : 0 : SUBST (XEXP (x, 0), src2);
4968 : 90937 : return split;
4969 : : }
4970 : : }
4971 : :
4972 : : /* If that didn't work and we have a nested plus, like:
4973 : : ((REG1 * CONST1) + REG2) + CONST2 and (REG1 + REG2) + CONST2
4974 : : is valid address, try to split (REG1 * CONST1). */
4975 : 115140 : if (GET_CODE (XEXP (XEXP (x, 0), 0)) == PLUS
4976 : 82238 : && !OBJECT_P (XEXP (XEXP (XEXP (x, 0), 0), 0))
4977 : 67332 : && OBJECT_P (XEXP (XEXP (XEXP (x, 0), 0), 1))
4978 : 67330 : && ! (GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 0)) == SUBREG
4979 : 10 : && OBJECT_P (SUBREG_REG (XEXP (XEXP (XEXP (x, 0),
4980 : : 0), 0)))))
4981 : : {
4982 : 67330 : rtx tem = XEXP (XEXP (XEXP (x, 0), 0), 0);
4983 : 67330 : XEXP (XEXP (XEXP (x, 0), 0), 0) = reg;
4984 : 134660 : if (memory_address_addr_space_p (GET_MODE (x), XEXP (x, 0),
4985 : 67330 : MEM_ADDR_SPACE (x)))
4986 : : {
4987 : 57018 : XEXP (XEXP (XEXP (x, 0), 0), 0) = tem;
4988 : 57018 : return &XEXP (XEXP (XEXP (x, 0), 0), 0);
4989 : : }
4990 : 10312 : XEXP (XEXP (XEXP (x, 0), 0), 0) = tem;
4991 : 10312 : }
4992 : 47810 : else if (GET_CODE (XEXP (XEXP (x, 0), 0)) == PLUS
4993 : 14908 : && OBJECT_P (XEXP (XEXP (XEXP (x, 0), 0), 0))
4994 : 14906 : && !OBJECT_P (XEXP (XEXP (XEXP (x, 0), 0), 1))
4995 : 608 : && ! (GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 1)) == SUBREG
4996 : 608 : && OBJECT_P (SUBREG_REG (XEXP (XEXP (XEXP (x, 0),
4997 : : 0), 1)))))
4998 : : {
4999 : 0 : rtx tem = XEXP (XEXP (XEXP (x, 0), 0), 1);
5000 : 0 : XEXP (XEXP (XEXP (x, 0), 0), 1) = reg;
5001 : 0 : if (memory_address_addr_space_p (GET_MODE (x), XEXP (x, 0),
5002 : 0 : MEM_ADDR_SPACE (x)))
5003 : : {
5004 : 0 : XEXP (XEXP (XEXP (x, 0), 0), 1) = tem;
5005 : 0 : return &XEXP (XEXP (XEXP (x, 0), 0), 1);
5006 : : }
5007 : 0 : XEXP (XEXP (XEXP (x, 0), 0), 1) = tem;
5008 : : }
5009 : :
5010 : : /* If that didn't work, perhaps the first operand is complex and
5011 : : needs to be computed separately, so make a split point there.
5012 : : This will occur on machines that just support REG + CONST
5013 : : and have a constant moved through some previous computation. */
5014 : 58122 : if (!OBJECT_P (XEXP (XEXP (x, 0), 0))
5015 : 33919 : && ! (GET_CODE (XEXP (XEXP (x, 0), 0)) == SUBREG
5016 : 0 : && OBJECT_P (SUBREG_REG (XEXP (XEXP (x, 0), 0)))))
5017 : 33919 : return &XEXP (XEXP (x, 0), 0);
5018 : : }
5019 : :
5020 : : /* If we have a PLUS whose first operand is complex, try computing it
5021 : : separately by making a split there. */
5022 : 1386689 : if (GET_CODE (XEXP (x, 0)) == PLUS
5023 : 2387184 : && ! memory_address_addr_space_p (GET_MODE (x), XEXP (x, 0),
5024 : 840499 : MEM_ADDR_SPACE (x))
5025 : 159996 : && ! OBJECT_P (XEXP (XEXP (x, 0), 0))
5026 : 1500235 : && ! (GET_CODE (XEXP (XEXP (x, 0), 0)) == SUBREG
5027 : 986 : && OBJECT_P (SUBREG_REG (XEXP (XEXP (x, 0), 0)))))
5028 : 113542 : return &XEXP (XEXP (x, 0), 0);
5029 : : break;
5030 : :
5031 : 4336369 : case SET:
5032 : : /* See if we can split SET_SRC as it stands. */
5033 : 4336369 : split = find_split_point (&SET_SRC (x), insn, true);
5034 : 4336369 : if (split && split != &SET_SRC (x))
5035 : : return split;
5036 : :
5037 : : /* See if we can split SET_DEST as it stands. */
5038 : 453328 : split = find_split_point (&SET_DEST (x), insn, false);
5039 : 453328 : if (split && split != &SET_DEST (x))
5040 : : return split;
5041 : :
5042 : : /* See if this is a bitfield assignment with everything constant. If
5043 : : so, this is an IOR of an AND, so split it into that. */
5044 : 418219 : if (GET_CODE (SET_DEST (x)) == ZERO_EXTRACT
5045 : 4594 : && is_a <scalar_int_mode> (GET_MODE (XEXP (SET_DEST (x), 0)),
5046 : : &inner_mode)
5047 : 4594 : && HWI_COMPUTABLE_MODE_P (inner_mode)
5048 : 4594 : && CONST_INT_P (XEXP (SET_DEST (x), 1))
5049 : 4594 : && CONST_INT_P (XEXP (SET_DEST (x), 2))
5050 : 4426 : && CONST_INT_P (SET_SRC (x))
5051 : 483 : && ((INTVAL (XEXP (SET_DEST (x), 1))
5052 : 483 : + INTVAL (XEXP (SET_DEST (x), 2)))
5053 : 483 : <= GET_MODE_PRECISION (inner_mode))
5054 : 418702 : && ! side_effects_p (XEXP (SET_DEST (x), 0)))
5055 : : {
5056 : 466 : HOST_WIDE_INT pos = INTVAL (XEXP (SET_DEST (x), 2));
5057 : 466 : unsigned HOST_WIDE_INT len = INTVAL (XEXP (SET_DEST (x), 1));
5058 : 466 : rtx dest = XEXP (SET_DEST (x), 0);
5059 : 466 : unsigned HOST_WIDE_INT mask = (HOST_WIDE_INT_1U << len) - 1;
5060 : 466 : unsigned HOST_WIDE_INT src = INTVAL (SET_SRC (x)) & mask;
5061 : 466 : rtx or_mask;
5062 : :
5063 : 466 : if (BITS_BIG_ENDIAN)
5064 : : pos = GET_MODE_PRECISION (inner_mode) - len - pos;
5065 : :
5066 : 466 : or_mask = gen_int_mode (src << pos, inner_mode);
5067 : 466 : if (src == mask)
5068 : 0 : SUBST (SET_SRC (x),
5069 : : simplify_gen_binary (IOR, inner_mode, dest, or_mask));
5070 : : else
5071 : : {
5072 : 466 : rtx negmask = gen_int_mode (~(mask << pos), inner_mode);
5073 : 466 : SUBST (SET_SRC (x),
5074 : : simplify_gen_binary (IOR, inner_mode,
5075 : : simplify_gen_binary (AND, inner_mode,
5076 : : dest, negmask),
5077 : : or_mask));
5078 : : }
5079 : :
5080 : 466 : SUBST (SET_DEST (x), dest);
5081 : :
5082 : 466 : split = find_split_point (&SET_SRC (x), insn, true);
5083 : 466 : if (split && split != &SET_SRC (x))
5084 : : return split;
5085 : : }
5086 : :
5087 : : /* Otherwise, see if this is an operation that we can split into two.
5088 : : If so, try to split that. */
5089 : 417753 : code = GET_CODE (SET_SRC (x));
5090 : :
5091 : 417753 : switch (code)
5092 : : {
5093 : 14944 : case AND:
5094 : : /* If we are AND'ing with a large constant that is only a single
5095 : : bit and the result is only being used in a context where we
5096 : : need to know if it is zero or nonzero, replace it with a bit
5097 : : extraction. This will avoid the large constant, which might
5098 : : have taken more than one insn to make. If the constant were
5099 : : not a valid argument to the AND but took only one insn to make,
5100 : : this is no worse, but if it took more than one insn, it will
5101 : : be better. */
5102 : :
5103 : 14944 : if (CONST_INT_P (XEXP (SET_SRC (x), 1))
5104 : 10958 : && REG_P (XEXP (SET_SRC (x), 0))
5105 : 554 : && (pos = exact_log2 (UINTVAL (XEXP (SET_SRC (x), 1)))) >= 7
5106 : 1 : && REG_P (SET_DEST (x))
5107 : 0 : && (split = find_single_use (SET_DEST (x), insn, NULL)) != 0
5108 : 0 : && (GET_CODE (*split) == EQ || GET_CODE (*split) == NE)
5109 : 0 : && XEXP (*split, 0) == SET_DEST (x)
5110 : 14944 : && XEXP (*split, 1) == const0_rtx)
5111 : : {
5112 : 0 : rtx extraction = make_extraction (GET_MODE (SET_DEST (x)),
5113 : 0 : XEXP (SET_SRC (x), 0),
5114 : : pos, NULL_RTX, 1,
5115 : : true, false, false);
5116 : 0 : if (extraction != 0)
5117 : : {
5118 : 0 : SUBST (SET_SRC (x), extraction);
5119 : 0 : return find_split_point (loc, insn, false);
5120 : : }
5121 : : }
5122 : : break;
5123 : :
5124 : : case NE:
5125 : : /* If STORE_FLAG_VALUE is -1, this is (NE X 0) and only one bit of X
5126 : : is known to be on, this can be converted into a NEG of a shift. */
5127 : : if (STORE_FLAG_VALUE == -1 && XEXP (SET_SRC (x), 1) == const0_rtx
5128 : : && GET_MODE (SET_SRC (x)) == GET_MODE (XEXP (SET_SRC (x), 0))
5129 : : && ((pos = exact_log2 (nonzero_bits (XEXP (SET_SRC (x), 0),
5130 : : GET_MODE (XEXP (SET_SRC (x),
5131 : : 0))))) >= 1))
5132 : : {
5133 : : machine_mode mode = GET_MODE (XEXP (SET_SRC (x), 0));
5134 : : rtx pos_rtx = gen_int_shift_amount (mode, pos);
5135 : : SUBST (SET_SRC (x),
5136 : : gen_rtx_NEG (mode,
5137 : : gen_rtx_LSHIFTRT (mode,
5138 : : XEXP (SET_SRC (x), 0),
5139 : : pos_rtx)));
5140 : :
5141 : : split = find_split_point (&SET_SRC (x), insn, true);
5142 : : if (split && split != &SET_SRC (x))
5143 : : return split;
5144 : : }
5145 : : break;
5146 : :
5147 : 759 : case SIGN_EXTEND:
5148 : 759 : inner = XEXP (SET_SRC (x), 0);
5149 : :
5150 : : /* We can't optimize if either mode is a partial integer
5151 : : mode as we don't know how many bits are significant
5152 : : in those modes. */
5153 : 759 : if (!is_int_mode (GET_MODE (inner), &inner_mode)
5154 : 755 : || GET_MODE_CLASS (GET_MODE (SET_SRC (x))) == MODE_PARTIAL_INT)
5155 : : break;
5156 : :
5157 : 755 : pos = 0;
5158 : 755 : len = GET_MODE_PRECISION (inner_mode);
5159 : 755 : unsignedp = false;
5160 : 755 : break;
5161 : :
5162 : 11695 : case SIGN_EXTRACT:
5163 : 11695 : case ZERO_EXTRACT:
5164 : 11695 : if (is_a <scalar_int_mode> (GET_MODE (XEXP (SET_SRC (x), 0)),
5165 : : &inner_mode)
5166 : 11355 : && CONST_INT_P (XEXP (SET_SRC (x), 1))
5167 : 11355 : && CONST_INT_P (XEXP (SET_SRC (x), 2)))
5168 : : {
5169 : 10624 : inner = XEXP (SET_SRC (x), 0);
5170 : 10624 : len = INTVAL (XEXP (SET_SRC (x), 1));
5171 : 10624 : pos = INTVAL (XEXP (SET_SRC (x), 2));
5172 : :
5173 : 10624 : if (BITS_BIG_ENDIAN)
5174 : : pos = GET_MODE_PRECISION (inner_mode) - len - pos;
5175 : 10624 : unsignedp = (code == ZERO_EXTRACT);
5176 : : }
5177 : : break;
5178 : :
5179 : : default:
5180 : : break;
5181 : : }
5182 : :
5183 : 417753 : if (len
5184 : 11379 : && known_subrange_p (pos, len,
5185 : 11379 : 0, GET_MODE_PRECISION (GET_MODE (inner)))
5186 : 429132 : && is_a <scalar_int_mode> (GET_MODE (SET_SRC (x)), &mode))
5187 : : {
5188 : : /* For unsigned, we have a choice of a shift followed by an
5189 : : AND or two shifts. Use two shifts for field sizes where the
5190 : : constant might be too large. We assume here that we can
5191 : : always at least get 8-bit constants in an AND insn, which is
5192 : : true for every current RISC. */
5193 : :
5194 : 11379 : if (unsignedp && len <= 8)
5195 : : {
5196 : 4653 : unsigned HOST_WIDE_INT mask
5197 : 4653 : = (HOST_WIDE_INT_1U << len) - 1;
5198 : 4653 : rtx pos_rtx = gen_int_shift_amount (mode, pos);
5199 : 4653 : SUBST (SET_SRC (x),
5200 : : gen_rtx_AND (mode,
5201 : : gen_rtx_LSHIFTRT
5202 : : (mode, gen_lowpart (mode, inner), pos_rtx),
5203 : : gen_int_mode (mask, mode)));
5204 : :
5205 : 4653 : split = find_split_point (&SET_SRC (x), insn, true);
5206 : 4653 : if (split && split != &SET_SRC (x))
5207 : 29140060 : return split;
5208 : : }
5209 : : else
5210 : : {
5211 : 6726 : int left_bits = GET_MODE_PRECISION (mode) - len - pos;
5212 : 6726 : int right_bits = GET_MODE_PRECISION (mode) - len;
5213 : 13452 : SUBST (SET_SRC (x),
5214 : : gen_rtx_fmt_ee
5215 : : (unsignedp ? LSHIFTRT : ASHIFTRT, mode,
5216 : : gen_rtx_ASHIFT (mode,
5217 : : gen_lowpart (mode, inner),
5218 : : gen_int_shift_amount (mode, left_bits)),
5219 : : gen_int_shift_amount (mode, right_bits)));
5220 : :
5221 : 6726 : split = find_split_point (&SET_SRC (x), insn, true);
5222 : 6726 : if (split && split != &SET_SRC (x))
5223 : 29140060 : return split;
5224 : : }
5225 : : }
5226 : :
5227 : : /* See if this is a simple operation with a constant as the second
5228 : : operand. It might be that this constant is out of range and hence
5229 : : could be used as a split point. */
5230 : 406374 : if (BINARY_P (SET_SRC (x))
5231 : 200866 : && CONSTANT_P (XEXP (SET_SRC (x), 1))
5232 : 108214 : && (OBJECT_P (XEXP (SET_SRC (x), 0))
5233 : 20835 : || (GET_CODE (XEXP (SET_SRC (x), 0)) == SUBREG
5234 : 11648 : && OBJECT_P (SUBREG_REG (XEXP (SET_SRC (x), 0))))))
5235 : 89606 : return &XEXP (SET_SRC (x), 1);
5236 : :
5237 : : /* Finally, see if this is a simple operation with its first operand
5238 : : not in a register. The operation might require this operand in a
5239 : : register, so return it as a split point. We can always do this
5240 : : because if the first operand were another operation, we would have
5241 : : already found it as a split point. */
5242 : 316768 : if ((BINARY_P (SET_SRC (x)) || UNARY_P (SET_SRC (x)))
5243 : 316768 : && ! register_operand (XEXP (SET_SRC (x), 0), VOIDmode))
5244 : 111508 : return &XEXP (SET_SRC (x), 0);
5245 : :
5246 : : return 0;
5247 : :
5248 : 1157602 : case AND:
5249 : 1157602 : case IOR:
5250 : : /* We write NOR as (and (not A) (not B)), but if we don't have a NOR,
5251 : : it is better to write this as (not (ior A B)) so we can split it.
5252 : : Similarly for IOR. */
5253 : 1157602 : if (GET_CODE (XEXP (x, 0)) == NOT && GET_CODE (XEXP (x, 1)) == NOT)
5254 : : {
5255 : 1204 : SUBST (*loc,
5256 : : gen_rtx_NOT (GET_MODE (x),
5257 : : gen_rtx_fmt_ee (code == IOR ? AND : IOR,
5258 : : GET_MODE (x),
5259 : : XEXP (XEXP (x, 0), 0),
5260 : : XEXP (XEXP (x, 1), 0))));
5261 : 602 : return find_split_point (loc, insn, set_src);
5262 : : }
5263 : :
5264 : : /* Many RISC machines have a large set of logical insns. If the
5265 : : second operand is a NOT, put it first so we will try to split the
5266 : : other operand first. */
5267 : 1157000 : if (GET_CODE (XEXP (x, 1)) == NOT)
5268 : : {
5269 : 4858 : rtx tem = XEXP (x, 0);
5270 : 4858 : SUBST (XEXP (x, 0), XEXP (x, 1));
5271 : 4858 : SUBST (XEXP (x, 1), tem);
5272 : : }
5273 : : break;
5274 : :
5275 : 2971102 : case PLUS:
5276 : 2971102 : case MINUS:
5277 : : /* Canonicalization can produce (minus A (mult B C)), where C is a
5278 : : constant. It may be better to try splitting (plus (mult B -C) A)
5279 : : instead if this isn't a multiply by a power of two. */
5280 : 194838 : if (set_src && code == MINUS && GET_CODE (XEXP (x, 1)) == MULT
5281 : 23671 : && GET_CODE (XEXP (XEXP (x, 1), 1)) == CONST_INT
5282 : 2978924 : && !pow2p_hwi (INTVAL (XEXP (XEXP (x, 1), 1))))
5283 : : {
5284 : 7822 : machine_mode mode = GET_MODE (x);
5285 : 7822 : unsigned HOST_WIDE_INT this_int = INTVAL (XEXP (XEXP (x, 1), 1));
5286 : 7822 : HOST_WIDE_INT other_int = trunc_int_for_mode (-this_int, mode);
5287 : 7822 : SUBST (*loc, gen_rtx_PLUS (mode,
5288 : : gen_rtx_MULT (mode,
5289 : : XEXP (XEXP (x, 1), 0),
5290 : : gen_int_mode (other_int,
5291 : : mode)),
5292 : : XEXP (x, 0)));
5293 : 7822 : return find_split_point (loc, insn, set_src);
5294 : : }
5295 : :
5296 : : /* Split at a multiply-accumulate instruction. However if this is
5297 : : the SET_SRC, we likely do not have such an instruction and it's
5298 : : worthless to try this split. */
5299 : 2963280 : if (!set_src
5300 : 1735012 : && (GET_CODE (XEXP (x, 0)) == MULT
5301 : 1623998 : || (GET_CODE (XEXP (x, 0)) == ASHIFT
5302 : 113823 : && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT)))
5303 : : return loc;
5304 : :
5305 : : default:
5306 : : break;
5307 : : }
5308 : :
5309 : : /* Otherwise, select our actions depending on our rtx class. */
5310 : 23392890 : switch (GET_RTX_CLASS (code))
5311 : : {
5312 : 1219501 : case RTX_BITFIELD_OPS: /* This is ZERO_EXTRACT and SIGN_EXTRACT. */
5313 : 1219501 : case RTX_TERNARY:
5314 : 1219501 : split = find_split_point (&XEXP (x, 2), insn, false);
5315 : 1219501 : if (split)
5316 : : return split;
5317 : : /* fall through */
5318 : 9260669 : case RTX_BIN_ARITH:
5319 : 9260669 : case RTX_COMM_ARITH:
5320 : 9260669 : case RTX_COMPARE:
5321 : 9260669 : case RTX_COMM_COMPARE:
5322 : 9260669 : split = find_split_point (&XEXP (x, 1), insn, false);
5323 : 9260669 : if (split)
5324 : : return split;
5325 : : /* fall through */
5326 : 8882654 : case RTX_UNARY:
5327 : : /* Some machines have (and (shift ...) ...) insns. If X is not
5328 : : an AND, but XEXP (X, 0) is, use it as our split point. */
5329 : 8882654 : if (GET_CODE (x) != AND && GET_CODE (XEXP (x, 0)) == AND)
5330 : 333015 : return &XEXP (x, 0);
5331 : :
5332 : 8549639 : split = find_split_point (&XEXP (x, 0), insn, false);
5333 : 8549639 : if (split)
5334 : : return split;
5335 : : return loc;
5336 : :
5337 : : default:
5338 : : /* Otherwise, we don't have a split point. */
5339 : : return 0;
5340 : : }
5341 : : }
5342 : :
5343 : : /* Throughout X, replace FROM with TO, and return the result.
5344 : : The result is TO if X is FROM;
5345 : : otherwise the result is X, but its contents may have been modified.
5346 : : If they were modified, a record was made in undobuf so that
5347 : : undo_all will (among other things) return X to its original state.
5348 : :
5349 : : If the number of changes necessary is too much to record to undo,
5350 : : the excess changes are not made, so the result is invalid.
5351 : : The changes already made can still be undone.
5352 : : undobuf.num_undo is incremented for such changes, so by testing that
5353 : : the caller can tell whether the result is valid.
5354 : :
5355 : : `n_occurrences' is incremented each time FROM is replaced.
5356 : :
5357 : : IN_DEST is true if we are processing the SET_DEST of a SET.
5358 : :
5359 : : IN_COND is true if we are at the top level of a condition.
5360 : :
5361 : : UNIQUE_COPY is true if each substitution must be unique. We do this
5362 : : by copying if `n_occurrences' is nonzero. */
5363 : :
5364 : : static rtx
5365 : 376449469 : subst (rtx x, rtx from, rtx to, bool in_dest, bool in_cond, bool unique_copy)
5366 : : {
5367 : 376449469 : enum rtx_code code = GET_CODE (x);
5368 : 376449469 : machine_mode op0_mode = VOIDmode;
5369 : 376449469 : const char *fmt;
5370 : 376449469 : int len, i;
5371 : 376449469 : rtx new_rtx;
5372 : :
5373 : : /* Two expressions are equal if they are identical copies of a shared
5374 : : RTX or if they are both registers with the same register number
5375 : : and mode. */
5376 : :
5377 : : #define COMBINE_RTX_EQUAL_P(X,Y) \
5378 : : ((X) == (Y) \
5379 : : || (REG_P (X) && REG_P (Y) \
5380 : : && REGNO (X) == REGNO (Y) && GET_MODE (X) == GET_MODE (Y)))
5381 : :
5382 : : /* Do not substitute into clobbers of regs -- this will never result in
5383 : : valid RTL. */
5384 : 376449469 : if (GET_CODE (x) == CLOBBER && REG_P (XEXP (x, 0)))
5385 : : return x;
5386 : :
5387 : 366592711 : if (! in_dest && COMBINE_RTX_EQUAL_P (x, from))
5388 : : {
5389 : 0 : n_occurrences++;
5390 : 0 : return (unique_copy && n_occurrences > 1 ? copy_rtx (to) : to);
5391 : : }
5392 : :
5393 : : /* If X and FROM are the same register but different modes, they
5394 : : will not have been seen as equal above. However, the log links code
5395 : : will make a LOG_LINKS entry for that case. If we do nothing, we
5396 : : will try to rerecognize our original insn and, when it succeeds,
5397 : : we will delete the feeding insn, which is incorrect.
5398 : :
5399 : : So force this insn not to match in this (rare) case. */
5400 : 81778179 : if (! in_dest && code == REG && REG_P (from)
5401 : 396128642 : && reg_overlap_mentioned_p (x, from))
5402 : 4070 : return gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);
5403 : :
5404 : : /* If this is an object, we are done unless it is a MEM or LO_SUM, both
5405 : : of which may contain things that can be combined. */
5406 : 366588641 : if (code != MEM && code != LO_SUM && OBJECT_P (x))
5407 : : return x;
5408 : :
5409 : : /* It is possible to have a subexpression appear twice in the insn.
5410 : : Suppose that FROM is a register that appears within TO.
5411 : : Then, after that subexpression has been scanned once by `subst',
5412 : : the second time it is scanned, TO may be found. If we were
5413 : : to scan TO here, we would find FROM within it and create a
5414 : : self-referent rtl structure which is completely wrong. */
5415 : 197070254 : if (COMBINE_RTX_EQUAL_P (x, to))
5416 : : return to;
5417 : :
5418 : : /* Parallel asm_operands need special attention because all of the
5419 : : inputs are shared across the arms. Furthermore, unsharing the
5420 : : rtl results in recognition failures. Failure to handle this case
5421 : : specially can result in circular rtl.
5422 : :
5423 : : Solve this by doing a normal pass across the first entry of the
5424 : : parallel, and only processing the SET_DESTs of the subsequent
5425 : : entries. Ug. */
5426 : :
5427 : 196972616 : if (code == PARALLEL
5428 : 12153175 : && GET_CODE (XVECEXP (x, 0, 0)) == SET
5429 : 10464929 : && GET_CODE (SET_SRC (XVECEXP (x, 0, 0))) == ASM_OPERANDS)
5430 : : {
5431 : 18910 : new_rtx = subst (XVECEXP (x, 0, 0), from, to, false, false, unique_copy);
5432 : :
5433 : : /* If this substitution failed, this whole thing fails. */
5434 : 18910 : if (GET_CODE (new_rtx) == CLOBBER
5435 : 0 : && XEXP (new_rtx, 0) == const0_rtx)
5436 : : return new_rtx;
5437 : :
5438 : 18910 : SUBST (XVECEXP (x, 0, 0), new_rtx);
5439 : :
5440 : 94930 : for (i = XVECLEN (x, 0) - 1; i >= 1; i--)
5441 : : {
5442 : 76020 : rtx dest = SET_DEST (XVECEXP (x, 0, i));
5443 : :
5444 : 76020 : if (!REG_P (dest) && GET_CODE (dest) != PC)
5445 : : {
5446 : 2013 : new_rtx = subst (dest, from, to, false, false, unique_copy);
5447 : :
5448 : : /* If this substitution failed, this whole thing fails. */
5449 : 2013 : if (GET_CODE (new_rtx) == CLOBBER
5450 : 0 : && XEXP (new_rtx, 0) == const0_rtx)
5451 : : return new_rtx;
5452 : :
5453 : 2013 : SUBST (SET_DEST (XVECEXP (x, 0, i)), new_rtx);
5454 : : }
5455 : : }
5456 : : }
5457 : : else
5458 : : {
5459 : 196953706 : len = GET_RTX_LENGTH (code);
5460 : 196953706 : fmt = GET_RTX_FORMAT (code);
5461 : :
5462 : : /* We don't need to process a SET_DEST that is a register or PC, so
5463 : : set up to skip this common case. All other cases where we want
5464 : : to suppress replacing something inside a SET_SRC are handled via
5465 : : the IN_DEST operand. */
5466 : 196953706 : if (code == SET
5467 : 43678500 : && (REG_P (SET_DEST (x))
5468 : 43678500 : || GET_CODE (SET_DEST (x)) == PC))
5469 : 196953706 : fmt = "ie";
5470 : :
5471 : : /* Trying to simplify the operands of a widening MULT is not likely
5472 : : to create RTL matching a machine insn. */
5473 : 196953706 : if (code == MULT
5474 : 4577442 : && (GET_CODE (XEXP (x, 0)) == ZERO_EXTEND
5475 : 4577442 : || GET_CODE (XEXP (x, 0)) == SIGN_EXTEND)
5476 : 289014 : && (GET_CODE (XEXP (x, 1)) == ZERO_EXTEND
5477 : 289014 : || GET_CODE (XEXP (x, 1)) == SIGN_EXTEND)
5478 : 204114 : && REG_P (XEXP (XEXP (x, 0), 0))
5479 : 91912 : && REG_P (XEXP (XEXP (x, 1), 0))
5480 : 73061 : && from == to)
5481 : : return x;
5482 : :
5483 : :
5484 : : /* Get the mode of operand 0 in case X is now a SIGN_EXTEND of a
5485 : : constant. */
5486 : 196912936 : if (fmt[0] == 'e')
5487 : 144656086 : op0_mode = GET_MODE (XEXP (x, 0));
5488 : :
5489 : 581475482 : for (i = 0; i < len; i++)
5490 : : {
5491 : 385579281 : if (fmt[i] == 'E')
5492 : : {
5493 : 14218557 : int j;
5494 : 45148835 : for (j = XVECLEN (x, i) - 1; j >= 0; j--)
5495 : : {
5496 : 31089129 : if (COMBINE_RTX_EQUAL_P (XVECEXP (x, i, j), from))
5497 : : {
5498 : 1503 : new_rtx = (unique_copy && n_occurrences
5499 : 261306 : ? copy_rtx (to) : to);
5500 : 261281 : n_occurrences++;
5501 : : }
5502 : : else
5503 : : {
5504 : 30827848 : new_rtx = subst (XVECEXP (x, i, j), from, to,
5505 : : false, false, unique_copy);
5506 : :
5507 : : /* If this substitution failed, this whole thing
5508 : : fails. */
5509 : 30827848 : if (GET_CODE (new_rtx) == CLOBBER
5510 : 10355589 : && XEXP (new_rtx, 0) == const0_rtx)
5511 : : return new_rtx;
5512 : : }
5513 : :
5514 : 30930278 : SUBST (XVECEXP (x, i, j), new_rtx);
5515 : : }
5516 : : }
5517 : 371360724 : else if (fmt[i] == 'e')
5518 : : {
5519 : : /* If this is a register being set, ignore it. */
5520 : 302906140 : new_rtx = XEXP (x, i);
5521 : 302906140 : if (in_dest
5522 : 302906140 : && i == 0
5523 : 5675422 : && (((code == SUBREG || code == ZERO_EXTRACT)
5524 : 317015 : && REG_P (new_rtx))
5525 : 5360818 : || code == STRICT_LOW_PART))
5526 : : ;
5527 : :
5528 : 302580306 : else if (COMBINE_RTX_EQUAL_P (XEXP (x, i), from))
5529 : : {
5530 : : /* In general, don't install a subreg involving two
5531 : : modes not tieable. It can worsen register
5532 : : allocation, and can even make invalid reload
5533 : : insns, since the reg inside may need to be copied
5534 : : from in the outside mode, and that may be invalid
5535 : : if it is an fp reg copied in integer mode.
5536 : :
5537 : : We allow an exception to this: It is valid if
5538 : : it is inside another SUBREG and the mode of that
5539 : : SUBREG and the mode of the inside of TO is
5540 : : tieable. */
5541 : :
5542 : 43787985 : if (GET_CODE (to) == SUBREG
5543 : 533300 : && !targetm.modes_tieable_p (GET_MODE (to),
5544 : 533300 : GET_MODE (SUBREG_REG (to)))
5545 : 44127449 : && ! (code == SUBREG
5546 : 25494 : && (targetm.modes_tieable_p
5547 : 25494 : (GET_MODE (x), GET_MODE (SUBREG_REG (to))))))
5548 : 311746 : return gen_rtx_CLOBBER (VOIDmode, const0_rtx);
5549 : :
5550 : 43476239 : if (code == SUBREG
5551 : 2303557 : && REG_P (to)
5552 : 159534 : && REGNO (to) < FIRST_PSEUDO_REGISTER
5553 : 43476250 : && simplify_subreg_regno (REGNO (to), GET_MODE (to),
5554 : 11 : SUBREG_BYTE (x),
5555 : 11 : GET_MODE (x)) < 0)
5556 : 6 : return gen_rtx_CLOBBER (VOIDmode, const0_rtx);
5557 : :
5558 : 43476233 : new_rtx = (unique_copy && n_occurrences ? copy_rtx (to) : to);
5559 : 43476233 : n_occurrences++;
5560 : : }
5561 : : else
5562 : : /* If we are in a SET_DEST, suppress most cases unless we
5563 : : have gone inside a MEM, in which case we want to
5564 : : simplify the address. We assume here that things that
5565 : : are actually part of the destination have their inner
5566 : : parts in the first expression. This is true for SUBREG,
5567 : : STRICT_LOW_PART, and ZERO_EXTRACT, which are the only
5568 : : things aside from REG and MEM that should appear in a
5569 : : SET_DEST. */
5570 : 258792321 : new_rtx = subst (XEXP (x, i), from, to,
5571 : : (((in_dest
5572 : 5077769 : && (code == SUBREG || code == STRICT_LOW_PART
5573 : 5077769 : || code == ZERO_EXTRACT))
5574 : 258784607 : || code == SET)
5575 : 45081298 : && i == 0),
5576 : 258792321 : code == IF_THEN_ELSE && i == 0,
5577 : : unique_copy);
5578 : :
5579 : : /* If we found that we will have to reject this combination,
5580 : : indicate that by returning the CLOBBER ourselves, rather than
5581 : : an expression containing it. This will speed things up as
5582 : : well as prevent accidents where two CLOBBERs are considered
5583 : : to be equal, thus producing an incorrect simplification. */
5584 : :
5585 : 302594388 : if (GET_CODE (new_rtx) == CLOBBER && XEXP (new_rtx, 0) == const0_rtx)
5586 : : return new_rtx;
5587 : :
5588 : 302048506 : if (GET_CODE (x) == SUBREG && CONST_SCALAR_INT_P (new_rtx))
5589 : : {
5590 : 29901 : machine_mode mode = GET_MODE (x);
5591 : :
5592 : 59802 : x = simplify_subreg (GET_MODE (x), new_rtx,
5593 : 29901 : GET_MODE (SUBREG_REG (x)),
5594 : 29901 : SUBREG_BYTE (x));
5595 : 29901 : if (! x)
5596 : 2 : x = gen_rtx_CLOBBER (mode, const0_rtx);
5597 : : }
5598 : 302018605 : else if (CONST_SCALAR_INT_P (new_rtx)
5599 : : && (GET_CODE (x) == ZERO_EXTEND
5600 : 55477668 : || GET_CODE (x) == SIGN_EXTEND
5601 : : || GET_CODE (x) == FLOAT
5602 : : || GET_CODE (x) == UNSIGNED_FLOAT))
5603 : : {
5604 : 142328 : x = simplify_unary_operation (GET_CODE (x), GET_MODE (x),
5605 : : new_rtx,
5606 : 71164 : GET_MODE (XEXP (x, 0)));
5607 : 71164 : if (!x)
5608 : 250 : return gen_rtx_CLOBBER (VOIDmode, const0_rtx);
5609 : : }
5610 : : /* CONST_INTs shouldn't be substituted into PRE_DEC, PRE_MODIFY
5611 : : etc. arguments, otherwise we can ICE before trying to recog
5612 : : it. See PR104446. */
5613 : 301947441 : else if (CONST_SCALAR_INT_P (new_rtx)
5614 : 55406504 : && GET_RTX_CLASS (GET_CODE (x)) == RTX_AUTOINC)
5615 : 0 : return gen_rtx_CLOBBER (VOIDmode, const0_rtx);
5616 : : else
5617 : 301947441 : SUBST (XEXP (x, i), new_rtx);
5618 : : }
5619 : : }
5620 : : }
5621 : :
5622 : : /* Check if we are loading something from the constant pool via float
5623 : : extension; in this case we would undo compress_float_constant
5624 : : optimization and degenerate constant load to an immediate value. */
5625 : 195915111 : if (GET_CODE (x) == FLOAT_EXTEND
5626 : 303944 : && MEM_P (XEXP (x, 0))
5627 : 195977696 : && MEM_READONLY_P (XEXP (x, 0)))
5628 : : {
5629 : 35598 : rtx tmp = avoid_constant_pool_reference (x);
5630 : 35598 : if (x != tmp)
5631 : : return x;
5632 : : }
5633 : :
5634 : : /* Try to simplify X. If the simplification changed the code, it is likely
5635 : : that further simplification will help, so loop, but limit the number
5636 : : of repetitions that will be performed. */
5637 : :
5638 : 203349405 : for (i = 0; i < 4; i++)
5639 : : {
5640 : : /* If X is sufficiently simple, don't bother trying to do anything
5641 : : with it. */
5642 : 203334925 : if (code != CONST_INT && code != REG && code != CLOBBER)
5643 : 202635101 : x = combine_simplify_rtx (x, op0_mode, in_dest, in_cond);
5644 : :
5645 : 203334925 : if (GET_CODE (x) == code)
5646 : : break;
5647 : :
5648 : 7469754 : code = GET_CODE (x);
5649 : :
5650 : : /* We no longer know the original mode of operand 0 since we
5651 : : have changed the form of X) */
5652 : 7469754 : op0_mode = VOIDmode;
5653 : : }
5654 : :
5655 : : return x;
5656 : : }
5657 : :
5658 : : /* If X is a commutative operation whose operands are not in the canonical
5659 : : order, use substitutions to swap them. */
5660 : :
5661 : : static void
5662 : 628030046 : maybe_swap_commutative_operands (rtx x)
5663 : : {
5664 : 628030046 : if (COMMUTATIVE_ARITH_P (x)
5665 : 628030046 : && swap_commutative_operands_p (XEXP (x, 0), XEXP (x, 1)))
5666 : : {
5667 : 3357999 : rtx temp = XEXP (x, 0);
5668 : 3357999 : SUBST (XEXP (x, 0), XEXP (x, 1));
5669 : 3357999 : SUBST (XEXP (x, 1), temp);
5670 : : }
5671 : :
5672 : : /* Canonicalize (vec_merge (fma op2 op1 op3) op1 mask) to
5673 : : (vec_merge (fma op1 op2 op3) op1 mask). */
5674 : 628030046 : if (GET_CODE (x) == VEC_MERGE
5675 : 694831 : && GET_CODE (XEXP (x, 0)) == FMA)
5676 : : {
5677 : 26385 : rtx fma_op1 = XEXP (XEXP (x, 0), 0);
5678 : 26385 : rtx fma_op2 = XEXP (XEXP (x, 0), 1);
5679 : 26385 : rtx masked_op = XEXP (x, 1);
5680 : 26385 : if (rtx_equal_p (masked_op, fma_op2))
5681 : : {
5682 : 210 : if (GET_CODE (fma_op1) == NEG)
5683 : : {
5684 : : /* Keep the negate canonicalized to the first operand. */
5685 : 142 : fma_op1 = XEXP (fma_op1, 0);
5686 : 142 : SUBST (XEXP (XEXP (XEXP (x, 0), 0), 0), fma_op2);
5687 : 142 : SUBST (XEXP (XEXP (x, 0), 1), fma_op1);
5688 : : }
5689 : : else
5690 : : {
5691 : 68 : SUBST (XEXP (XEXP (x, 0), 0), fma_op2);
5692 : 68 : SUBST (XEXP (XEXP (x, 0), 1), fma_op1);
5693 : : }
5694 : : }
5695 : : }
5696 : :
5697 : 628030046 : unsigned n_elts = 0;
5698 : 628030046 : if (GET_CODE (x) == VEC_MERGE
5699 : 694831 : && CONST_INT_P (XEXP (x, 2))
5700 : 698264 : && GET_MODE_NUNITS (GET_MODE (x)).is_constant (&n_elts)
5701 : 628379178 : && (swap_commutative_operands_p (XEXP (x, 0), XEXP (x, 1))
5702 : : /* Two operands have same precedence, then
5703 : : first bit of mask select first operand. */
5704 : 319824 : || (!swap_commutative_operands_p (XEXP (x, 1), XEXP (x, 0))
5705 : 63312 : && !(UINTVAL (XEXP (x, 2)) & 1))))
5706 : : {
5707 : 43083 : rtx temp = XEXP (x, 0);
5708 : 43083 : unsigned HOST_WIDE_INT sel = UINTVAL (XEXP (x, 2));
5709 : 43083 : unsigned HOST_WIDE_INT mask = HOST_WIDE_INT_1U;
5710 : 43083 : if (n_elts == HOST_BITS_PER_WIDE_INT)
5711 : : mask = -1;
5712 : : else
5713 : 43073 : mask = (HOST_WIDE_INT_1U << n_elts) - 1;
5714 : 43083 : SUBST (XEXP (x, 0), XEXP (x, 1));
5715 : 43083 : SUBST (XEXP (x, 1), temp);
5716 : 43083 : SUBST (XEXP (x, 2), GEN_INT (~sel & mask));
5717 : : }
5718 : 628030046 : }
5719 : :
5720 : : /* Simplify X, a piece of RTL. We just operate on the expression at the
5721 : : outer level; call `subst' to simplify recursively. Return the new
5722 : : expression.
5723 : :
5724 : : OP0_MODE is the original mode of XEXP (x, 0). IN_DEST is true
5725 : : if we are inside a SET_DEST. IN_COND is true if we are at the top level
5726 : : of a condition. */
5727 : :
5728 : : static rtx
5729 : 202891544 : combine_simplify_rtx (rtx x, machine_mode op0_mode, bool in_dest, bool in_cond)
5730 : : {
5731 : 202891544 : enum rtx_code code = GET_CODE (x);
5732 : 202891544 : machine_mode mode = GET_MODE (x);
5733 : 202891544 : scalar_int_mode int_mode;
5734 : 202891544 : rtx temp;
5735 : 202891544 : int i;
5736 : :
5737 : : /* If this is a commutative operation, put a constant last and a complex
5738 : : expression first. We don't need to do this for comparisons here. */
5739 : 202891544 : maybe_swap_commutative_operands (x);
5740 : :
5741 : : /* Try to fold this expression in case we have constants that weren't
5742 : : present before. */
5743 : 202891544 : temp = 0;
5744 : 202891544 : switch (GET_RTX_CLASS (code))
5745 : : {
5746 : 6770513 : case RTX_UNARY:
5747 : 6770513 : if (op0_mode == VOIDmode)
5748 : 151845 : op0_mode = GET_MODE (XEXP (x, 0));
5749 : 6770513 : temp = simplify_unary_operation (code, mode, XEXP (x, 0), op0_mode);
5750 : 6770513 : break;
5751 : 15530535 : case RTX_COMPARE:
5752 : 15530535 : case RTX_COMM_COMPARE:
5753 : 15530535 : {
5754 : 15530535 : machine_mode cmp_mode = GET_MODE (XEXP (x, 0));
5755 : 15530535 : if (cmp_mode == VOIDmode)
5756 : : {
5757 : 46497 : cmp_mode = GET_MODE (XEXP (x, 1));
5758 : 46497 : if (cmp_mode == VOIDmode)
5759 : 7795 : cmp_mode = op0_mode;
5760 : : }
5761 : 15530535 : temp = simplify_relational_operation (code, mode, cmp_mode,
5762 : : XEXP (x, 0), XEXP (x, 1));
5763 : : }
5764 : 15530535 : break;
5765 : 81025847 : case RTX_COMM_ARITH:
5766 : 81025847 : case RTX_BIN_ARITH:
5767 : 81025847 : temp = simplify_binary_operation (code, mode, XEXP (x, 0), XEXP (x, 1));
5768 : 81025847 : break;
5769 : 12225493 : case RTX_BITFIELD_OPS:
5770 : 12225493 : case RTX_TERNARY:
5771 : 12225493 : temp = simplify_ternary_operation (code, mode, op0_mode, XEXP (x, 0),
5772 : : XEXP (x, 1), XEXP (x, 2));
5773 : 12225493 : break;
5774 : : default:
5775 : : break;
5776 : : }
5777 : :
5778 : 115552388 : if (temp)
5779 : : {
5780 : 15234082 : x = temp;
5781 : 15234082 : code = GET_CODE (temp);
5782 : 15234082 : op0_mode = VOIDmode;
5783 : 15234082 : mode = GET_MODE (temp);
5784 : : }
5785 : :
5786 : : /* If this is a simple operation applied to an IF_THEN_ELSE, try
5787 : : applying it to the arms of the IF_THEN_ELSE. This often simplifies
5788 : : things. Check for cases where both arms are testing the same
5789 : : condition.
5790 : :
5791 : : Don't do anything if all operands are very simple. */
5792 : :
5793 : 202891544 : if ((BINARY_P (x)
5794 : 96258409 : && ((!OBJECT_P (XEXP (x, 0))
5795 : 37887515 : && ! (GET_CODE (XEXP (x, 0)) == SUBREG
5796 : 4552081 : && OBJECT_P (SUBREG_REG (XEXP (x, 0)))))
5797 : 60944382 : || (!OBJECT_P (XEXP (x, 1))
5798 : 4237688 : && ! (GET_CODE (XEXP (x, 1)) == SUBREG
5799 : 1547580 : && OBJECT_P (SUBREG_REG (XEXP (x, 1)))))))
5800 : 164635654 : || (UNARY_P (x)
5801 : 6669429 : && (!OBJECT_P (XEXP (x, 0))
5802 : 2866366 : && ! (GET_CODE (XEXP (x, 0)) == SUBREG
5803 : 651680 : && OBJECT_P (SUBREG_REG (XEXP (x, 0)))))))
5804 : : {
5805 : 40543247 : rtx cond, true_rtx, false_rtx;
5806 : :
5807 : 40543247 : cond = if_then_else_cond (x, &true_rtx, &false_rtx);
5808 : 40543247 : if (cond != 0
5809 : : /* If everything is a comparison, what we have is highly unlikely
5810 : : to be simpler, so don't use it. */
5811 : 3575775 : && ! (COMPARISON_P (x)
5812 : 982577 : && (COMPARISON_P (true_rtx) || COMPARISON_P (false_rtx)))
5813 : : /* Similarly, if we end up with one of the expressions the same
5814 : : as the original, it is certainly not simpler. */
5815 : 3432159 : && ! rtx_equal_p (x, true_rtx)
5816 : 43975406 : && ! rtx_equal_p (x, false_rtx))
5817 : : {
5818 : 3432159 : rtx cop1 = const0_rtx;
5819 : 3432159 : enum rtx_code cond_code = simplify_comparison (NE, &cond, &cop1);
5820 : :
5821 : 3432159 : if (cond_code == NE && COMPARISON_P (cond))
5822 : 559775 : return x;
5823 : :
5824 : : /* Simplify the alternative arms; this may collapse the true and
5825 : : false arms to store-flag values. Be careful to use copy_rtx
5826 : : here since true_rtx or false_rtx might share RTL with x as a
5827 : : result of the if_then_else_cond call above. */
5828 : 2872384 : true_rtx = subst (copy_rtx (true_rtx), pc_rtx, pc_rtx,
5829 : : false, false, false);
5830 : 2872384 : false_rtx = subst (copy_rtx (false_rtx), pc_rtx, pc_rtx,
5831 : : false, false, false);
5832 : :
5833 : : /* If true_rtx and false_rtx are not general_operands, an if_then_else
5834 : : is unlikely to be simpler. */
5835 : 2872384 : if (general_operand (true_rtx, VOIDmode)
5836 : 2872384 : && general_operand (false_rtx, VOIDmode))
5837 : : {
5838 : 1108314 : enum rtx_code reversed;
5839 : :
5840 : : /* Restarting if we generate a store-flag expression will cause
5841 : : us to loop. Just drop through in this case. */
5842 : :
5843 : : /* If the result values are STORE_FLAG_VALUE and zero, we can
5844 : : just make the comparison operation. */
5845 : 1108314 : if (true_rtx == const_true_rtx && false_rtx == const0_rtx)
5846 : 411560 : x = simplify_gen_relational (cond_code, mode, VOIDmode,
5847 : : cond, cop1);
5848 : 489146 : else if (true_rtx == const0_rtx && false_rtx == const_true_rtx
5849 : 696754 : && ((reversed = reversed_comparison_code_parts
5850 : 424575 : (cond_code, cond, cop1, NULL))
5851 : : != UNKNOWN))
5852 : 424575 : x = simplify_gen_relational (reversed, mode, VOIDmode,
5853 : : cond, cop1);
5854 : :
5855 : : /* Likewise, we can make the negate of a comparison operation
5856 : : if the result values are - STORE_FLAG_VALUE and zero. */
5857 : 272179 : else if (CONST_INT_P (true_rtx)
5858 : 187889 : && INTVAL (true_rtx) == - STORE_FLAG_VALUE
5859 : 39192 : && false_rtx == const0_rtx)
5860 : 37589 : x = simplify_gen_unary (NEG, mode,
5861 : : simplify_gen_relational (cond_code,
5862 : : mode, VOIDmode,
5863 : : cond, cop1),
5864 : : mode);
5865 : 234590 : else if (CONST_INT_P (false_rtx)
5866 : 170766 : && INTVAL (false_rtx) == - STORE_FLAG_VALUE
5867 : 22857 : && true_rtx == const0_rtx
5868 : 234590 : && ((reversed = reversed_comparison_code_parts
5869 : 20143 : (cond_code, cond, cop1, NULL))
5870 : : != UNKNOWN))
5871 : 20140 : x = simplify_gen_unary (NEG, mode,
5872 : : simplify_gen_relational (reversed,
5873 : : mode, VOIDmode,
5874 : : cond, cop1),
5875 : : mode);
5876 : :
5877 : 1108314 : code = GET_CODE (x);
5878 : 1108314 : op0_mode = VOIDmode;
5879 : : }
5880 : : }
5881 : : }
5882 : :
5883 : : /* First see if we can apply the inverse distributive law. */
5884 : 202331769 : if (code == PLUS || code == MINUS
5885 : 202331769 : || code == AND || code == IOR || code == XOR)
5886 : : {
5887 : 45845622 : x = apply_distributive_law (x);
5888 : 45845622 : code = GET_CODE (x);
5889 : 45845622 : op0_mode = VOIDmode;
5890 : : }
5891 : :
5892 : : /* If CODE is an associative operation not otherwise handled, see if we
5893 : : can associate some operands. This can win if they are constants or
5894 : : if they are logically related (i.e. (a & b) & a). */
5895 : 202331769 : if ((code == PLUS || code == MINUS || code == MULT || code == DIV
5896 : : || code == AND || code == IOR || code == XOR
5897 : : || code == SMAX || code == SMIN || code == UMAX || code == UMIN)
5898 : 49649458 : && ((INTEGRAL_MODE_P (mode) && code != DIV)
5899 : 4468887 : || (flag_associative_math && FLOAT_MODE_P (mode))))
5900 : : {
5901 : 45804320 : if (GET_CODE (XEXP (x, 0)) == code)
5902 : : {
5903 : 3893348 : rtx other = XEXP (XEXP (x, 0), 0);
5904 : 3893348 : rtx inner_op0 = XEXP (XEXP (x, 0), 1);
5905 : 3893348 : rtx inner_op1 = XEXP (x, 1);
5906 : 3893348 : rtx inner;
5907 : :
5908 : : /* Make sure we pass the constant operand if any as the second
5909 : : one if this is a commutative operation. */
5910 : 3893348 : if (CONSTANT_P (inner_op0) && COMMUTATIVE_ARITH_P (x))
5911 : : std::swap (inner_op0, inner_op1);
5912 : 3893348 : inner = simplify_binary_operation (code == MINUS ? PLUS
5913 : 3809601 : : code == DIV ? MULT
5914 : : : code,
5915 : : mode, inner_op0, inner_op1);
5916 : :
5917 : : /* For commutative operations, try the other pair if that one
5918 : : didn't simplify. */
5919 : 3893348 : if (inner == 0 && COMMUTATIVE_ARITH_P (x))
5920 : : {
5921 : 3780173 : other = XEXP (XEXP (x, 0), 1);
5922 : 3780173 : inner = simplify_binary_operation (code, mode,
5923 : : XEXP (XEXP (x, 0), 0),
5924 : : XEXP (x, 1));
5925 : : }
5926 : :
5927 : 3860814 : if (inner)
5928 : 256561 : return simplify_gen_binary (code, mode, other, inner);
5929 : : }
5930 : : }
5931 : :
5932 : : /* A little bit of algebraic simplification here. */
5933 : 202075208 : switch (code)
5934 : : {
5935 : 20632982 : case MEM:
5936 : : /* Ensure that our address has any ASHIFTs converted to MULT in case
5937 : : address-recognizing predicates are called later. */
5938 : 20632982 : temp = make_compound_operation (XEXP (x, 0), MEM);
5939 : 20632982 : SUBST (XEXP (x, 0), temp);
5940 : 20632982 : break;
5941 : :
5942 : 7925134 : case SUBREG:
5943 : 7925134 : if (op0_mode == VOIDmode)
5944 : 163426 : op0_mode = GET_MODE (SUBREG_REG (x));
5945 : :
5946 : : /* See if this can be moved to simplify_subreg. */
5947 : 7925134 : if (CONSTANT_P (SUBREG_REG (x))
5948 : 14220 : && known_eq (subreg_lowpart_offset (mode, op0_mode), SUBREG_BYTE (x))
5949 : : /* Don't call gen_lowpart if the inner mode
5950 : : is VOIDmode and we cannot simplify it, as SUBREG without
5951 : : inner mode is invalid. */
5952 : 7939354 : && (GET_MODE (SUBREG_REG (x)) != VOIDmode
5953 : 0 : || gen_lowpart_common (mode, SUBREG_REG (x))))
5954 : 14220 : return gen_lowpart (mode, SUBREG_REG (x));
5955 : :
5956 : 7910914 : if (GET_MODE_CLASS (GET_MODE (SUBREG_REG (x))) == MODE_CC)
5957 : : break;
5958 : 7910914 : {
5959 : 7910914 : rtx temp;
5960 : 15821828 : temp = simplify_subreg (mode, SUBREG_REG (x), op0_mode,
5961 : 7910914 : SUBREG_BYTE (x));
5962 : 7910914 : if (temp)
5963 : 202891544 : return temp;
5964 : :
5965 : : /* If op is known to have all lower bits zero, the result is zero. */
5966 : 7428476 : scalar_int_mode int_mode, int_op0_mode;
5967 : 7428476 : if (!in_dest
5968 : 4471658 : && is_a <scalar_int_mode> (mode, &int_mode)
5969 : 4395252 : && is_a <scalar_int_mode> (op0_mode, &int_op0_mode)
5970 : 4395252 : && (GET_MODE_PRECISION (int_mode)
5971 : 4395252 : < GET_MODE_PRECISION (int_op0_mode))
5972 : 3909886 : && known_eq (subreg_lowpart_offset (int_mode, int_op0_mode),
5973 : : SUBREG_BYTE (x))
5974 : 3446464 : && HWI_COMPUTABLE_MODE_P (int_op0_mode)
5975 : 3185106 : && ((nonzero_bits (SUBREG_REG (x), int_op0_mode)
5976 : 3185106 : & GET_MODE_MASK (int_mode)) == 0)
5977 : 7429144 : && !side_effects_p (SUBREG_REG (x)))
5978 : 668 : return CONST0_RTX (int_mode);
5979 : : }
5980 : :
5981 : : /* Don't change the mode of the MEM if that would change the meaning
5982 : : of the address. */
5983 : 7427808 : if (MEM_P (SUBREG_REG (x))
5984 : 7427808 : && (MEM_VOLATILE_P (SUBREG_REG (x))
5985 : 69762 : || mode_dependent_address_p (XEXP (SUBREG_REG (x), 0),
5986 : 69827 : MEM_ADDR_SPACE (SUBREG_REG (x)))))
5987 : 44061 : return gen_rtx_CLOBBER (mode, const0_rtx);
5988 : :
5989 : : /* Note that we cannot do any narrowing for non-constants since
5990 : : we might have been counting on using the fact that some bits were
5991 : : zero. We now do this in the SET. */
5992 : :
5993 : : break;
5994 : :
5995 : 357168 : case NEG:
5996 : 357168 : temp = expand_compound_operation (XEXP (x, 0));
5997 : :
5998 : : /* For C equal to the width of MODE minus 1, (neg (ashiftrt X C)) can be
5999 : : replaced by (lshiftrt X C). This will convert
6000 : : (neg (sign_extract X 1 Y)) to (zero_extract X 1 Y). */
6001 : :
6002 : 357168 : if (GET_CODE (temp) == ASHIFTRT
6003 : 13715 : && CONST_INT_P (XEXP (temp, 1))
6004 : 384536 : && INTVAL (XEXP (temp, 1)) == GET_MODE_UNIT_PRECISION (mode) - 1)
6005 : 0 : return simplify_shift_const (NULL_RTX, LSHIFTRT, mode, XEXP (temp, 0),
6006 : 0 : INTVAL (XEXP (temp, 1)));
6007 : :
6008 : : /* If X has only a single bit that might be nonzero, say, bit I, convert
6009 : : (neg X) to (ashiftrt (ashift X C-I) C-I) where C is the bitsize of
6010 : : MODE minus 1. This will convert (neg (zero_extract X 1 Y)) to
6011 : : (sign_extract X 1 Y). But only do this if TEMP isn't a register
6012 : : or a SUBREG of one since we'd be making the expression more
6013 : : complex if it was just a register. */
6014 : :
6015 : 357168 : if (!REG_P (temp)
6016 : 169928 : && ! (GET_CODE (temp) == SUBREG
6017 : 20881 : && REG_P (SUBREG_REG (temp)))
6018 : 203018448 : && is_a <scalar_int_mode> (mode, &int_mode)
6019 : 484072 : && (i = exact_log2 (nonzero_bits (temp, int_mode))) >= 0)
6020 : : {
6021 : 58465 : rtx temp1 = simplify_shift_const
6022 : 58465 : (NULL_RTX, ASHIFTRT, int_mode,
6023 : : simplify_shift_const (NULL_RTX, ASHIFT, int_mode, temp,
6024 : 58465 : GET_MODE_PRECISION (int_mode) - 1 - i),
6025 : 58465 : GET_MODE_PRECISION (int_mode) - 1 - i);
6026 : :
6027 : : /* If all we did was surround TEMP with the two shifts, we
6028 : : haven't improved anything, so don't use it. Otherwise,
6029 : : we are better off with TEMP1. */
6030 : 58465 : if (GET_CODE (temp1) != ASHIFTRT
6031 : 58271 : || GET_CODE (XEXP (temp1, 0)) != ASHIFT
6032 : 58271 : || XEXP (XEXP (temp1, 0), 0) != temp)
6033 : : return temp1;
6034 : : }
6035 : : break;
6036 : :
6037 : 8803 : case TRUNCATE:
6038 : : /* We can't handle truncation to a partial integer mode here
6039 : : because we don't know the real bitsize of the partial
6040 : : integer mode. */
6041 : 8803 : if (GET_MODE_CLASS (mode) == MODE_PARTIAL_INT)
6042 : : break;
6043 : :
6044 : 8803 : if (HWI_COMPUTABLE_MODE_P (mode))
6045 : 0 : SUBST (XEXP (x, 0),
6046 : : force_to_mode (XEXP (x, 0), GET_MODE (XEXP (x, 0)),
6047 : : GET_MODE_MASK (mode), false));
6048 : :
6049 : : /* We can truncate a constant value and return it. */
6050 : 8803 : {
6051 : 8803 : poly_int64 c;
6052 : 8803 : if (poly_int_rtx_p (XEXP (x, 0), &c))
6053 : 0 : return gen_int_mode (c, mode);
6054 : : }
6055 : :
6056 : : /* Similarly to what we do in simplify-rtx.cc, a truncate of a register
6057 : : whose value is a comparison can be replaced with a subreg if
6058 : : STORE_FLAG_VALUE permits. */
6059 : 8803 : if (HWI_COMPUTABLE_MODE_P (mode)
6060 : 0 : && (STORE_FLAG_VALUE & ~GET_MODE_MASK (mode)) == 0
6061 : 0 : && (temp = get_last_value (XEXP (x, 0)))
6062 : 0 : && COMPARISON_P (temp)
6063 : 8803 : && TRULY_NOOP_TRUNCATION_MODES_P (mode, GET_MODE (XEXP (x, 0))))
6064 : 0 : return gen_lowpart (mode, XEXP (x, 0));
6065 : : break;
6066 : :
6067 : 8171 : case CONST:
6068 : : /* (const (const X)) can become (const X). Do it this way rather than
6069 : : returning the inner CONST since CONST can be shared with a
6070 : : REG_EQUAL note. */
6071 : 8171 : if (GET_CODE (XEXP (x, 0)) == CONST)
6072 : 0 : SUBST (XEXP (x, 0), XEXP (XEXP (x, 0), 0));
6073 : : break;
6074 : :
6075 : : case LO_SUM:
6076 : : /* Convert (lo_sum (high FOO) FOO) to FOO. This is necessary so we
6077 : : can add in an offset. find_split_point will split this address up
6078 : : again if it doesn't match. */
6079 : : if (HAVE_lo_sum && GET_CODE (XEXP (x, 0)) == HIGH
6080 : : && rtx_equal_p (XEXP (XEXP (x, 0), 0), XEXP (x, 1)))
6081 : : return XEXP (x, 1);
6082 : : break;
6083 : :
6084 : 31077900 : case PLUS:
6085 : : /* (plus (xor (and <foo> (const_int pow2 - 1)) <c>) <-c>)
6086 : : when c is (const_int (pow2 + 1) / 2) is a sign extension of a
6087 : : bit-field and can be replaced by either a sign_extend or a
6088 : : sign_extract. The `and' may be a zero_extend and the two
6089 : : <c>, -<c> constants may be reversed. */
6090 : 31077900 : if (GET_CODE (XEXP (x, 0)) == XOR
6091 : 31077900 : && is_a <scalar_int_mode> (mode, &int_mode)
6092 : 12780 : && CONST_INT_P (XEXP (x, 1))
6093 : 4347 : && CONST_INT_P (XEXP (XEXP (x, 0), 1))
6094 : 3820 : && INTVAL (XEXP (x, 1)) == -INTVAL (XEXP (XEXP (x, 0), 1))
6095 : 76 : && ((i = exact_log2 (UINTVAL (XEXP (XEXP (x, 0), 1)))) >= 0
6096 : 0 : || (i = exact_log2 (UINTVAL (XEXP (x, 1)))) >= 0)
6097 : 38 : && HWI_COMPUTABLE_MODE_P (int_mode)
6098 : 31077938 : && ((GET_CODE (XEXP (XEXP (x, 0), 0)) == AND
6099 : 0 : && CONST_INT_P (XEXP (XEXP (XEXP (x, 0), 0), 1))
6100 : 0 : && (UINTVAL (XEXP (XEXP (XEXP (x, 0), 0), 1))
6101 : 0 : == (HOST_WIDE_INT_1U << (i + 1)) - 1))
6102 : 38 : || (GET_CODE (XEXP (XEXP (x, 0), 0)) == ZERO_EXTEND
6103 : 0 : && known_eq ((GET_MODE_PRECISION
6104 : : (GET_MODE (XEXP (XEXP (XEXP (x, 0), 0), 0)))),
6105 : : (unsigned int) i + 1))))
6106 : 0 : return simplify_shift_const
6107 : 0 : (NULL_RTX, ASHIFTRT, int_mode,
6108 : : simplify_shift_const (NULL_RTX, ASHIFT, int_mode,
6109 : : XEXP (XEXP (XEXP (x, 0), 0), 0),
6110 : 0 : GET_MODE_PRECISION (int_mode) - (i + 1)),
6111 : 0 : GET_MODE_PRECISION (int_mode) - (i + 1));
6112 : :
6113 : : /* If only the low-order bit of X is possibly nonzero, (plus x -1)
6114 : : can become (ashiftrt (ashift (xor x 1) C) C) where C is
6115 : : the bitsize of the mode - 1. This allows simplification of
6116 : : "a = (b & 8) == 0;" */
6117 : 31077900 : if (XEXP (x, 1) == constm1_rtx
6118 : 666243 : && !REG_P (XEXP (x, 0))
6119 : 290081 : && ! (GET_CODE (XEXP (x, 0)) == SUBREG
6120 : 37406 : && REG_P (SUBREG_REG (XEXP (x, 0))))
6121 : 31324197 : && is_a <scalar_int_mode> (mode, &int_mode)
6122 : 31332241 : && nonzero_bits (XEXP (x, 0), int_mode) == 1)
6123 : 8044 : return simplify_shift_const
6124 : 8044 : (NULL_RTX, ASHIFTRT, int_mode,
6125 : : simplify_shift_const (NULL_RTX, ASHIFT, int_mode,
6126 : : gen_rtx_XOR (int_mode, XEXP (x, 0),
6127 : : const1_rtx),
6128 : 8044 : GET_MODE_PRECISION (int_mode) - 1),
6129 : 16088 : GET_MODE_PRECISION (int_mode) - 1);
6130 : :
6131 : : /* If we are adding two things that have no bits in common, convert
6132 : : the addition into an IOR. This will often be further simplified,
6133 : : for example in cases like ((a & 1) + (a & 2)), which can
6134 : : become a & 3. */
6135 : :
6136 : 31069856 : if (HWI_COMPUTABLE_MODE_P (mode)
6137 : 27275533 : && (nonzero_bits (XEXP (x, 0), mode)
6138 : 27275533 : & nonzero_bits (XEXP (x, 1), mode)) == 0)
6139 : : {
6140 : : /* Try to simplify the expression further. */
6141 : 256443 : rtx tor = simplify_gen_binary (IOR, mode, XEXP (x, 0), XEXP (x, 1));
6142 : 256443 : temp = combine_simplify_rtx (tor, VOIDmode, in_dest, false);
6143 : :
6144 : : /* If we could, great. If not, do not go ahead with the IOR
6145 : : replacement, since PLUS appears in many special purpose
6146 : : address arithmetic instructions. */
6147 : 256443 : if (GET_CODE (temp) != CLOBBER
6148 : 256443 : && (GET_CODE (temp) != IOR
6149 : 252033 : || ((XEXP (temp, 0) != XEXP (x, 0)
6150 : 250564 : || XEXP (temp, 1) != XEXP (x, 1))
6151 : 1469 : && (XEXP (temp, 0) != XEXP (x, 1)
6152 : 0 : || XEXP (temp, 1) != XEXP (x, 0)))))
6153 : : return temp;
6154 : : }
6155 : :
6156 : : /* Canonicalize x + x into x << 1. */
6157 : 31063977 : if (GET_MODE_CLASS (mode) == MODE_INT
6158 : 27584202 : && rtx_equal_p (XEXP (x, 0), XEXP (x, 1))
6159 : 31066865 : && !side_effects_p (XEXP (x, 0)))
6160 : 2888 : return simplify_gen_binary (ASHIFT, mode, XEXP (x, 0), const1_rtx);
6161 : :
6162 : : break;
6163 : :
6164 : 3546573 : case MINUS:
6165 : : /* (minus <foo> (and <foo> (const_int -pow2))) becomes
6166 : : (and <foo> (const_int pow2-1)) */
6167 : 3546573 : if (is_a <scalar_int_mode> (mode, &int_mode)
6168 : 2997102 : && GET_CODE (XEXP (x, 1)) == AND
6169 : 148091 : && CONST_INT_P (XEXP (XEXP (x, 1), 1))
6170 : 145359 : && pow2p_hwi (-UINTVAL (XEXP (XEXP (x, 1), 1)))
6171 : 85915 : && rtx_equal_p (XEXP (XEXP (x, 1), 0), XEXP (x, 0)))
6172 : 0 : return simplify_and_const_int (NULL_RTX, int_mode, XEXP (x, 0),
6173 : 0 : -INTVAL (XEXP (XEXP (x, 1), 1)) - 1);
6174 : : break;
6175 : :
6176 : 2972905 : case MULT:
6177 : : /* If we have (mult (plus A B) C), apply the distributive law and then
6178 : : the inverse distributive law to see if things simplify. This
6179 : : occurs mostly in addresses, often when unrolling loops. */
6180 : :
6181 : 2972905 : if (GET_CODE (XEXP (x, 0)) == PLUS)
6182 : : {
6183 : 278812 : rtx result = distribute_and_simplify_rtx (x, 0);
6184 : 278812 : if (result)
6185 : : return result;
6186 : : }
6187 : :
6188 : : /* Try simplify a*(b/c) as (a*b)/c. */
6189 : 2970100 : if (FLOAT_MODE_P (mode) && flag_associative_math
6190 : 199272 : && GET_CODE (XEXP (x, 0)) == DIV)
6191 : : {
6192 : 309 : rtx tem = simplify_binary_operation (MULT, mode,
6193 : : XEXP (XEXP (x, 0), 0),
6194 : : XEXP (x, 1));
6195 : 309 : if (tem)
6196 : 34 : return simplify_gen_binary (DIV, mode, tem, XEXP (XEXP (x, 0), 1));
6197 : : }
6198 : : break;
6199 : :
6200 : 117735 : case UDIV:
6201 : : /* If this is a divide by a power of two, treat it as a shift if
6202 : : its first operand is a shift. */
6203 : 117735 : if (is_a <scalar_int_mode> (mode, &int_mode)
6204 : 117735 : && CONST_INT_P (XEXP (x, 1))
6205 : 1779 : && (i = exact_log2 (UINTVAL (XEXP (x, 1)))) >= 0
6206 : 0 : && (GET_CODE (XEXP (x, 0)) == ASHIFT
6207 : 0 : || GET_CODE (XEXP (x, 0)) == LSHIFTRT
6208 : 0 : || GET_CODE (XEXP (x, 0)) == ASHIFTRT
6209 : 0 : || GET_CODE (XEXP (x, 0)) == ROTATE
6210 : 0 : || GET_CODE (XEXP (x, 0)) == ROTATERT))
6211 : 0 : return simplify_shift_const (NULL_RTX, LSHIFTRT, int_mode,
6212 : 0 : XEXP (x, 0), i);
6213 : : break;
6214 : :
6215 : 15491063 : case EQ: case NE:
6216 : 15491063 : case GT: case GTU: case GE: case GEU:
6217 : 15491063 : case LT: case LTU: case LE: case LEU:
6218 : 15491063 : case UNEQ: case LTGT:
6219 : 15491063 : case UNGT: case UNGE:
6220 : 15491063 : case UNLT: case UNLE:
6221 : 15491063 : case UNORDERED: case ORDERED:
6222 : : /* If the first operand is a condition code, we can't do anything
6223 : : with it. */
6224 : 15491063 : if (GET_CODE (XEXP (x, 0)) == COMPARE
6225 : 15491063 : || GET_MODE_CLASS (GET_MODE (XEXP (x, 0))) != MODE_CC)
6226 : : {
6227 : 11639649 : rtx op0 = XEXP (x, 0);
6228 : 11639649 : rtx op1 = XEXP (x, 1);
6229 : 11639649 : enum rtx_code new_code;
6230 : :
6231 : 11639649 : if (GET_CODE (op0) == COMPARE)
6232 : 0 : op1 = XEXP (op0, 1), op0 = XEXP (op0, 0);
6233 : :
6234 : : /* Simplify our comparison, if possible. */
6235 : 11639649 : new_code = simplify_comparison (code, &op0, &op1);
6236 : :
6237 : : /* If STORE_FLAG_VALUE is 1, we can convert (ne x 0) to simply X
6238 : : if only the low-order bit is possibly nonzero in X (such as when
6239 : : X is a ZERO_EXTRACT of one bit). Similarly, we can convert EQ to
6240 : : (xor X 1) or (minus 1 X); we use the former. Finally, if X is
6241 : : known to be either 0 or -1, NE becomes a NEG and EQ becomes
6242 : : (plus X 1).
6243 : :
6244 : : Remove any ZERO_EXTRACT we made when thinking this was a
6245 : : comparison. It may now be simpler to use, e.g., an AND. If a
6246 : : ZERO_EXTRACT is indeed appropriate, it will be placed back by
6247 : : the call to make_compound_operation in the SET case.
6248 : :
6249 : : Don't apply these optimizations if the caller would
6250 : : prefer a comparison rather than a value.
6251 : : E.g., for the condition in an IF_THEN_ELSE most targets need
6252 : : an explicit comparison. */
6253 : :
6254 : 11639649 : if (in_cond)
6255 : : ;
6256 : :
6257 : 1972637 : else if (STORE_FLAG_VALUE == 1
6258 : : && new_code == NE
6259 : 2345763 : && is_int_mode (mode, &int_mode)
6260 : 373258 : && op1 == const0_rtx
6261 : 181123 : && int_mode == GET_MODE (op0)
6262 : 2035509 : && nonzero_bits (op0, int_mode) == 1)
6263 : 132 : return gen_lowpart (int_mode,
6264 : 363426 : expand_compound_operation (op0));
6265 : :
6266 : 1972505 : else if (STORE_FLAG_VALUE == 1
6267 : : && new_code == NE
6268 : 2344987 : && is_int_mode (mode, &int_mode)
6269 : 373126 : && op1 == const0_rtx
6270 : 180991 : && int_mode == GET_MODE (op0)
6271 : 2035245 : && (num_sign_bit_copies (op0, int_mode)
6272 : 62740 : == GET_MODE_PRECISION (int_mode)))
6273 : : {
6274 : 644 : op0 = expand_compound_operation (op0);
6275 : 644 : return simplify_gen_unary (NEG, int_mode,
6276 : 644 : gen_lowpart (int_mode, op0),
6277 : 644 : int_mode);
6278 : : }
6279 : :
6280 : 1971861 : else if (STORE_FLAG_VALUE == 1
6281 : : && new_code == EQ
6282 : 2327373 : && is_int_mode (mode, &int_mode)
6283 : 356911 : && op1 == const0_rtx
6284 : 200539 : && int_mode == GET_MODE (op0)
6285 : 2085849 : && nonzero_bits (op0, int_mode) == 1)
6286 : : {
6287 : 1399 : op0 = expand_compound_operation (op0);
6288 : 1399 : return simplify_gen_binary (XOR, int_mode,
6289 : 1399 : gen_lowpart (int_mode, op0),
6290 : 1399 : const1_rtx);
6291 : : }
6292 : :
6293 : 1970462 : else if (STORE_FLAG_VALUE == 1
6294 : : && new_code == EQ
6295 : 11992435 : && is_int_mode (mode, &int_mode)
6296 : 355512 : && op1 == const0_rtx
6297 : 199140 : && int_mode == GET_MODE (op0)
6298 : 2083051 : && (num_sign_bit_copies (op0, int_mode)
6299 : 112589 : == GET_MODE_PRECISION (int_mode)))
6300 : : {
6301 : 551 : op0 = expand_compound_operation (op0);
6302 : 551 : return plus_constant (int_mode, gen_lowpart (int_mode, op0), 1);
6303 : : }
6304 : :
6305 : : /* If STORE_FLAG_VALUE is -1, we have cases similar to
6306 : : those above. */
6307 : 11636923 : if (in_cond)
6308 : : ;
6309 : :
6310 : 11636923 : else if (STORE_FLAG_VALUE == -1
6311 : : && new_code == NE
6312 : : && is_int_mode (mode, &int_mode)
6313 : : && op1 == const0_rtx
6314 : : && int_mode == GET_MODE (op0)
6315 : : && (num_sign_bit_copies (op0, int_mode)
6316 : : == GET_MODE_PRECISION (int_mode)))
6317 : : return gen_lowpart (int_mode, expand_compound_operation (op0));
6318 : :
6319 : 11636923 : else if (STORE_FLAG_VALUE == -1
6320 : : && new_code == NE
6321 : : && is_int_mode (mode, &int_mode)
6322 : : && op1 == const0_rtx
6323 : : && int_mode == GET_MODE (op0)
6324 : : && nonzero_bits (op0, int_mode) == 1)
6325 : : {
6326 : : op0 = expand_compound_operation (op0);
6327 : : return simplify_gen_unary (NEG, int_mode,
6328 : : gen_lowpart (int_mode, op0),
6329 : : int_mode);
6330 : : }
6331 : :
6332 : 11636923 : else if (STORE_FLAG_VALUE == -1
6333 : : && new_code == EQ
6334 : : && is_int_mode (mode, &int_mode)
6335 : : && op1 == const0_rtx
6336 : : && int_mode == GET_MODE (op0)
6337 : : && (num_sign_bit_copies (op0, int_mode)
6338 : : == GET_MODE_PRECISION (int_mode)))
6339 : : {
6340 : : op0 = expand_compound_operation (op0);
6341 : : return simplify_gen_unary (NOT, int_mode,
6342 : : gen_lowpart (int_mode, op0),
6343 : : int_mode);
6344 : : }
6345 : :
6346 : : /* If X is 0/1, (eq X 0) is X-1. */
6347 : 11636923 : else if (STORE_FLAG_VALUE == -1
6348 : : && new_code == EQ
6349 : : && is_int_mode (mode, &int_mode)
6350 : : && op1 == const0_rtx
6351 : : && int_mode == GET_MODE (op0)
6352 : : && nonzero_bits (op0, int_mode) == 1)
6353 : : {
6354 : : op0 = expand_compound_operation (op0);
6355 : : return plus_constant (int_mode, gen_lowpart (int_mode, op0), -1);
6356 : : }
6357 : :
6358 : : /* If STORE_FLAG_VALUE says to just test the sign bit and X has just
6359 : : one bit that might be nonzero, we can convert (ne x 0) to
6360 : : (ashift x c) where C puts the bit in the sign bit. Remove any
6361 : : AND with STORE_FLAG_VALUE when we are done, since we are only
6362 : : going to test the sign bit. */
6363 : 11636923 : if (new_code == NE
6364 : 12005527 : && is_int_mode (mode, &int_mode)
6365 : 372568 : && HWI_COMPUTABLE_MODE_P (int_mode)
6366 : 368604 : && val_signbit_p (int_mode, STORE_FLAG_VALUE)
6367 : 0 : && op1 == const0_rtx
6368 : 0 : && int_mode == GET_MODE (op0)
6369 : 11636923 : && (i = exact_log2 (nonzero_bits (op0, int_mode))) >= 0)
6370 : : {
6371 : 0 : x = simplify_shift_const (NULL_RTX, ASHIFT, int_mode,
6372 : : expand_compound_operation (op0),
6373 : 0 : GET_MODE_PRECISION (int_mode) - 1 - i);
6374 : 0 : if (GET_CODE (x) == AND && XEXP (x, 1) == const_true_rtx)
6375 : 0 : return XEXP (x, 0);
6376 : : else
6377 : : return x;
6378 : : }
6379 : :
6380 : : /* If the code changed, return a whole new comparison.
6381 : : We also need to avoid using SUBST in cases where
6382 : : simplify_comparison has widened a comparison with a CONST_INT,
6383 : : since in that case the wider CONST_INT may fail the sanity
6384 : : checks in do_SUBST. */
6385 : 11636923 : if (new_code != code
6386 : 11286137 : || (CONST_INT_P (op1)
6387 : 6584779 : && GET_MODE (op0) != GET_MODE (XEXP (x, 0))
6388 : 11125 : && GET_MODE (op0) != GET_MODE (XEXP (x, 1))))
6389 : 360700 : return gen_rtx_fmt_ee (new_code, mode, op0, op1);
6390 : :
6391 : : /* Otherwise, keep this operation, but maybe change its operands.
6392 : : This also converts (ne (compare FOO BAR) 0) to (ne FOO BAR). */
6393 : 11276223 : SUBST (XEXP (x, 0), op0);
6394 : 11276223 : SUBST (XEXP (x, 1), op1);
6395 : : }
6396 : : break;
6397 : :
6398 : 11185368 : case IF_THEN_ELSE:
6399 : 11185368 : return simplify_if_then_else (x);
6400 : :
6401 : 4640214 : case ZERO_EXTRACT:
6402 : 4640214 : case SIGN_EXTRACT:
6403 : 4640214 : case ZERO_EXTEND:
6404 : 4640214 : case SIGN_EXTEND:
6405 : : /* If we are processing SET_DEST, we are done. */
6406 : 4640214 : if (in_dest)
6407 : : return x;
6408 : :
6409 : 4637485 : return expand_compound_operation (x);
6410 : :
6411 : 43315500 : case SET:
6412 : 43315500 : return simplify_set (x);
6413 : :
6414 : 10505841 : case AND:
6415 : 10505841 : case IOR:
6416 : 10505841 : return simplify_logical (x);
6417 : :
6418 : 12861772 : case ASHIFT:
6419 : 12861772 : case LSHIFTRT:
6420 : 12861772 : case ASHIFTRT:
6421 : 12861772 : case ROTATE:
6422 : 12861772 : case ROTATERT:
6423 : : /* If this is a shift by a constant amount, simplify it. */
6424 : 12861772 : if (CONST_INT_P (XEXP (x, 1)))
6425 : 12434367 : return simplify_shift_const (x, code, mode, XEXP (x, 0),
6426 : 12434367 : INTVAL (XEXP (x, 1)));
6427 : :
6428 : : else if (SHIFT_COUNT_TRUNCATED && !REG_P (XEXP (x, 1)))
6429 : : SUBST (XEXP (x, 1),
6430 : : force_to_mode (XEXP (x, 1), GET_MODE (XEXP (x, 1)),
6431 : : (HOST_WIDE_INT_1U
6432 : : << exact_log2 (GET_MODE_UNIT_BITSIZE
6433 : : (GET_MODE (x)))) - 1, false));
6434 : : break;
6435 : 1694499 : case VEC_SELECT:
6436 : 1694499 : {
6437 : 1694499 : rtx trueop0 = XEXP (x, 0);
6438 : 1694499 : mode = GET_MODE (trueop0);
6439 : 1694499 : rtx trueop1 = XEXP (x, 1);
6440 : : /* If we select a low-part subreg, return that. */
6441 : 1694499 : if (vec_series_lowpart_p (GET_MODE (x), mode, trueop1))
6442 : : {
6443 : 805 : rtx new_rtx = lowpart_subreg (GET_MODE (x), trueop0, mode);
6444 : 805 : if (new_rtx != NULL_RTX)
6445 : : return new_rtx;
6446 : : }
6447 : : }
6448 : :
6449 : : default:
6450 : : break;
6451 : : }
6452 : :
6453 : : return x;
6454 : : }
6455 : :
6456 : : /* Simplify X, an IF_THEN_ELSE expression. Return the new expression. */
6457 : :
6458 : : static rtx
6459 : 11185368 : simplify_if_then_else (rtx x)
6460 : : {
6461 : 11185368 : machine_mode mode = GET_MODE (x);
6462 : 11185368 : rtx cond = XEXP (x, 0);
6463 : 11185368 : rtx true_rtx = XEXP (x, 1);
6464 : 11185368 : rtx false_rtx = XEXP (x, 2);
6465 : 11185368 : enum rtx_code true_code = GET_CODE (cond);
6466 : 11185368 : bool comparison_p = COMPARISON_P (cond);
6467 : 11185368 : rtx temp;
6468 : 11185368 : int i;
6469 : 11185368 : enum rtx_code false_code;
6470 : 11185368 : rtx reversed;
6471 : 11185368 : scalar_int_mode int_mode, inner_mode;
6472 : :
6473 : : /* Simplify storing of the truth value. */
6474 : 11185368 : if (comparison_p && true_rtx == const_true_rtx && false_rtx == const0_rtx)
6475 : 0 : return simplify_gen_relational (true_code, mode, VOIDmode,
6476 : 0 : XEXP (cond, 0), XEXP (cond, 1));
6477 : :
6478 : : /* Also when the truth value has to be reversed. */
6479 : 11184923 : if (comparison_p
6480 : 11184923 : && true_rtx == const0_rtx && false_rtx == const_true_rtx
6481 : 0 : && (reversed = reversed_comparison (cond, mode)))
6482 : : return reversed;
6483 : :
6484 : : /* Sometimes we can simplify the arm of an IF_THEN_ELSE if a register used
6485 : : in it is being compared against certain values. Get the true and false
6486 : : comparisons and see if that says anything about the value of each arm. */
6487 : :
6488 : 11185368 : if (comparison_p
6489 : 11184923 : && ((false_code = reversed_comparison_code (cond, NULL))
6490 : : != UNKNOWN)
6491 : 22232427 : && REG_P (XEXP (cond, 0)))
6492 : : {
6493 : 7144858 : HOST_WIDE_INT nzb;
6494 : 7144858 : rtx from = XEXP (cond, 0);
6495 : 7144858 : rtx true_val = XEXP (cond, 1);
6496 : 7144858 : rtx false_val = true_val;
6497 : 7144858 : bool swapped = false;
6498 : :
6499 : : /* If FALSE_CODE is EQ, swap the codes and arms. */
6500 : :
6501 : 7144858 : if (false_code == EQ)
6502 : : {
6503 : 2659740 : swapped = true, true_code = EQ, false_code = NE;
6504 : 2659740 : std::swap (true_rtx, false_rtx);
6505 : : }
6506 : :
6507 : 7144858 : scalar_int_mode from_mode;
6508 : 7144858 : if (is_a <scalar_int_mode> (GET_MODE (from), &from_mode))
6509 : : {
6510 : : /* If we are comparing against zero and the expression being
6511 : : tested has only a single bit that might be nonzero, that is
6512 : : its value when it is not equal to zero. Similarly if it is
6513 : : known to be -1 or 0. */
6514 : 6000685 : if (true_code == EQ
6515 : 4469981 : && true_val == const0_rtx
6516 : 7865226 : && pow2p_hwi (nzb = nonzero_bits (from, from_mode)))
6517 : : {
6518 : 197644 : false_code = EQ;
6519 : 197644 : false_val = gen_int_mode (nzb, from_mode);
6520 : : }
6521 : 5803041 : else if (true_code == EQ
6522 : 4272337 : && true_val == const0_rtx
6523 : 7469938 : && (num_sign_bit_copies (from, from_mode)
6524 : 1666897 : == GET_MODE_PRECISION (from_mode)))
6525 : : {
6526 : 809 : false_code = EQ;
6527 : 809 : false_val = constm1_rtx;
6528 : : }
6529 : : }
6530 : :
6531 : : /* Now simplify an arm if we know the value of the register in the
6532 : : branch and it is used in the arm. Be careful due to the potential
6533 : : of locally-shared RTL. */
6534 : :
6535 : 7144858 : if (reg_mentioned_p (from, true_rtx))
6536 : 274792 : true_rtx = subst (known_cond (copy_rtx (true_rtx), true_code,
6537 : : from, true_val),
6538 : : pc_rtx, pc_rtx, false, false, false);
6539 : 7144858 : if (reg_mentioned_p (from, false_rtx))
6540 : 87921 : false_rtx = subst (known_cond (copy_rtx (false_rtx), false_code,
6541 : : from, false_val),
6542 : : pc_rtx, pc_rtx, false, false, false);
6543 : :
6544 : 11629976 : SUBST (XEXP (x, 1), swapped ? false_rtx : true_rtx);
6545 : 11629976 : SUBST (XEXP (x, 2), swapped ? true_rtx : false_rtx);
6546 : :
6547 : 7144858 : true_rtx = XEXP (x, 1);
6548 : 7144858 : false_rtx = XEXP (x, 2);
6549 : 7144858 : true_code = GET_CODE (cond);
6550 : : }
6551 : :
6552 : : /* If we have (if_then_else FOO (pc) (label_ref BAR)) and FOO can be
6553 : : reversed, do so to avoid needing two sets of patterns for
6554 : : subtract-and-branch insns. Similarly if we have a constant in the true
6555 : : arm, the false arm is the same as the first operand of the comparison, or
6556 : : the false arm is more complicated than the true arm. */
6557 : :
6558 : 11185368 : if (comparison_p
6559 : 11184923 : && reversed_comparison_code (cond, NULL) != UNKNOWN
6560 : 22232427 : && (true_rtx == pc_rtx
6561 : 11047059 : || (CONSTANT_P (true_rtx)
6562 : 9268462 : && !CONST_INT_P (false_rtx) && false_rtx != pc_rtx)
6563 : 11015103 : || true_rtx == const0_rtx
6564 : 11014707 : || (OBJECT_P (true_rtx) && !OBJECT_P (false_rtx))
6565 : 10982069 : || (GET_CODE (true_rtx) == SUBREG && OBJECT_P (SUBREG_REG (true_rtx))
6566 : 14064 : && !OBJECT_P (false_rtx))
6567 : 10977184 : || reg_mentioned_p (true_rtx, false_rtx)
6568 : 10977091 : || rtx_equal_p (false_rtx, XEXP (cond, 0))))
6569 : : {
6570 : 97274 : SUBST (XEXP (x, 0), reversed_comparison (cond, GET_MODE (cond)));
6571 : 97274 : SUBST (XEXP (x, 1), false_rtx);
6572 : 97274 : SUBST (XEXP (x, 2), true_rtx);
6573 : :
6574 : 97274 : std::swap (true_rtx, false_rtx);
6575 : 97274 : cond = XEXP (x, 0);
6576 : :
6577 : : /* It is possible that the conditional has been simplified out. */
6578 : 97274 : true_code = GET_CODE (cond);
6579 : 97274 : comparison_p = COMPARISON_P (cond);
6580 : : }
6581 : :
6582 : : /* If the two arms are identical, we don't need the comparison. */
6583 : :
6584 : 11185368 : if (rtx_equal_p (true_rtx, false_rtx) && ! side_effects_p (cond))
6585 : : return true_rtx;
6586 : :
6587 : : /* Convert a == b ? b : a to "a". */
6588 : 3264504 : if (true_code == EQ && ! side_effects_p (cond)
6589 : 3254385 : && !HONOR_NANS (mode)
6590 : 3222907 : && rtx_equal_p (XEXP (cond, 0), false_rtx)
6591 : 11185723 : && rtx_equal_p (XEXP (cond, 1), true_rtx))
6592 : : return false_rtx;
6593 : 4145601 : else if (true_code == NE && ! side_effects_p (cond)
6594 : 4107860 : && !HONOR_NANS (mode)
6595 : 4103838 : && rtx_equal_p (XEXP (cond, 0), true_rtx)
6596 : 11227470 : && rtx_equal_p (XEXP (cond, 1), false_rtx))
6597 : : return true_rtx;
6598 : :
6599 : : /* Look for cases where we have (abs x) or (neg (abs X)). */
6600 : :
6601 : 11185356 : if (GET_MODE_CLASS (mode) == MODE_INT
6602 : 1708654 : && comparison_p
6603 : 1708635 : && XEXP (cond, 1) == const0_rtx
6604 : 1321317 : && GET_CODE (false_rtx) == NEG
6605 : 18 : && rtx_equal_p (true_rtx, XEXP (false_rtx, 0))
6606 : 0 : && rtx_equal_p (true_rtx, XEXP (cond, 0))
6607 : 11185356 : && ! side_effects_p (true_rtx))
6608 : 0 : switch (true_code)
6609 : : {
6610 : 0 : case GT:
6611 : 0 : case GE:
6612 : 0 : return simplify_gen_unary (ABS, mode, true_rtx, mode);
6613 : 0 : case LT:
6614 : 0 : case LE:
6615 : 0 : return
6616 : 0 : simplify_gen_unary (NEG, mode,
6617 : : simplify_gen_unary (ABS, mode, true_rtx, mode),
6618 : 0 : mode);
6619 : : default:
6620 : : break;
6621 : : }
6622 : :
6623 : : /* Look for MIN or MAX. */
6624 : :
6625 : 11185356 : if ((! FLOAT_MODE_P (mode)
6626 : 41184 : || (flag_unsafe_math_optimizations
6627 : 305 : && !HONOR_NANS (mode)
6628 : 305 : && !HONOR_SIGNED_ZEROS (mode)))
6629 : 11144477 : && comparison_p
6630 : 11144141 : && rtx_equal_p (XEXP (cond, 0), true_rtx)
6631 : 83011 : && rtx_equal_p (XEXP (cond, 1), false_rtx)
6632 : 10285 : && ! side_effects_p (cond))
6633 : 10285 : switch (true_code)
6634 : : {
6635 : 3432 : case GE:
6636 : 3432 : case GT:
6637 : 3432 : return simplify_gen_binary (SMAX, mode, true_rtx, false_rtx);
6638 : 3493 : case LE:
6639 : 3493 : case LT:
6640 : 3493 : return simplify_gen_binary (SMIN, mode, true_rtx, false_rtx);
6641 : 2523 : case GEU:
6642 : 2523 : case GTU:
6643 : 2523 : return simplify_gen_binary (UMAX, mode, true_rtx, false_rtx);
6644 : 837 : case LEU:
6645 : 837 : case LTU:
6646 : 837 : return simplify_gen_binary (UMIN, mode, true_rtx, false_rtx);
6647 : : default:
6648 : : break;
6649 : : }
6650 : :
6651 : : /* If we have (if_then_else COND (OP Z C1) Z) and OP is an identity when its
6652 : : second operand is zero, this can be done as (OP Z (mult COND C2)) where
6653 : : C2 = C1 * STORE_FLAG_VALUE. Similarly if OP has an outer ZERO_EXTEND or
6654 : : SIGN_EXTEND as long as Z is already extended (so we don't destroy it).
6655 : : We can do this kind of thing in some cases when STORE_FLAG_VALUE is
6656 : : neither 1 or -1, but it isn't worth checking for. */
6657 : :
6658 : 11175071 : if ((STORE_FLAG_VALUE == 1 || STORE_FLAG_VALUE == -1)
6659 : : && comparison_p
6660 : 12785816 : && is_int_mode (mode, &int_mode)
6661 : 12873441 : && ! side_effects_p (x))
6662 : : {
6663 : 1695412 : rtx t = make_compound_operation (true_rtx, SET);
6664 : 1695412 : rtx f = make_compound_operation (false_rtx, SET);
6665 : 1695412 : rtx cond_op0 = XEXP (cond, 0);
6666 : 1695412 : rtx cond_op1 = XEXP (cond, 1);
6667 : 1695412 : enum rtx_code op = UNKNOWN, extend_op = UNKNOWN;
6668 : 1695412 : scalar_int_mode m = int_mode;
6669 : 1695412 : rtx z = 0, c1 = NULL_RTX;
6670 : :
6671 : 1695412 : if ((GET_CODE (t) == PLUS || GET_CODE (t) == MINUS
6672 : : || GET_CODE (t) == IOR || GET_CODE (t) == XOR
6673 : : || GET_CODE (t) == ASHIFT
6674 : : || GET_CODE (t) == LSHIFTRT || GET_CODE (t) == ASHIFTRT)
6675 : 183875 : && rtx_equal_p (XEXP (t, 0), f))
6676 : 80458 : c1 = XEXP (t, 1), op = GET_CODE (t), z = f;
6677 : :
6678 : : /* If an identity-zero op is commutative, check whether there
6679 : : would be a match if we swapped the operands. */
6680 : 1559992 : else if ((GET_CODE (t) == PLUS || GET_CODE (t) == IOR
6681 : 1547626 : || GET_CODE (t) == XOR)
6682 : 1628616 : && rtx_equal_p (XEXP (t, 1), f))
6683 : 7167 : c1 = XEXP (t, 0), op = GET_CODE (t), z = f;
6684 : 1607787 : else if (GET_CODE (t) == SIGN_EXTEND
6685 : 1557 : && is_a <scalar_int_mode> (GET_MODE (XEXP (t, 0)), &inner_mode)
6686 : 1557 : && (GET_CODE (XEXP (t, 0)) == PLUS
6687 : 1557 : || GET_CODE (XEXP (t, 0)) == MINUS
6688 : : || GET_CODE (XEXP (t, 0)) == IOR
6689 : : || GET_CODE (XEXP (t, 0)) == XOR
6690 : : || GET_CODE (XEXP (t, 0)) == ASHIFT
6691 : : || GET_CODE (XEXP (t, 0)) == LSHIFTRT
6692 : : || GET_CODE (XEXP (t, 0)) == ASHIFTRT)
6693 : 108 : && GET_CODE (XEXP (XEXP (t, 0), 0)) == SUBREG
6694 : 45 : && subreg_lowpart_p (XEXP (XEXP (t, 0), 0))
6695 : 45 : && rtx_equal_p (SUBREG_REG (XEXP (XEXP (t, 0), 0)), f)
6696 : 1607787 : && (num_sign_bit_copies (f, GET_MODE (f))
6697 : 0 : > (unsigned int)
6698 : 0 : (GET_MODE_PRECISION (int_mode)
6699 : 0 : - GET_MODE_PRECISION (inner_mode))))
6700 : : {
6701 : 0 : c1 = XEXP (XEXP (t, 0), 1); z = f; op = GET_CODE (XEXP (t, 0));
6702 : 0 : extend_op = SIGN_EXTEND;
6703 : 0 : m = inner_mode;
6704 : : }
6705 : 1607787 : else if (GET_CODE (t) == SIGN_EXTEND
6706 : 1557 : && is_a <scalar_int_mode> (GET_MODE (XEXP (t, 0)), &inner_mode)
6707 : 1557 : && (GET_CODE (XEXP (t, 0)) == PLUS
6708 : 1467 : || GET_CODE (XEXP (t, 0)) == IOR
6709 : 1464 : || GET_CODE (XEXP (t, 0)) == XOR)
6710 : 93 : && GET_CODE (XEXP (XEXP (t, 0), 1)) == SUBREG
6711 : 2 : && subreg_lowpart_p (XEXP (XEXP (t, 0), 1))
6712 : 2 : && rtx_equal_p (SUBREG_REG (XEXP (XEXP (t, 0), 1)), f)
6713 : 1607789 : && (num_sign_bit_copies (f, GET_MODE (f))
6714 : 2 : > (unsigned int)
6715 : 2 : (GET_MODE_PRECISION (int_mode)
6716 : 2 : - GET_MODE_PRECISION (inner_mode))))
6717 : : {
6718 : 0 : c1 = XEXP (XEXP (t, 0), 0); z = f; op = GET_CODE (XEXP (t, 0));
6719 : 0 : extend_op = SIGN_EXTEND;
6720 : 0 : m = inner_mode;
6721 : : }
6722 : 1607787 : else if (GET_CODE (t) == ZERO_EXTEND
6723 : 2508 : && is_a <scalar_int_mode> (GET_MODE (XEXP (t, 0)), &inner_mode)
6724 : 2508 : && (GET_CODE (XEXP (t, 0)) == PLUS
6725 : 2508 : || GET_CODE (XEXP (t, 0)) == MINUS
6726 : : || GET_CODE (XEXP (t, 0)) == IOR
6727 : : || GET_CODE (XEXP (t, 0)) == XOR
6728 : : || GET_CODE (XEXP (t, 0)) == ASHIFT
6729 : : || GET_CODE (XEXP (t, 0)) == LSHIFTRT
6730 : : || GET_CODE (XEXP (t, 0)) == ASHIFTRT)
6731 : 539 : && GET_CODE (XEXP (XEXP (t, 0), 0)) == SUBREG
6732 : 63 : && HWI_COMPUTABLE_MODE_P (int_mode)
6733 : 63 : && subreg_lowpart_p (XEXP (XEXP (t, 0), 0))
6734 : 63 : && rtx_equal_p (SUBREG_REG (XEXP (XEXP (t, 0), 0)), f)
6735 : 1607787 : && ((nonzero_bits (f, GET_MODE (f))
6736 : 0 : & ~GET_MODE_MASK (inner_mode))
6737 : : == 0))
6738 : : {
6739 : 0 : c1 = XEXP (XEXP (t, 0), 1); z = f; op = GET_CODE (XEXP (t, 0));
6740 : 0 : extend_op = ZERO_EXTEND;
6741 : 0 : m = inner_mode;
6742 : : }
6743 : 1607787 : else if (GET_CODE (t) == ZERO_EXTEND
6744 : 2508 : && is_a <scalar_int_mode> (GET_MODE (XEXP (t, 0)), &inner_mode)
6745 : 2508 : && (GET_CODE (XEXP (t, 0)) == PLUS
6746 : 2284 : || GET_CODE (XEXP (t, 0)) == IOR
6747 : 2283 : || GET_CODE (XEXP (t, 0)) == XOR)
6748 : 225 : && GET_CODE (XEXP (XEXP (t, 0), 1)) == SUBREG
6749 : 12 : && HWI_COMPUTABLE_MODE_P (int_mode)
6750 : 12 : && subreg_lowpart_p (XEXP (XEXP (t, 0), 1))
6751 : 12 : && rtx_equal_p (SUBREG_REG (XEXP (XEXP (t, 0), 1)), f)
6752 : 1607787 : && ((nonzero_bits (f, GET_MODE (f))
6753 : 0 : & ~GET_MODE_MASK (inner_mode))
6754 : : == 0))
6755 : : {
6756 : 0 : c1 = XEXP (XEXP (t, 0), 0); z = f; op = GET_CODE (XEXP (t, 0));
6757 : 0 : extend_op = ZERO_EXTEND;
6758 : 0 : m = inner_mode;
6759 : : }
6760 : :
6761 : 87625 : if (z)
6762 : : {
6763 : 87625 : machine_mode cm = m;
6764 : 87625 : if ((op == ASHIFT || op == LSHIFTRT || op == ASHIFTRT)
6765 : 377 : && GET_MODE (c1) != VOIDmode)
6766 : 75 : cm = GET_MODE (c1);
6767 : 87625 : temp = subst (simplify_gen_relational (true_code, cm, VOIDmode,
6768 : : cond_op0, cond_op1),
6769 : : pc_rtx, pc_rtx, false, false, false);
6770 : 87625 : temp = simplify_gen_binary (MULT, cm, temp,
6771 : : simplify_gen_binary (MULT, cm, c1,
6772 : : const_true_rtx));
6773 : 87625 : temp = subst (temp, pc_rtx, pc_rtx, false, false, false);
6774 : 87625 : temp = simplify_gen_binary (op, m, gen_lowpart (m, z), temp);
6775 : :
6776 : 87625 : if (extend_op != UNKNOWN)
6777 : 0 : temp = simplify_gen_unary (extend_op, int_mode, temp, m);
6778 : :
6779 : 87625 : return temp;
6780 : : }
6781 : : }
6782 : :
6783 : : /* If we have (if_then_else (ne A 0) C1 0) and either A is known to be 0 or
6784 : : 1 and C1 is a single bit or A is known to be 0 or -1 and C1 is the
6785 : : negation of a single bit, we can convert this operation to a shift. We
6786 : : can actually do this more generally, but it doesn't seem worth it. */
6787 : :
6788 : 11087446 : if (true_code == NE
6789 : 11087446 : && is_a <scalar_int_mode> (mode, &int_mode)
6790 : 341034 : && XEXP (cond, 1) == const0_rtx
6791 : 228088 : && false_rtx == const0_rtx
6792 : 24216 : && CONST_INT_P (true_rtx)
6793 : 11087828 : && ((nonzero_bits (XEXP (cond, 0), int_mode) == 1
6794 : 0 : && (i = exact_log2 (UINTVAL (true_rtx))) >= 0)
6795 : 382 : || ((num_sign_bit_copies (XEXP (cond, 0), int_mode)
6796 : 382 : == GET_MODE_PRECISION (int_mode))
6797 : 0 : && (i = exact_log2 (-UINTVAL (true_rtx))) >= 0)))
6798 : 0 : return
6799 : 0 : simplify_shift_const (NULL_RTX, ASHIFT, int_mode,
6800 : 0 : gen_lowpart (int_mode, XEXP (cond, 0)), i);
6801 : :
6802 : : /* (IF_THEN_ELSE (NE A 0) C1 0) is A or a zero-extend of A if the only
6803 : : non-zero bit in A is C1. */
6804 : 4113075 : if (true_code == NE && XEXP (cond, 1) == const0_rtx
6805 : 1847167 : && false_rtx == const0_rtx && CONST_INT_P (true_rtx)
6806 : 11185750 : && is_a <scalar_int_mode> (mode, &int_mode)
6807 : 382 : && is_a <scalar_int_mode> (GET_MODE (XEXP (cond, 0)), &inner_mode)
6808 : 30 : && (UINTVAL (true_rtx) & GET_MODE_MASK (int_mode))
6809 : 30 : == nonzero_bits (XEXP (cond, 0), inner_mode)
6810 : 11087448 : && (i = exact_log2 (UINTVAL (true_rtx) & GET_MODE_MASK (int_mode))) >= 0)
6811 : : {
6812 : 0 : rtx val = XEXP (cond, 0);
6813 : 0 : if (inner_mode == int_mode)
6814 : : return val;
6815 : 0 : else if (GET_MODE_PRECISION (inner_mode) < GET_MODE_PRECISION (int_mode))
6816 : 0 : return simplify_gen_unary (ZERO_EXTEND, int_mode, val, inner_mode);
6817 : : }
6818 : :
6819 : : return x;
6820 : : }
6821 : :
6822 : : /* Simplify X, a SET expression. Return the new expression. */
6823 : :
6824 : : static rtx
6825 : 43315500 : simplify_set (rtx x)
6826 : : {
6827 : 43315500 : rtx src = SET_SRC (x);
6828 : 43315500 : rtx dest = SET_DEST (x);
6829 : 86631000 : machine_mode mode
6830 : 43315500 : = GET_MODE (src) != VOIDmode ? GET_MODE (src) : GET_MODE (dest);
6831 : 43315500 : rtx_insn *other_insn;
6832 : 43315500 : rtx *cc_use;
6833 : 43315500 : scalar_int_mode int_mode;
6834 : :
6835 : : /* (set (pc) (return)) gets written as (return). */
6836 : 43315500 : if (GET_CODE (dest) == PC && ANY_RETURN_P (src))
6837 : : return src;
6838 : :
6839 : : /* Now that we know for sure which bits of SRC we are using, see if we can
6840 : : simplify the expression for the object knowing that we only need the
6841 : : low-order bits. */
6842 : :
6843 : 43315500 : if (GET_MODE_CLASS (mode) == MODE_INT && HWI_COMPUTABLE_MODE_P (mode))
6844 : : {
6845 : 19765295 : src = force_to_mode (src, mode, HOST_WIDE_INT_M1U, false);
6846 : 19765295 : SUBST (SET_SRC (x), src);
6847 : : }
6848 : :
6849 : : /* If the source is a COMPARE, look for the use of the comparison result
6850 : : and try to simplify it unless we already have used undobuf.other_insn. */
6851 : 37334539 : if ((GET_MODE_CLASS (mode) == MODE_CC || GET_CODE (src) == COMPARE)
6852 : 5980961 : && (cc_use = find_single_use (dest, subst_insn, &other_insn)) != 0
6853 : 5524546 : && (undobuf.other_insn == 0 || other_insn == undobuf.other_insn)
6854 : 5524546 : && COMPARISON_P (*cc_use)
6855 : 48839561 : && rtx_equal_p (XEXP (*cc_use, 0), dest))
6856 : : {
6857 : 5522559 : enum rtx_code old_code = GET_CODE (*cc_use);
6858 : 5522559 : enum rtx_code new_code;
6859 : 5522559 : rtx op0, op1, tmp;
6860 : 5522559 : bool other_changed = false;
6861 : 5522559 : rtx inner_compare = NULL_RTX;
6862 : 5522559 : machine_mode compare_mode = GET_MODE (dest);
6863 : :
6864 : 5522559 : if (GET_CODE (src) == COMPARE)
6865 : : {
6866 : 5186082 : op0 = XEXP (src, 0), op1 = XEXP (src, 1);
6867 : 5186082 : if (GET_CODE (op0) == COMPARE && op1 == const0_rtx)
6868 : : {
6869 : 0 : inner_compare = op0;
6870 : 0 : op0 = XEXP (inner_compare, 0), op1 = XEXP (inner_compare, 1);
6871 : : }
6872 : : }
6873 : : else
6874 : 336477 : op0 = src, op1 = CONST0_RTX (GET_MODE (src));
6875 : :
6876 : 5522559 : tmp = simplify_relational_operation (old_code, compare_mode, VOIDmode,
6877 : : op0, op1);
6878 : 5522559 : if (!tmp)
6879 : : new_code = old_code;
6880 : 427680 : else if (!CONSTANT_P (tmp))
6881 : : {
6882 : 422929 : new_code = GET_CODE (tmp);
6883 : 422929 : op0 = XEXP (tmp, 0);
6884 : 422929 : op1 = XEXP (tmp, 1);
6885 : : }
6886 : : else
6887 : : {
6888 : 4751 : rtx pat = PATTERN (other_insn);
6889 : 4751 : undobuf.other_insn = other_insn;
6890 : 4751 : SUBST (*cc_use, tmp);
6891 : :
6892 : : /* Attempt to simplify CC user. */
6893 : 4751 : if (GET_CODE (pat) == SET)
6894 : : {
6895 : 4242 : rtx new_rtx = simplify_rtx (SET_SRC (pat));
6896 : 4242 : if (new_rtx != NULL_RTX)
6897 : 3881 : SUBST (SET_SRC (pat), new_rtx);
6898 : : }
6899 : :
6900 : : /* Convert X into a no-op move. */
6901 : 4751 : SUBST (SET_DEST (x), pc_rtx);
6902 : 4751 : SUBST (SET_SRC (x), pc_rtx);
6903 : 4751 : return x;
6904 : : }
6905 : :
6906 : : /* Simplify our comparison, if possible. */
6907 : 5517808 : new_code = simplify_comparison (new_code, &op0, &op1);
6908 : :
6909 : : #ifdef SELECT_CC_MODE
6910 : : /* If this machine has CC modes other than CCmode, check to see if we
6911 : : need to use a different CC mode here. */
6912 : 5517808 : if (GET_MODE_CLASS (GET_MODE (op0)) == MODE_CC)
6913 : 525811 : compare_mode = GET_MODE (op0);
6914 : 4991997 : else if (inner_compare
6915 : 0 : && GET_MODE_CLASS (GET_MODE (inner_compare)) == MODE_CC
6916 : 0 : && new_code == old_code
6917 : 0 : && op0 == XEXP (inner_compare, 0)
6918 : 0 : && op1 == XEXP (inner_compare, 1))
6919 : 0 : compare_mode = GET_MODE (inner_compare);
6920 : : else
6921 : 4991997 : compare_mode = SELECT_CC_MODE (new_code, op0, op1);
6922 : :
6923 : : /* If the mode changed, we have to change SET_DEST, the mode in the
6924 : : compare, and the mode in the place SET_DEST is used. If SET_DEST is
6925 : : a hard register, just build new versions with the proper mode. If it
6926 : : is a pseudo, we lose unless it is only time we set the pseudo, in
6927 : : which case we can safely change its mode. */
6928 : 5517808 : if (compare_mode != GET_MODE (dest))
6929 : : {
6930 : 218420 : if (can_change_dest_mode (dest, 0, compare_mode))
6931 : : {
6932 : 218420 : unsigned int regno = REGNO (dest);
6933 : 218420 : rtx new_dest;
6934 : :
6935 : 218420 : if (regno < FIRST_PSEUDO_REGISTER)
6936 : 218420 : new_dest = gen_rtx_REG (compare_mode, regno);
6937 : : else
6938 : : {
6939 : 0 : subst_mode (regno, compare_mode);
6940 : 0 : new_dest = regno_reg_rtx[regno];
6941 : : }
6942 : :
6943 : 218420 : SUBST (SET_DEST (x), new_dest);
6944 : 218420 : SUBST (XEXP (*cc_use, 0), new_dest);
6945 : 218420 : other_changed = true;
6946 : :
6947 : 218420 : dest = new_dest;
6948 : : }
6949 : : }
6950 : : #endif /* SELECT_CC_MODE */
6951 : :
6952 : : /* If the code changed, we have to build a new comparison in
6953 : : undobuf.other_insn. */
6954 : 5517808 : if (new_code != old_code)
6955 : : {
6956 : 578656 : bool other_changed_previously = other_changed;
6957 : 578656 : unsigned HOST_WIDE_INT mask;
6958 : 578656 : rtx old_cc_use = *cc_use;
6959 : :
6960 : 578656 : SUBST (*cc_use, gen_rtx_fmt_ee (new_code, GET_MODE (*cc_use),
6961 : : dest, const0_rtx));
6962 : 578656 : other_changed = true;
6963 : :
6964 : : /* If the only change we made was to change an EQ into an NE or
6965 : : vice versa, OP0 has only one bit that might be nonzero, and OP1
6966 : : is zero, check if changing the user of the condition code will
6967 : : produce a valid insn. If it won't, we can keep the original code
6968 : : in that insn by surrounding our operation with an XOR. */
6969 : :
6970 : 578656 : if (((old_code == NE && new_code == EQ)
6971 : 548541 : || (old_code == EQ && new_code == NE))
6972 : 64344 : && ! other_changed_previously && op1 == const0_rtx
6973 : 61979 : && HWI_COMPUTABLE_MODE_P (GET_MODE (op0))
6974 : 587032 : && pow2p_hwi (mask = nonzero_bits (op0, GET_MODE (op0))))
6975 : : {
6976 : 8365 : rtx pat = PATTERN (other_insn), note = 0;
6977 : :
6978 : 8365 : if ((recog_for_combine (&pat, other_insn, ¬e) < 0
6979 : 8365 : && ! check_asm_operands (pat)))
6980 : : {
6981 : 4 : *cc_use = old_cc_use;
6982 : 4 : other_changed = false;
6983 : :
6984 : 4 : op0 = simplify_gen_binary (XOR, GET_MODE (op0), op0,
6985 : 4 : gen_int_mode (mask,
6986 : 4 : GET_MODE (op0)));
6987 : : }
6988 : : }
6989 : : }
6990 : :
6991 : 4947517 : if (other_changed)
6992 : 600253 : undobuf.other_insn = other_insn;
6993 : :
6994 : : /* Don't generate a compare of a CC with 0, just use that CC. */
6995 : 5517808 : if (GET_MODE (op0) == compare_mode && op1 == const0_rtx)
6996 : : {
6997 : 525811 : SUBST (SET_SRC (x), op0);
6998 : 525811 : src = SET_SRC (x);
6999 : : }
7000 : : /* Otherwise, if we didn't previously have the same COMPARE we
7001 : : want, create it from scratch. */
7002 : 4991997 : else if (GET_CODE (src) != COMPARE || GET_MODE (src) != compare_mode
7003 : 4866676 : || XEXP (src, 0) != op0 || XEXP (src, 1) != op1)
7004 : : {
7005 : 1306159 : SUBST (SET_SRC (x), gen_rtx_COMPARE (compare_mode, op0, op1));
7006 : 1306159 : src = SET_SRC (x);
7007 : : }
7008 : : }
7009 : : else
7010 : : {
7011 : : /* Get SET_SRC in a form where we have placed back any
7012 : : compound expressions. Then do the checks below. */
7013 : 37792941 : src = make_compound_operation (src, SET);
7014 : 37792941 : SUBST (SET_SRC (x), src);
7015 : : }
7016 : :
7017 : : /* If we have (set x (subreg:m1 (op:m2 ...) 0)) with OP being some operation,
7018 : : and X being a REG or (subreg (reg)), we may be able to convert this to
7019 : : (set (subreg:m2 x) (op)).
7020 : :
7021 : : We can always do this if M1 is narrower than M2 because that means that
7022 : : we only care about the low bits of the result.
7023 : :
7024 : : However, on machines without WORD_REGISTER_OPERATIONS defined, we cannot
7025 : : perform a narrower operation than requested since the high-order bits will
7026 : : be undefined. On machine where it is defined, this transformation is safe
7027 : : as long as M1 and M2 have the same number of words. */
7028 : :
7029 : 405823 : if (GET_CODE (src) == SUBREG && subreg_lowpart_p (src)
7030 : 386153 : && !OBJECT_P (SUBREG_REG (src))
7031 : : && (known_equal_after_align_up
7032 : 246158 : (GET_MODE_SIZE (GET_MODE (src)),
7033 : 492316 : GET_MODE_SIZE (GET_MODE (SUBREG_REG (src))),
7034 : 246158 : UNITS_PER_WORD))
7035 : 216422 : && (WORD_REGISTER_OPERATIONS || !paradoxical_subreg_p (src))
7036 : 211618 : && ! (REG_P (dest) && REGNO (dest) < FIRST_PSEUDO_REGISTER
7037 : 141 : && !REG_CAN_CHANGE_MODE_P (REGNO (dest),
7038 : : GET_MODE (SUBREG_REG (src)),
7039 : : GET_MODE (src)))
7040 : 43522226 : && (REG_P (dest)
7041 : 98547 : || (GET_CODE (dest) == SUBREG
7042 : 261 : && REG_P (SUBREG_REG (dest)))))
7043 : : {
7044 : 113191 : SUBST (SET_DEST (x),
7045 : : gen_lowpart (GET_MODE (SUBREG_REG (src)),
7046 : : dest));
7047 : 113191 : SUBST (SET_SRC (x), SUBREG_REG (src));
7048 : :
7049 : 113191 : src = SET_SRC (x), dest = SET_DEST (x);
7050 : : }
7051 : :
7052 : : /* If we have (set FOO (subreg:M (mem:N BAR) 0)) with M wider than N, this
7053 : : would require a paradoxical subreg. Replace the subreg with a
7054 : : zero_extend to avoid the reload that would otherwise be required.
7055 : : Don't do this unless we have a scalar integer mode, otherwise the
7056 : : transformation is incorrect. */
7057 : :
7058 : 43310749 : enum rtx_code extend_op;
7059 : 43310749 : if (paradoxical_subreg_p (src)
7060 : : && MEM_P (SUBREG_REG (src))
7061 : : && SCALAR_INT_MODE_P (GET_MODE (src))
7062 : : && (extend_op = load_extend_op (GET_MODE (SUBREG_REG (src)))) != UNKNOWN)
7063 : : {
7064 : : SUBST (SET_SRC (x),
7065 : : gen_rtx_fmt_e (extend_op, GET_MODE (src), SUBREG_REG (src)));
7066 : :
7067 : : src = SET_SRC (x);
7068 : : }
7069 : :
7070 : : /* If we don't have a conditional move, SET_SRC is an IF_THEN_ELSE, and we
7071 : : are comparing an item known to be 0 or -1 against 0, use a logical
7072 : : operation instead. Check for one of the arms being an IOR of the other
7073 : : arm with some value. We compute three terms to be IOR'ed together. In
7074 : : practice, at most two will be nonzero. Then we do the IOR's. */
7075 : :
7076 : 43310749 : if (GET_CODE (dest) != PC
7077 : 33967614 : && GET_CODE (src) == IF_THEN_ELSE
7078 : 1006598 : && is_int_mode (GET_MODE (src), &int_mode)
7079 : 932483 : && (GET_CODE (XEXP (src, 0)) == EQ || GET_CODE (XEXP (src, 0)) == NE)
7080 : 420347 : && XEXP (XEXP (src, 0), 1) == const0_rtx
7081 : 271493 : && int_mode == GET_MODE (XEXP (XEXP (src, 0), 0))
7082 : 80335 : && (!HAVE_conditional_move
7083 : 80335 : || ! can_conditionally_move_p (int_mode))
7084 : 0 : && (num_sign_bit_copies (XEXP (XEXP (src, 0), 0), int_mode)
7085 : 0 : == GET_MODE_PRECISION (int_mode))
7086 : 43310749 : && ! side_effects_p (src))
7087 : : {
7088 : 0 : rtx true_rtx = (GET_CODE (XEXP (src, 0)) == NE
7089 : 0 : ? XEXP (src, 1) : XEXP (src, 2));
7090 : 0 : rtx false_rtx = (GET_CODE (XEXP (src, 0)) == NE
7091 : 0 : ? XEXP (src, 2) : XEXP (src, 1));
7092 : 0 : rtx term1 = const0_rtx, term2, term3;
7093 : :
7094 : 0 : if (GET_CODE (true_rtx) == IOR
7095 : 0 : && rtx_equal_p (XEXP (true_rtx, 0), false_rtx))
7096 : 0 : term1 = false_rtx, true_rtx = XEXP (true_rtx, 1), false_rtx = const0_rtx;
7097 : 0 : else if (GET_CODE (true_rtx) == IOR
7098 : 0 : && rtx_equal_p (XEXP (true_rtx, 1), false_rtx))
7099 : 0 : term1 = false_rtx, true_rtx = XEXP (true_rtx, 0), false_rtx = const0_rtx;
7100 : 0 : else if (GET_CODE (false_rtx) == IOR
7101 : 0 : && rtx_equal_p (XEXP (false_rtx, 0), true_rtx))
7102 : 0 : term1 = true_rtx, false_rtx = XEXP (false_rtx, 1), true_rtx = const0_rtx;
7103 : 0 : else if (GET_CODE (false_rtx) == IOR
7104 : 0 : && rtx_equal_p (XEXP (false_rtx, 1), true_rtx))
7105 : 0 : term1 = true_rtx, false_rtx = XEXP (false_rtx, 0), true_rtx = const0_rtx;
7106 : :
7107 : 0 : term2 = simplify_gen_binary (AND, int_mode,
7108 : 0 : XEXP (XEXP (src, 0), 0), true_rtx);
7109 : 0 : term3 = simplify_gen_binary (AND, int_mode,
7110 : : simplify_gen_unary (NOT, int_mode,
7111 : 0 : XEXP (XEXP (src, 0), 0),
7112 : : int_mode),
7113 : : false_rtx);
7114 : :
7115 : 0 : SUBST (SET_SRC (x),
7116 : : simplify_gen_binary (IOR, int_mode,
7117 : : simplify_gen_binary (IOR, int_mode,
7118 : : term1, term2),
7119 : : term3));
7120 : :
7121 : 0 : src = SET_SRC (x);
7122 : : }
7123 : :
7124 : : /* If either SRC or DEST is a CLOBBER of (const_int 0), make this
7125 : : whole thing fail. */
7126 : 43310749 : if (GET_CODE (src) == CLOBBER && XEXP (src, 0) == const0_rtx)
7127 : : return src;
7128 : 43310737 : else if (GET_CODE (dest) == CLOBBER && XEXP (dest, 0) == const0_rtx)
7129 : : return dest;
7130 : : else
7131 : : /* Convert this into a field assignment operation, if possible. */
7132 : 43310737 : return make_field_assignment (x);
7133 : : }
7134 : :
7135 : : /* Simplify, X, and AND, IOR, or XOR operation, and return the simplified
7136 : : result. */
7137 : :
7138 : : static rtx
7139 : 10505841 : simplify_logical (rtx x)
7140 : : {
7141 : 10505841 : rtx op0 = XEXP (x, 0);
7142 : 10505841 : rtx op1 = XEXP (x, 1);
7143 : 10505841 : scalar_int_mode mode;
7144 : :
7145 : 10505841 : switch (GET_CODE (x))
7146 : : {
7147 : 6397057 : case AND:
7148 : : /* We can call simplify_and_const_int only if we don't lose
7149 : : any (sign) bits when converting INTVAL (op1) to
7150 : : "unsigned HOST_WIDE_INT". */
7151 : 6397057 : if (is_a <scalar_int_mode> (GET_MODE (x), &mode)
7152 : 5973352 : && CONST_INT_P (op1)
7153 : 4706467 : && (HWI_COMPUTABLE_MODE_P (mode)
7154 : 7631 : || INTVAL (op1) > 0))
7155 : : {
7156 : 4705388 : x = simplify_and_const_int (x, mode, op0, INTVAL (op1));
7157 : 4705388 : if (GET_CODE (x) != AND)
7158 : : return x;
7159 : :
7160 : 4670645 : op0 = XEXP (x, 0);
7161 : 4670645 : op1 = XEXP (x, 1);
7162 : : }
7163 : :
7164 : : /* If we have any of (and (ior A B) C) or (and (xor A B) C),
7165 : : apply the distributive law and then the inverse distributive
7166 : : law to see if things simplify. */
7167 : 6362314 : if (GET_CODE (op0) == IOR || GET_CODE (op0) == XOR)
7168 : : {
7169 : 111904 : rtx result = distribute_and_simplify_rtx (x, 0);
7170 : 111904 : if (result)
7171 : : return result;
7172 : : }
7173 : 6349162 : if (GET_CODE (op1) == IOR || GET_CODE (op1) == XOR)
7174 : : {
7175 : 1734 : rtx result = distribute_and_simplify_rtx (x, 1);
7176 : 1734 : if (result)
7177 : : return result;
7178 : : }
7179 : : break;
7180 : :
7181 : 4108784 : case IOR:
7182 : : /* If we have (ior (and A B) C), apply the distributive law and then
7183 : : the inverse distributive law to see if things simplify. */
7184 : :
7185 : 4108784 : if (GET_CODE (op0) == AND)
7186 : : {
7187 : 1162812 : rtx result = distribute_and_simplify_rtx (x, 0);
7188 : 1162812 : if (result)
7189 : : return result;
7190 : : }
7191 : :
7192 : 4108782 : if (GET_CODE (op1) == AND)
7193 : : {
7194 : 48384 : rtx result = distribute_and_simplify_rtx (x, 1);
7195 : 48384 : if (result)
7196 : : return result;
7197 : : }
7198 : : break;
7199 : :
7200 : 0 : default:
7201 : 0 : gcc_unreachable ();
7202 : : }
7203 : :
7204 : : return x;
7205 : : }
7206 : :
7207 : : /* We consider ZERO_EXTRACT, SIGN_EXTRACT, and SIGN_EXTEND as "compound
7208 : : operations" because they can be replaced with two more basic operations.
7209 : : ZERO_EXTEND is also considered "compound" because it can be replaced with
7210 : : an AND operation, which is simpler, though only one operation.
7211 : :
7212 : : The function expand_compound_operation is called with an rtx expression
7213 : : and will convert it to the appropriate shifts and AND operations,
7214 : : simplifying at each stage.
7215 : :
7216 : : The function make_compound_operation is called to convert an expression
7217 : : consisting of shifts and ANDs into the equivalent compound expression.
7218 : : It is the inverse of this function, loosely speaking. */
7219 : :
7220 : : static rtx
7221 : 15700931 : expand_compound_operation (rtx x)
7222 : : {
7223 : 15700931 : unsigned HOST_WIDE_INT pos = 0, len;
7224 : 15700931 : bool unsignedp = false;
7225 : 15700931 : unsigned int modewidth;
7226 : 15700931 : rtx tem;
7227 : 15700931 : scalar_int_mode inner_mode;
7228 : :
7229 : 15700931 : switch (GET_CODE (x))
7230 : : {
7231 : 4446392 : case ZERO_EXTEND:
7232 : 4446392 : unsignedp = true;
7233 : : /* FALLTHRU */
7234 : 5870762 : case SIGN_EXTEND:
7235 : : /* We can't necessarily use a const_int for a multiword mode;
7236 : : it depends on implicitly extending the value.
7237 : : Since we don't know the right way to extend it,
7238 : : we can't tell whether the implicit way is right.
7239 : :
7240 : : Even for a mode that is no wider than a const_int,
7241 : : we can't win, because we need to sign extend one of its bits through
7242 : : the rest of it, and we don't know which bit. */
7243 : 5870762 : if (CONST_INT_P (XEXP (x, 0)))
7244 : : return x;
7245 : :
7246 : : /* Reject modes that aren't scalar integers because turning vector
7247 : : or complex modes into shifts causes problems. */
7248 : 5870762 : if (!is_a <scalar_int_mode> (GET_MODE (XEXP (x, 0)), &inner_mode))
7249 : : return x;
7250 : :
7251 : : /* Return if (subreg:MODE FROM 0) is not a safe replacement for
7252 : : (zero_extend:MODE FROM) or (sign_extend:MODE FROM). It is for any MEM
7253 : : because (SUBREG (MEM...)) is guaranteed to cause the MEM to be
7254 : : reloaded. If not for that, MEM's would very rarely be safe.
7255 : :
7256 : : Reject modes bigger than a word, because we might not be able
7257 : : to reference a two-register group starting with an arbitrary register
7258 : : (and currently gen_lowpart might crash for a SUBREG). */
7259 : :
7260 : 11875441 : if (GET_MODE_SIZE (inner_mode) > UNITS_PER_WORD)
7261 : : return x;
7262 : :
7263 : 5545637 : len = GET_MODE_PRECISION (inner_mode);
7264 : : /* If the inner object has VOIDmode (the only way this can happen
7265 : : is if it is an ASM_OPERANDS), we can't do anything since we don't
7266 : : know how much masking to do. */
7267 : 5545637 : if (len == 0)
7268 : : return x;
7269 : :
7270 : : break;
7271 : :
7272 : 829239 : case ZERO_EXTRACT:
7273 : 829239 : unsignedp = true;
7274 : :
7275 : : /* fall through */
7276 : :
7277 : 851919 : case SIGN_EXTRACT:
7278 : : /* If the operand is a CLOBBER, just return it. */
7279 : 851919 : if (GET_CODE (XEXP (x, 0)) == CLOBBER)
7280 : : return XEXP (x, 0);
7281 : :
7282 : 851919 : if (!CONST_INT_P (XEXP (x, 1))
7283 : 851788 : || !CONST_INT_P (XEXP (x, 2)))
7284 : : return x;
7285 : :
7286 : : /* Reject modes that aren't scalar integers because turning vector
7287 : : or complex modes into shifts causes problems. */
7288 : 13158986 : if (!is_a <scalar_int_mode> (GET_MODE (XEXP (x, 0)), &inner_mode))
7289 : : return x;
7290 : :
7291 : 776149 : len = INTVAL (XEXP (x, 1));
7292 : 776149 : pos = INTVAL (XEXP (x, 2));
7293 : :
7294 : : /* This should stay within the object being extracted, fail otherwise. */
7295 : 776149 : if (len + pos > GET_MODE_PRECISION (inner_mode))
7296 : : return x;
7297 : :
7298 : : if (BITS_BIG_ENDIAN)
7299 : : pos = GET_MODE_PRECISION (inner_mode) - len - pos;
7300 : :
7301 : : break;
7302 : :
7303 : : default:
7304 : : return x;
7305 : : }
7306 : :
7307 : : /* We've rejected non-scalar operations by now. */
7308 : 6321760 : scalar_int_mode mode = as_a <scalar_int_mode> (GET_MODE (x));
7309 : :
7310 : : /* Convert sign extension to zero extension, if we know that the high
7311 : : bit is not set, as this is easier to optimize. It will be converted
7312 : : back to cheaper alternative in make_extraction. */
7313 : 6321760 : if (GET_CODE (x) == SIGN_EXTEND
7314 : 1281679 : && HWI_COMPUTABLE_MODE_P (mode)
7315 : 7489173 : && ((nonzero_bits (XEXP (x, 0), inner_mode)
7316 : 1167413 : & ~(((unsigned HOST_WIDE_INT) GET_MODE_MASK (inner_mode)) >> 1))
7317 : : == 0))
7318 : : {
7319 : 2103 : rtx temp = gen_rtx_ZERO_EXTEND (mode, XEXP (x, 0));
7320 : 2103 : rtx temp2 = expand_compound_operation (temp);
7321 : :
7322 : : /* Make sure this is a profitable operation. */
7323 : 2103 : if (set_src_cost (x, mode, optimize_this_for_speed_p)
7324 : 2103 : > set_src_cost (temp2, mode, optimize_this_for_speed_p))
7325 : : return temp2;
7326 : 2008 : else if (set_src_cost (x, mode, optimize_this_for_speed_p)
7327 : 2008 : > set_src_cost (temp, mode, optimize_this_for_speed_p))
7328 : : return temp;
7329 : : else
7330 : : return x;
7331 : : }
7332 : :
7333 : : /* We can optimize some special cases of ZERO_EXTEND. */
7334 : 6319657 : if (GET_CODE (x) == ZERO_EXTEND)
7335 : : {
7336 : : /* (zero_extend:DI (truncate:SI foo:DI)) is just foo:DI if we
7337 : : know that the last value didn't have any inappropriate bits
7338 : : set. */
7339 : 4263958 : if (GET_CODE (XEXP (x, 0)) == TRUNCATE
7340 : 183 : && GET_MODE (XEXP (XEXP (x, 0), 0)) == mode
7341 : 183 : && HWI_COMPUTABLE_MODE_P (mode)
7342 : 4264141 : && (nonzero_bits (XEXP (XEXP (x, 0), 0), mode)
7343 : 183 : & ~GET_MODE_MASK (inner_mode)) == 0)
7344 : 31 : return XEXP (XEXP (x, 0), 0);
7345 : :
7346 : : /* Likewise for (zero_extend:DI (subreg:SI foo:DI 0)). */
7347 : 4263927 : if (GET_CODE (XEXP (x, 0)) == SUBREG
7348 : 637493 : && GET_MODE (SUBREG_REG (XEXP (x, 0))) == mode
7349 : 580084 : && subreg_lowpart_p (XEXP (x, 0))
7350 : 257919 : && HWI_COMPUTABLE_MODE_P (mode)
7351 : 4497291 : && (nonzero_bits (SUBREG_REG (XEXP (x, 0)), mode)
7352 : 233364 : & ~GET_MODE_MASK (inner_mode)) == 0)
7353 : 89 : return SUBREG_REG (XEXP (x, 0));
7354 : :
7355 : : /* (zero_extend:DI (truncate:SI foo:DI)) is just foo:DI when foo
7356 : : is a comparison and STORE_FLAG_VALUE permits. This is like
7357 : : the first case, but it works even when MODE is larger
7358 : : than HOST_WIDE_INT. */
7359 : 4263838 : if (GET_CODE (XEXP (x, 0)) == TRUNCATE
7360 : 152 : && GET_MODE (XEXP (XEXP (x, 0), 0)) == mode
7361 : 152 : && COMPARISON_P (XEXP (XEXP (x, 0), 0))
7362 : 0 : && GET_MODE_PRECISION (inner_mode) <= HOST_BITS_PER_WIDE_INT
7363 : 4263838 : && (STORE_FLAG_VALUE & ~GET_MODE_MASK (inner_mode)) == 0)
7364 : : return XEXP (XEXP (x, 0), 0);
7365 : :
7366 : : /* Likewise for (zero_extend:DI (subreg:SI foo:DI 0)). */
7367 : 4263838 : if (GET_CODE (XEXP (x, 0)) == SUBREG
7368 : 637404 : && GET_MODE (SUBREG_REG (XEXP (x, 0))) == mode
7369 : 579995 : && subreg_lowpart_p (XEXP (x, 0))
7370 : 257830 : && COMPARISON_P (SUBREG_REG (XEXP (x, 0)))
7371 : 0 : && GET_MODE_PRECISION (inner_mode) <= HOST_BITS_PER_WIDE_INT
7372 : 4263838 : && (STORE_FLAG_VALUE & ~GET_MODE_MASK (inner_mode)) == 0)
7373 : : return SUBREG_REG (XEXP (x, 0));
7374 : :
7375 : : }
7376 : :
7377 : : /* If we reach here, we want to return a pair of shifts. The inner
7378 : : shift is a left shift of BITSIZE - POS - LEN bits. The outer
7379 : : shift is a right shift of BITSIZE - LEN bits. It is arithmetic or
7380 : : logical depending on the value of UNSIGNEDP.
7381 : :
7382 : : If this was a ZERO_EXTEND or ZERO_EXTRACT, this pair of shifts will be
7383 : : converted into an AND of a shift.
7384 : :
7385 : : We must check for the case where the left shift would have a negative
7386 : : count. This can happen in a case like (x >> 31) & 255 on machines
7387 : : that can't shift by a constant. On those machines, we would first
7388 : : combine the shift with the AND to produce a variable-position
7389 : : extraction. Then the constant of 31 would be substituted in
7390 : : to produce such a position. */
7391 : :
7392 : 6319537 : modewidth = GET_MODE_PRECISION (mode);
7393 : 6319537 : if (modewidth >= pos + len)
7394 : : {
7395 : 6319536 : tem = gen_lowpart (mode, XEXP (x, 0));
7396 : 6319536 : if (!tem || GET_CODE (tem) == CLOBBER)
7397 : : return x;
7398 : 6631852 : tem = simplify_shift_const (NULL_RTX, ASHIFT, mode,
7399 : 3315926 : tem, modewidth - pos - len);
7400 : 3315926 : tem = simplify_shift_const (NULL_RTX, unsignedp ? LSHIFTRT : ASHIFTRT,
7401 : 3315926 : mode, tem, modewidth - len);
7402 : : }
7403 : 1 : else if (unsignedp && len < HOST_BITS_PER_WIDE_INT)
7404 : : {
7405 : 0 : tem = simplify_shift_const (NULL_RTX, LSHIFTRT, inner_mode,
7406 : : XEXP (x, 0), pos);
7407 : 0 : tem = gen_lowpart (mode, tem);
7408 : 0 : if (!tem || GET_CODE (tem) == CLOBBER)
7409 : : return x;
7410 : 0 : tem = simplify_and_const_int (NULL_RTX, mode, tem,
7411 : 0 : (HOST_WIDE_INT_1U << len) - 1);
7412 : : }
7413 : : else
7414 : : /* Any other cases we can't handle. */
7415 : : return x;
7416 : :
7417 : : /* If we couldn't do this for some reason, return the original
7418 : : expression. */
7419 : 3315926 : if (GET_CODE (tem) == CLOBBER)
7420 : : return x;
7421 : :
7422 : : return tem;
7423 : : }
7424 : :
7425 : : /* X is a SET which contains an assignment of one object into
7426 : : a part of another (such as a bit-field assignment, STRICT_LOW_PART,
7427 : : or certain SUBREGS). If possible, convert it into a series of
7428 : : logical operations.
7429 : :
7430 : : We half-heartedly support variable positions, but do not at all
7431 : : support variable lengths. */
7432 : :
7433 : : static const_rtx
7434 : 78982158 : expand_field_assignment (const_rtx x)
7435 : : {
7436 : 78982158 : rtx inner;
7437 : 78982158 : rtx pos; /* Always counts from low bit. */
7438 : 78982158 : int len, inner_len;
7439 : 78982158 : rtx mask, cleared, masked;
7440 : 78982158 : scalar_int_mode compute_mode;
7441 : :
7442 : : /* Loop until we find something we can't simplify. */
7443 : 79231117 : while (1)
7444 : : {
7445 : 79231117 : if (GET_CODE (SET_DEST (x)) == STRICT_LOW_PART
7446 : 16947 : && GET_CODE (XEXP (SET_DEST (x), 0)) == SUBREG)
7447 : : {
7448 : 16947 : rtx x0 = XEXP (SET_DEST (x), 0);
7449 : 16947 : if (!GET_MODE_PRECISION (GET_MODE (x0)).is_constant (&len))
7450 : : break;
7451 : 16947 : inner = SUBREG_REG (XEXP (SET_DEST (x), 0));
7452 : 16947 : pos = gen_int_mode (subreg_lsb (XEXP (SET_DEST (x), 0)),
7453 : : MAX_MODE_INT);
7454 : 16947 : }
7455 : 79214170 : else if (GET_CODE (SET_DEST (x)) == ZERO_EXTRACT
7456 : 5309 : && CONST_INT_P (XEXP (SET_DEST (x), 1)))
7457 : : {
7458 : 5309 : inner = XEXP (SET_DEST (x), 0);
7459 : 5309 : if (!GET_MODE_PRECISION (GET_MODE (inner)).is_constant (&inner_len))
7460 : : break;
7461 : :
7462 : 5309 : len = INTVAL (XEXP (SET_DEST (x), 1));
7463 : 5309 : pos = XEXP (SET_DEST (x), 2);
7464 : :
7465 : : /* A constant position should stay within the width of INNER. */
7466 : 5309 : if (CONST_INT_P (pos) && INTVAL (pos) + len > inner_len)
7467 : : break;
7468 : :
7469 : : if (BITS_BIG_ENDIAN)
7470 : : {
7471 : : if (CONST_INT_P (pos))
7472 : : pos = GEN_INT (inner_len - len - INTVAL (pos));
7473 : : else if (GET_CODE (pos) == MINUS
7474 : : && CONST_INT_P (XEXP (pos, 1))
7475 : : && INTVAL (XEXP (pos, 1)) == inner_len - len)
7476 : : /* If position is ADJUST - X, new position is X. */
7477 : : pos = XEXP (pos, 0);
7478 : : else
7479 : : pos = simplify_gen_binary (MINUS, GET_MODE (pos),
7480 : : gen_int_mode (inner_len - len,
7481 : : GET_MODE (pos)),
7482 : : pos);
7483 : : }
7484 : : }
7485 : :
7486 : : /* If the destination is a subreg that overwrites the whole of the inner
7487 : : register, we can move the subreg to the source. */
7488 : 79444079 : else if (GET_CODE (SET_DEST (x)) == SUBREG
7489 : : /* We need SUBREGs to compute nonzero_bits properly. */
7490 : 858488 : && nonzero_sign_valid
7491 : 79981516 : && !read_modify_subreg_p (SET_DEST (x)))
7492 : : {
7493 : 235218 : x = gen_rtx_SET (SUBREG_REG (SET_DEST (x)),
7494 : : gen_lowpart
7495 : : (GET_MODE (SUBREG_REG (SET_DEST (x))),
7496 : : SET_SRC (x)));
7497 : 235218 : continue;
7498 : : }
7499 : : else
7500 : : break;
7501 : :
7502 : 25053 : while (GET_CODE (inner) == SUBREG && subreg_lowpart_p (inner))
7503 : 2797 : inner = SUBREG_REG (inner);
7504 : :
7505 : : /* Don't attempt bitwise arithmetic on non scalar integer modes. */
7506 : 22256 : if (!is_a <scalar_int_mode> (GET_MODE (inner), &compute_mode))
7507 : : {
7508 : : /* Don't do anything for vector or complex integral types. */
7509 : 5889 : if (! FLOAT_MODE_P (GET_MODE (inner)))
7510 : : break;
7511 : :
7512 : : /* Try to find an integral mode to pun with. */
7513 : 38 : if (!int_mode_for_size (GET_MODE_BITSIZE (GET_MODE (inner)), 0)
7514 : 0 : .exists (&compute_mode))
7515 : : break;
7516 : :
7517 : 19 : inner = gen_lowpart (compute_mode, inner);
7518 : : }
7519 : :
7520 : : /* Compute a mask of LEN bits, if we can do this on the host machine. */
7521 : 16386 : if (len >= HOST_BITS_PER_WIDE_INT)
7522 : : break;
7523 : :
7524 : : /* Don't try to compute in too wide unsupported modes. */
7525 : 16386 : if (!targetm.scalar_mode_supported_p (compute_mode))
7526 : : break;
7527 : :
7528 : : /* gen_lowpart_for_combine returns CLOBBER on failure. */
7529 : 16386 : rtx lowpart = gen_lowpart (compute_mode, SET_SRC (x));
7530 : 16386 : if (GET_CODE (lowpart) == CLOBBER)
7531 : : break;
7532 : :
7533 : : /* Now compute the equivalent expression. Make a copy of INNER
7534 : : for the SET_DEST in case it is a MEM into which we will substitute;
7535 : : we don't want shared RTL in that case. */
7536 : 13741 : mask = gen_int_mode ((HOST_WIDE_INT_1U << len) - 1,
7537 : : compute_mode);
7538 : 13741 : cleared = simplify_gen_binary (AND, compute_mode,
7539 : : simplify_gen_unary (NOT, compute_mode,
7540 : : simplify_gen_binary (ASHIFT,
7541 : : compute_mode,
7542 : : mask, pos),
7543 : : compute_mode),
7544 : : inner);
7545 : 13741 : masked = simplify_gen_binary (ASHIFT, compute_mode,
7546 : : simplify_gen_binary (
7547 : : AND, compute_mode, lowpart, mask),
7548 : : pos);
7549 : :
7550 : 13741 : x = gen_rtx_SET (copy_rtx (inner),
7551 : : simplify_gen_binary (IOR, compute_mode,
7552 : : cleared, masked));
7553 : : }
7554 : :
7555 : 78982158 : return x;
7556 : : }
7557 : :
7558 : : /* Return an RTX for a reference to LEN bits of INNER. If POS_RTX is nonzero,
7559 : : it is an RTX that represents the (variable) starting position; otherwise,
7560 : : POS is the (constant) starting bit position. Both are counted from the LSB.
7561 : :
7562 : : UNSIGNEDP is true for an unsigned reference and zero for a signed one.
7563 : :
7564 : : IN_DEST is true if this is a reference in the destination of a SET.
7565 : : This is used when a ZERO_ or SIGN_EXTRACT isn't needed. If nonzero,
7566 : : a STRICT_LOW_PART will be used, if zero, ZERO_EXTEND or SIGN_EXTEND will
7567 : : be used.
7568 : :
7569 : : IN_COMPARE is true if we are in a COMPARE. This means that a
7570 : : ZERO_EXTRACT should be built even for bits starting at bit 0.
7571 : :
7572 : : MODE is the desired mode of the result (if IN_DEST == 0).
7573 : :
7574 : : The result is an RTX for the extraction or NULL_RTX if the target
7575 : : can't handle it. */
7576 : :
7577 : : static rtx
7578 : 4757339 : make_extraction (machine_mode mode, rtx inner, HOST_WIDE_INT pos,
7579 : : rtx pos_rtx, unsigned HOST_WIDE_INT len, bool unsignedp,
7580 : : bool in_dest, bool in_compare)
7581 : : {
7582 : : /* This mode describes the size of the storage area
7583 : : to fetch the overall value from. Within that, we
7584 : : ignore the POS lowest bits, etc. */
7585 : 4757339 : machine_mode is_mode = GET_MODE (inner);
7586 : 4757339 : machine_mode inner_mode;
7587 : 4757339 : scalar_int_mode wanted_inner_mode;
7588 : 4757339 : scalar_int_mode wanted_inner_reg_mode = word_mode;
7589 : 4757339 : scalar_int_mode pos_mode = word_mode;
7590 : 4757339 : machine_mode extraction_mode = word_mode;
7591 : 4757339 : rtx new_rtx = 0;
7592 : 4757339 : rtx orig_pos_rtx = pos_rtx;
7593 : 4757339 : HOST_WIDE_INT orig_pos;
7594 : :
7595 : 4757339 : if (pos_rtx && CONST_INT_P (pos_rtx))
7596 : 832523 : pos = INTVAL (pos_rtx), pos_rtx = 0;
7597 : :
7598 : 4757339 : if (GET_CODE (inner) == SUBREG
7599 : 2434618 : && subreg_lowpart_p (inner)
7600 : 7188538 : && (paradoxical_subreg_p (inner)
7601 : : /* If trying or potentionally trying to extract
7602 : : bits outside of is_mode, don't look through
7603 : : non-paradoxical SUBREGs. See PR82192. */
7604 : 157648 : || (pos_rtx == NULL_RTX
7605 : 157613 : && known_le (pos + len, GET_MODE_PRECISION (is_mode)))))
7606 : : {
7607 : : /* If going from (subreg:SI (mem:QI ...)) to (mem:QI ...),
7608 : : consider just the QI as the memory to extract from.
7609 : : The subreg adds or removes high bits; its mode is
7610 : : irrelevant to the meaning of this extraction,
7611 : : since POS and LEN count from the lsb. */
7612 : 2431164 : if (MEM_P (SUBREG_REG (inner)))
7613 : 494271 : is_mode = GET_MODE (SUBREG_REG (inner));
7614 : : inner = SUBREG_REG (inner);
7615 : : }
7616 : 2326175 : else if (GET_CODE (inner) == ASHIFT
7617 : 155347 : && CONST_INT_P (XEXP (inner, 1))
7618 : 154130 : && pos_rtx == 0 && pos == 0
7619 : 154084 : && len > UINTVAL (XEXP (inner, 1)))
7620 : : {
7621 : : /* We're extracting the least significant bits of an rtx
7622 : : (ashift X (const_int C)), where LEN > C. Extract the
7623 : : least significant (LEN - C) bits of X, giving an rtx
7624 : : whose mode is MODE, then shift it left C times. */
7625 : 154084 : new_rtx = make_extraction (mode, XEXP (inner, 0),
7626 : : 0, 0, len - INTVAL (XEXP (inner, 1)),
7627 : : unsignedp, in_dest, in_compare);
7628 : 154084 : if (new_rtx != 0)
7629 : 152439 : return gen_rtx_ASHIFT (mode, new_rtx, XEXP (inner, 1));
7630 : : }
7631 : 2172091 : else if (GET_CODE (inner) == MULT
7632 : 178963 : && CONST_INT_P (XEXP (inner, 1))
7633 : 155115 : && pos_rtx == 0 && pos == 0)
7634 : : {
7635 : : /* We're extracting the least significant bits of an rtx
7636 : : (mult X (const_int 2^C)), where LEN > C. Extract the
7637 : : least significant (LEN - C) bits of X, giving an rtx
7638 : : whose mode is MODE, then multiply it by 2^C. */
7639 : 110968 : const HOST_WIDE_INT shift_amt = exact_log2 (INTVAL (XEXP (inner, 1)));
7640 : 110968 : if (len > 1 && IN_RANGE (shift_amt, 1, len - 1))
7641 : : {
7642 : 107372 : new_rtx = make_extraction (mode, XEXP (inner, 0),
7643 : : 0, 0, len - shift_amt,
7644 : : unsignedp, in_dest, in_compare);
7645 : 107372 : if (new_rtx)
7646 : 107372 : return gen_rtx_MULT (mode, new_rtx, XEXP (inner, 1));
7647 : : }
7648 : : }
7649 : 2061123 : else if (GET_CODE (inner) == TRUNCATE
7650 : : /* If trying or potentionally trying to extract
7651 : : bits outside of is_mode, don't look through
7652 : : TRUNCATE. See PR82192. */
7653 : 0 : && pos_rtx == NULL_RTX
7654 : 2061123 : && known_le (pos + len, GET_MODE_PRECISION (is_mode)))
7655 : 0 : inner = XEXP (inner, 0);
7656 : :
7657 : 4497528 : inner_mode = GET_MODE (inner);
7658 : :
7659 : : /* See if this can be done without an extraction. We never can if the
7660 : : width of the field is not the same as that of some integer mode. For
7661 : : registers, we can only avoid the extraction if the position is at the
7662 : : low-order bit and this is either not in the destination or we have the
7663 : : appropriate STRICT_LOW_PART operation available.
7664 : :
7665 : : For MEM, we can avoid an extract if the field starts on an appropriate
7666 : : boundary and we can change the mode of the memory reference. */
7667 : :
7668 : 4497528 : scalar_int_mode tmode;
7669 : 4497528 : if (int_mode_for_size (len, 1).exists (&tmode)
7670 : 2357016 : && ((pos_rtx == 0 && (pos % BITS_PER_WORD) == 0
7671 : 2041925 : && !MEM_P (inner)
7672 : 1688215 : && (pos == 0 || REG_P (inner))
7673 : 1688215 : && (inner_mode == tmode
7674 : 287152 : || !REG_P (inner)
7675 : 262622 : || TRULY_NOOP_TRUNCATION_MODES_P (tmode, inner_mode)
7676 : 0 : || reg_truncated_to_mode (tmode, inner))
7677 : 1688215 : && (! in_dest
7678 : 30 : || (REG_P (inner)
7679 : 30 : && have_insn_for (STRICT_LOW_PART, tmode))))
7680 : 555824 : || (MEM_P (inner) && pos_rtx == 0
7681 : 354714 : && (pos
7682 : : % (STRICT_ALIGNMENT ? GET_MODE_ALIGNMENT (tmode)
7683 : : : BITS_PER_UNIT)) == 0
7684 : : /* We can't do this if we are widening INNER_MODE (it
7685 : : may not be aligned, for one thing). */
7686 : 354031 : && !paradoxical_subreg_p (tmode, inner_mode)
7687 : 354031 : && known_le (pos + len, GET_MODE_PRECISION (is_mode))
7688 : 354031 : && (inner_mode == tmode
7689 : 758 : || (! mode_dependent_address_p (XEXP (inner, 0),
7690 : 758 : MEM_ADDR_SPACE (inner))
7691 : 758 : && ! MEM_VOLATILE_P (inner))))))
7692 : : {
7693 : : /* If INNER is a MEM, make a new MEM that encompasses just the desired
7694 : : field. If the original and current mode are the same, we need not
7695 : : adjust the offset. Otherwise, we do if bytes big endian.
7696 : :
7697 : : If INNER is not a MEM, get a piece consisting of just the field
7698 : : of interest (in this case POS % BITS_PER_WORD must be 0). */
7699 : :
7700 : 2042216 : if (MEM_P (inner))
7701 : : {
7702 : 354018 : poly_int64 offset;
7703 : :
7704 : : /* POS counts from lsb, but make OFFSET count in memory order. */
7705 : 354018 : if (BYTES_BIG_ENDIAN)
7706 : : offset = bits_to_bytes_round_down (GET_MODE_PRECISION (is_mode)
7707 : : - len - pos);
7708 : : else
7709 : 354018 : offset = pos / BITS_PER_UNIT;
7710 : :
7711 : 354018 : new_rtx = adjust_address_nv (inner, tmode, offset);
7712 : : }
7713 : 1688198 : else if (REG_P (inner))
7714 : : {
7715 : 1101311 : if (tmode != inner_mode)
7716 : : {
7717 : : /* We can't call gen_lowpart in a DEST since we
7718 : : always want a SUBREG (see below) and it would sometimes
7719 : : return a new hard register. */
7720 : 262605 : if (pos || in_dest)
7721 : : {
7722 : 16 : poly_uint64 offset
7723 : 16 : = subreg_offset_from_lsb (tmode, inner_mode, pos);
7724 : :
7725 : : /* Avoid creating invalid subregs, for example when
7726 : : simplifying (x>>32)&255. */
7727 : 16 : if (!validate_subreg (tmode, inner_mode, inner, offset))
7728 : 0 : return NULL_RTX;
7729 : :
7730 : 16 : new_rtx = gen_rtx_SUBREG (tmode, inner, offset);
7731 : 16 : }
7732 : : else
7733 : 262589 : new_rtx = gen_lowpart (tmode, inner);
7734 : : }
7735 : : else
7736 : : new_rtx = inner;
7737 : : }
7738 : : else
7739 : 1173774 : new_rtx = force_to_mode (inner, tmode,
7740 : : len >= HOST_BITS_PER_WIDE_INT
7741 : : ? HOST_WIDE_INT_M1U
7742 : 586887 : : (HOST_WIDE_INT_1U << len) - 1, false);
7743 : :
7744 : : /* If this extraction is going into the destination of a SET,
7745 : : make a STRICT_LOW_PART unless we made a MEM. */
7746 : :
7747 : 2042216 : if (in_dest)
7748 : 49 : return (MEM_P (new_rtx) ? new_rtx
7749 : : : (GET_CODE (new_rtx) != SUBREG
7750 : 13 : ? gen_rtx_CLOBBER (tmode, const0_rtx)
7751 : 13 : : gen_rtx_STRICT_LOW_PART (VOIDmode, new_rtx)));
7752 : :
7753 : 2042167 : if (mode == tmode)
7754 : : return new_rtx;
7755 : :
7756 : 2042132 : if (CONST_SCALAR_INT_P (new_rtx))
7757 : 5 : return simplify_unary_operation (unsignedp ? ZERO_EXTEND : SIGN_EXTEND,
7758 : 5 : mode, new_rtx, tmode);
7759 : :
7760 : : /* If we know that no extraneous bits are set, and that the high
7761 : : bit is not set, convert the extraction to the cheaper of
7762 : : sign and zero extension, that are equivalent in these cases. */
7763 : 2042127 : if (flag_expensive_optimizations
7764 : 2042127 : && (HWI_COMPUTABLE_MODE_P (tmode)
7765 : 1899244 : && ((nonzero_bits (new_rtx, tmode)
7766 : 1899244 : & ~(((unsigned HOST_WIDE_INT)GET_MODE_MASK (tmode)) >> 1))
7767 : : == 0)))
7768 : : {
7769 : 7945 : rtx temp = gen_rtx_ZERO_EXTEND (mode, new_rtx);
7770 : 7945 : rtx temp1 = gen_rtx_SIGN_EXTEND (mode, new_rtx);
7771 : :
7772 : : /* Prefer ZERO_EXTENSION, since it gives more information to
7773 : : backends. */
7774 : 7945 : if (set_src_cost (temp, mode, optimize_this_for_speed_p)
7775 : 7945 : <= set_src_cost (temp1, mode, optimize_this_for_speed_p))
7776 : : return temp;
7777 : 0 : return temp1;
7778 : : }
7779 : :
7780 : : /* Otherwise, sign- or zero-extend unless we already are in the
7781 : : proper mode. */
7782 : :
7783 : 2034182 : return (gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND,
7784 : 2034182 : mode, new_rtx));
7785 : : }
7786 : :
7787 : : /* Unless this is a COMPARE or we have a funny memory reference,
7788 : : don't do anything with zero-extending field extracts starting at
7789 : : the low-order bit since they are simple AND operations. */
7790 : 2455312 : if (pos_rtx == 0 && pos == 0 && ! in_dest
7791 : 1508352 : && ! in_compare && unsignedp)
7792 : : return 0;
7793 : :
7794 : : /* Unless INNER is not MEM, reject this if we would be spanning bytes or
7795 : : if the position is not a constant and the length is not 1. In all
7796 : : other cases, we would only be going outside our object in cases when
7797 : : an original shift would have been undefined. */
7798 : 1354738 : if (MEM_P (inner)
7799 : 1354738 : && ((pos_rtx == 0 && maybe_gt (pos + len, GET_MODE_PRECISION (is_mode)))
7800 : 1434 : || (pos_rtx != 0 && len != 1)))
7801 : : return 0;
7802 : :
7803 : 1354700 : enum extraction_pattern pattern = (in_dest ? EP_insv
7804 : 1347640 : : unsignedp ? EP_extzv : EP_extv);
7805 : :
7806 : : /* If INNER is not from memory, we want it to have the mode of a register
7807 : : extraction pattern's structure operand, or word_mode if there is no
7808 : : such pattern. The same applies to extraction_mode and pos_mode
7809 : : and their respective operands.
7810 : :
7811 : : For memory, assume that the desired extraction_mode and pos_mode
7812 : : are the same as for a register operation, since at present we don't
7813 : : have named patterns for aligned memory structures. */
7814 : 1354700 : class extraction_insn insn;
7815 : 1354700 : unsigned int inner_size;
7816 : 2709400 : if (GET_MODE_BITSIZE (inner_mode).is_constant (&inner_size)
7817 : 1354700 : && get_best_reg_extraction_insn (&insn, pattern, inner_size, mode))
7818 : : {
7819 : 1248215 : wanted_inner_reg_mode = insn.struct_mode.require ();
7820 : 1248215 : pos_mode = insn.pos_mode;
7821 : 1248215 : extraction_mode = insn.field_mode;
7822 : : }
7823 : :
7824 : : /* Never narrow an object, since that might not be safe. */
7825 : :
7826 : 1354700 : if (mode != VOIDmode
7827 : 1354700 : && partial_subreg_p (extraction_mode, mode))
7828 : : extraction_mode = mode;
7829 : :
7830 : : /* Punt if len is too large for extraction_mode. */
7831 : 1354700 : if (maybe_gt (len, GET_MODE_PRECISION (extraction_mode)))
7832 : : return NULL_RTX;
7833 : :
7834 : 1354688 : if (!MEM_P (inner))
7835 : 1185548 : wanted_inner_mode = wanted_inner_reg_mode;
7836 : : else
7837 : : {
7838 : : /* Be careful not to go beyond the extracted object and maintain the
7839 : : natural alignment of the memory. */
7840 : 169140 : wanted_inner_mode = smallest_int_mode_for_size (len).require ();
7841 : 342049 : while (pos % GET_MODE_BITSIZE (wanted_inner_mode) + len
7842 : 345818 : > GET_MODE_BITSIZE (wanted_inner_mode))
7843 : 3769 : wanted_inner_mode = GET_MODE_WIDER_MODE (wanted_inner_mode).require ();
7844 : : }
7845 : :
7846 : 1354688 : orig_pos = pos;
7847 : :
7848 : 1354688 : if (BITS_BIG_ENDIAN)
7849 : : {
7850 : : /* POS is passed as if BITS_BIG_ENDIAN == 0, so we need to convert it to
7851 : : BITS_BIG_ENDIAN style. If position is constant, compute new
7852 : : position. Otherwise, build subtraction.
7853 : : Note that POS is relative to the mode of the original argument.
7854 : : If it's a MEM we need to recompute POS relative to that.
7855 : : However, if we're extracting from (or inserting into) a register,
7856 : : we want to recompute POS relative to wanted_inner_mode. */
7857 : : int width;
7858 : : if (!MEM_P (inner))
7859 : : width = GET_MODE_BITSIZE (wanted_inner_mode);
7860 : : else if (!GET_MODE_BITSIZE (is_mode).is_constant (&width))
7861 : : return NULL_RTX;
7862 : :
7863 : : if (pos_rtx == 0)
7864 : : pos = width - len - pos;
7865 : : else
7866 : : pos_rtx
7867 : : = gen_rtx_MINUS (GET_MODE (pos_rtx),
7868 : : gen_int_mode (width - len, GET_MODE (pos_rtx)),
7869 : : pos_rtx);
7870 : : /* POS may be less than 0 now, but we check for that below.
7871 : : Note that it can only be less than 0 if !MEM_P (inner). */
7872 : : }
7873 : :
7874 : : /* If INNER has a wider mode, and this is a constant extraction, try to
7875 : : make it smaller and adjust the byte to point to the byte containing
7876 : : the value. */
7877 : 1354688 : if (wanted_inner_mode != VOIDmode
7878 : 1354688 : && inner_mode != wanted_inner_mode
7879 : 193822 : && ! pos_rtx
7880 : 185995 : && partial_subreg_p (wanted_inner_mode, is_mode)
7881 : 111873 : && MEM_P (inner)
7882 : 29337 : && ! mode_dependent_address_p (XEXP (inner, 0), MEM_ADDR_SPACE (inner))
7883 : 1384025 : && ! MEM_VOLATILE_P (inner))
7884 : : {
7885 : 27784 : poly_int64 offset = 0;
7886 : :
7887 : : /* The computations below will be correct if the machine is big
7888 : : endian in both bits and bytes or little endian in bits and bytes.
7889 : : If it is mixed, we must adjust. */
7890 : :
7891 : : /* If bytes are big endian and we had a paradoxical SUBREG, we must
7892 : : adjust OFFSET to compensate. */
7893 : 27784 : if (BYTES_BIG_ENDIAN
7894 : : && paradoxical_subreg_p (is_mode, inner_mode))
7895 : : offset -= GET_MODE_SIZE (is_mode) - GET_MODE_SIZE (inner_mode);
7896 : :
7897 : : /* We can now move to the desired byte. */
7898 : 55568 : offset += (pos / GET_MODE_BITSIZE (wanted_inner_mode))
7899 : 27784 : * GET_MODE_SIZE (wanted_inner_mode);
7900 : 27784 : pos %= GET_MODE_BITSIZE (wanted_inner_mode);
7901 : :
7902 : 27784 : if (BYTES_BIG_ENDIAN != BITS_BIG_ENDIAN
7903 : : && is_mode != wanted_inner_mode)
7904 : : offset = (GET_MODE_SIZE (is_mode)
7905 : : - GET_MODE_SIZE (wanted_inner_mode) - offset);
7906 : :
7907 : 27784 : inner = adjust_address_nv (inner, wanted_inner_mode, offset);
7908 : : }
7909 : :
7910 : : /* If INNER is not memory, get it into the proper mode. If we are changing
7911 : : its mode, POS must be a constant and smaller than the size of the new
7912 : : mode. */
7913 : 1326904 : else if (!MEM_P (inner))
7914 : : {
7915 : : /* On the LHS, don't create paradoxical subregs implicitely truncating
7916 : : the register unless TARGET_TRULY_NOOP_TRUNCATION. */
7917 : 1185548 : if (in_dest
7918 : 1185548 : && !TRULY_NOOP_TRUNCATION_MODES_P (GET_MODE (inner),
7919 : : wanted_inner_mode))
7920 : 0 : return NULL_RTX;
7921 : :
7922 : 1185548 : if (GET_MODE (inner) != wanted_inner_mode
7923 : 1185548 : && (pos_rtx != 0
7924 : 313316 : || orig_pos + len > GET_MODE_BITSIZE (wanted_inner_mode)))
7925 : : return NULL_RTX;
7926 : :
7927 : 1119064 : if (orig_pos < 0)
7928 : : return NULL_RTX;
7929 : :
7930 : 2217419 : inner = force_to_mode (inner, wanted_inner_mode,
7931 : : pos_rtx
7932 : 1098355 : || len + orig_pos >= HOST_BITS_PER_WIDE_INT
7933 : : ? HOST_WIDE_INT_M1U
7934 : 923753 : : (((HOST_WIDE_INT_1U << len) - 1)
7935 : 923753 : << orig_pos), false);
7936 : : }
7937 : :
7938 : : /* Adjust mode of POS_RTX, if needed. If we want a wider mode, we
7939 : : have to zero extend. Otherwise, we can just use a SUBREG.
7940 : :
7941 : : We dealt with constant rtxes earlier, so pos_rtx cannot
7942 : : have VOIDmode at this point. */
7943 : 1288204 : if (pos_rtx != 0
7944 : 1288204 : && (GET_MODE_SIZE (pos_mode)
7945 : 1310309 : > GET_MODE_SIZE (as_a <scalar_int_mode> (GET_MODE (pos_rtx)))))
7946 : : {
7947 : 62 : rtx temp = simplify_gen_unary (ZERO_EXTEND, pos_mode, pos_rtx,
7948 : : GET_MODE (pos_rtx));
7949 : :
7950 : : /* If we know that no extraneous bits are set, and that the high
7951 : : bit is not set, convert extraction to cheaper one - either
7952 : : SIGN_EXTENSION or ZERO_EXTENSION, that are equivalent in these
7953 : : cases. */
7954 : 62 : if (flag_expensive_optimizations
7955 : 62 : && (HWI_COMPUTABLE_MODE_P (GET_MODE (pos_rtx))
7956 : 62 : && ((nonzero_bits (pos_rtx, GET_MODE (pos_rtx))
7957 : 62 : & ~(((unsigned HOST_WIDE_INT)
7958 : 62 : GET_MODE_MASK (GET_MODE (pos_rtx)))
7959 : 62 : >> 1))
7960 : : == 0)))
7961 : : {
7962 : 46 : rtx temp1 = simplify_gen_unary (SIGN_EXTEND, pos_mode, pos_rtx,
7963 : : GET_MODE (pos_rtx));
7964 : :
7965 : : /* Prefer ZERO_EXTENSION, since it gives more information to
7966 : : backends. */
7967 : 46 : if (set_src_cost (temp1, pos_mode, optimize_this_for_speed_p)
7968 : 46 : < set_src_cost (temp, pos_mode, optimize_this_for_speed_p))
7969 : 1288204 : temp = temp1;
7970 : : }
7971 : : pos_rtx = temp;
7972 : : }
7973 : :
7974 : : /* Make POS_RTX unless we already have it and it is correct. If we don't
7975 : : have a POS_RTX but we do have an ORIG_POS_RTX, the latter must
7976 : : be a CONST_INT. */
7977 : 1288204 : if (pos_rtx == 0 && orig_pos_rtx != 0 && INTVAL (orig_pos_rtx) == pos)
7978 : : pos_rtx = orig_pos_rtx;
7979 : :
7980 : 479663 : else if (pos_rtx == 0)
7981 : 457558 : pos_rtx = GEN_INT (pos);
7982 : :
7983 : : /* Make the required operation. See if we can use existing rtx. */
7984 : 1288204 : new_rtx = gen_rtx_fmt_eee (unsignedp ? ZERO_EXTRACT : SIGN_EXTRACT,
7985 : : extraction_mode, inner, GEN_INT (len), pos_rtx);
7986 : 1288204 : if (! in_dest)
7987 : 1281190 : new_rtx = gen_lowpart (mode, new_rtx);
7988 : :
7989 : : return new_rtx;
7990 : : }
7991 : :
7992 : : /* See if X (of mode MODE) contains an ASHIFT of COUNT or more bits that
7993 : : can be commuted with any other operations in X. Return X without
7994 : : that shift if so. */
7995 : :
7996 : : static rtx
7997 : 1594562 : extract_left_shift (scalar_int_mode mode, rtx x, int count)
7998 : : {
7999 : 1594562 : enum rtx_code code = GET_CODE (x);
8000 : 1594562 : rtx tem;
8001 : :
8002 : 1594562 : switch (code)
8003 : : {
8004 : 277361 : case ASHIFT:
8005 : : /* This is the shift itself. If it is wide enough, we will return
8006 : : either the value being shifted if the shift count is equal to
8007 : : COUNT or a shift for the difference. */
8008 : 277361 : if (CONST_INT_P (XEXP (x, 1))
8009 : 272133 : && INTVAL (XEXP (x, 1)) >= count)
8010 : 270770 : return simplify_shift_const (NULL_RTX, ASHIFT, mode, XEXP (x, 0),
8011 : 270770 : INTVAL (XEXP (x, 1)) - count);
8012 : : break;
8013 : :
8014 : 5362 : case NEG: case NOT:
8015 : 5362 : if ((tem = extract_left_shift (mode, XEXP (x, 0), count)) != 0)
8016 : 2571 : return simplify_gen_unary (code, mode, tem, mode);
8017 : :
8018 : : break;
8019 : :
8020 : 543881 : case PLUS: case IOR: case XOR: case AND:
8021 : : /* If we can safely shift this constant and we find the inner shift,
8022 : : make a new operation. */
8023 : 543881 : if (CONST_INT_P (XEXP (x, 1))
8024 : 260114 : && (UINTVAL (XEXP (x, 1))
8025 : 260114 : & (((HOST_WIDE_INT_1U << count)) - 1)) == 0
8026 : 665619 : && (tem = extract_left_shift (mode, XEXP (x, 0), count)) != 0)
8027 : : {
8028 : 6863 : HOST_WIDE_INT val = INTVAL (XEXP (x, 1)) >> count;
8029 : 6863 : return simplify_gen_binary (code, mode, tem,
8030 : 6863 : gen_int_mode (val, mode));
8031 : : }
8032 : : break;
8033 : :
8034 : : default:
8035 : : break;
8036 : : }
8037 : :
8038 : : return 0;
8039 : : }
8040 : :
8041 : : /* Subroutine of make_compound_operation. *X_PTR is the rtx at the current
8042 : : level of the expression and MODE is its mode. IN_CODE is as for
8043 : : make_compound_operation. *NEXT_CODE_PTR is the value of IN_CODE
8044 : : that should be used when recursing on operands of *X_PTR.
8045 : :
8046 : : There are two possible actions:
8047 : :
8048 : : - Return null. This tells the caller to recurse on *X_PTR with IN_CODE
8049 : : equal to *NEXT_CODE_PTR, after which *X_PTR holds the final value.
8050 : :
8051 : : - Return a new rtx, which the caller returns directly. */
8052 : :
8053 : : static rtx
8054 : 251541019 : make_compound_operation_int (scalar_int_mode mode, rtx *x_ptr,
8055 : : enum rtx_code in_code,
8056 : : enum rtx_code *next_code_ptr)
8057 : : {
8058 : 251541019 : rtx x = *x_ptr;
8059 : 251541019 : enum rtx_code next_code = *next_code_ptr;
8060 : 251541019 : enum rtx_code code = GET_CODE (x);
8061 : 251541019 : int mode_width = GET_MODE_PRECISION (mode);
8062 : 251541019 : rtx rhs, lhs;
8063 : 251541019 : rtx new_rtx = 0;
8064 : 251541019 : int i;
8065 : 251541019 : rtx tem;
8066 : 251541019 : scalar_int_mode inner_mode;
8067 : 251541019 : bool equality_comparison = false;
8068 : :
8069 : 251541019 : if (in_code == EQ)
8070 : : {
8071 : 7958284 : equality_comparison = true;
8072 : 7958284 : in_code = COMPARE;
8073 : : }
8074 : :
8075 : : /* Process depending on the code of this operation. If NEW is set
8076 : : nonzero, it will be returned. */
8077 : :
8078 : 251541019 : switch (code)
8079 : : {
8080 : 6241639 : case ASHIFT:
8081 : : /* Convert shifts by constants into multiplications if inside
8082 : : an address. */
8083 : 6241639 : if (in_code == MEM && CONST_INT_P (XEXP (x, 1))
8084 : 1872211 : && INTVAL (XEXP (x, 1)) < HOST_BITS_PER_WIDE_INT
8085 : 1872211 : && INTVAL (XEXP (x, 1)) >= 0)
8086 : : {
8087 : 1872211 : HOST_WIDE_INT count = INTVAL (XEXP (x, 1));
8088 : 1872211 : HOST_WIDE_INT multval = HOST_WIDE_INT_1 << count;
8089 : :
8090 : 1872211 : new_rtx = make_compound_operation (XEXP (x, 0), next_code);
8091 : 1872211 : if (GET_CODE (new_rtx) == NEG)
8092 : : {
8093 : 7 : new_rtx = XEXP (new_rtx, 0);
8094 : 7 : multval = -multval;
8095 : : }
8096 : 1872211 : multval = trunc_int_for_mode (multval, mode);
8097 : 1872211 : new_rtx = gen_rtx_MULT (mode, new_rtx, gen_int_mode (multval, mode));
8098 : : }
8099 : : break;
8100 : :
8101 : 45729044 : case PLUS:
8102 : 45729044 : lhs = XEXP (x, 0);
8103 : 45729044 : rhs = XEXP (x, 1);
8104 : 45729044 : lhs = make_compound_operation (lhs, next_code);
8105 : 45729044 : rhs = make_compound_operation (rhs, next_code);
8106 : 45729044 : if (GET_CODE (lhs) == MULT && GET_CODE (XEXP (lhs, 0)) == NEG)
8107 : : {
8108 : 0 : tem = simplify_gen_binary (MULT, mode, XEXP (XEXP (lhs, 0), 0),
8109 : : XEXP (lhs, 1));
8110 : 0 : new_rtx = simplify_gen_binary (MINUS, mode, rhs, tem);
8111 : : }
8112 : 45729044 : else if (GET_CODE (lhs) == MULT
8113 : 4492950 : && (CONST_INT_P (XEXP (lhs, 1)) && INTVAL (XEXP (lhs, 1)) < 0))
8114 : : {
8115 : 45181 : tem = simplify_gen_binary (MULT, mode, XEXP (lhs, 0),
8116 : : simplify_gen_unary (NEG, mode,
8117 : : XEXP (lhs, 1),
8118 : : mode));
8119 : 45181 : new_rtx = simplify_gen_binary (MINUS, mode, rhs, tem);
8120 : : }
8121 : : else
8122 : : {
8123 : 45683863 : SUBST (XEXP (x, 0), lhs);
8124 : 45683863 : SUBST (XEXP (x, 1), rhs);
8125 : : }
8126 : 45729044 : maybe_swap_commutative_operands (x);
8127 : 45729044 : return x;
8128 : :
8129 : 3567279 : case MINUS:
8130 : 3567279 : lhs = XEXP (x, 0);
8131 : 3567279 : rhs = XEXP (x, 1);
8132 : 3567279 : lhs = make_compound_operation (lhs, next_code);
8133 : 3567279 : rhs = make_compound_operation (rhs, next_code);
8134 : 3567279 : if (GET_CODE (rhs) == MULT && GET_CODE (XEXP (rhs, 0)) == NEG)
8135 : : {
8136 : 0 : tem = simplify_gen_binary (MULT, mode, XEXP (XEXP (rhs, 0), 0),
8137 : : XEXP (rhs, 1));
8138 : 0 : return simplify_gen_binary (PLUS, mode, tem, lhs);
8139 : : }
8140 : 3567279 : else if (GET_CODE (rhs) == MULT
8141 : 86125 : && (CONST_INT_P (XEXP (rhs, 1)) && INTVAL (XEXP (rhs, 1)) < 0))
8142 : : {
8143 : 225 : tem = simplify_gen_binary (MULT, mode, XEXP (rhs, 0),
8144 : : simplify_gen_unary (NEG, mode,
8145 : : XEXP (rhs, 1),
8146 : : mode));
8147 : 225 : return simplify_gen_binary (PLUS, mode, tem, lhs);
8148 : : }
8149 : : else
8150 : : {
8151 : 3567054 : SUBST (XEXP (x, 0), lhs);
8152 : 3567054 : SUBST (XEXP (x, 1), rhs);
8153 : 3567054 : return x;
8154 : : }
8155 : :
8156 : 6805210 : case AND:
8157 : : /* If the second operand is not a constant, we can't do anything
8158 : : with it. */
8159 : 6805210 : if (!CONST_INT_P (XEXP (x, 1)))
8160 : : break;
8161 : :
8162 : : /* If the constant is a power of two minus one and the first operand
8163 : : is a logical right shift, make an extraction. */
8164 : 5437800 : if (GET_CODE (XEXP (x, 0)) == LSHIFTRT
8165 : 5437800 : && (i = exact_log2 (UINTVAL (XEXP (x, 1)) + 1)) >= 0)
8166 : : {
8167 : 522949 : new_rtx = make_compound_operation (XEXP (XEXP (x, 0), 0), next_code);
8168 : 522949 : new_rtx = make_extraction (mode, new_rtx, 0, XEXP (XEXP (x, 0), 1),
8169 : : i, true, false, in_code == COMPARE);
8170 : : }
8171 : :
8172 : : /* Same as previous, but for (subreg (lshiftrt ...)) in first op. */
8173 : 4914851 : else if (GET_CODE (XEXP (x, 0)) == SUBREG
8174 : 1260456 : && subreg_lowpart_p (XEXP (x, 0))
8175 : 6131681 : && is_a <scalar_int_mode> (GET_MODE (SUBREG_REG (XEXP (x, 0))),
8176 : : &inner_mode)
8177 : 1257293 : && GET_CODE (SUBREG_REG (XEXP (x, 0))) == LSHIFTRT
8178 : 4956981 : && (i = exact_log2 (UINTVAL (XEXP (x, 1)) + 1)) >= 0)
8179 : : {
8180 : 40463 : rtx inner_x0 = SUBREG_REG (XEXP (x, 0));
8181 : 40463 : new_rtx = make_compound_operation (XEXP (inner_x0, 0), next_code);
8182 : 40463 : new_rtx = make_extraction (inner_mode, new_rtx, 0,
8183 : : XEXP (inner_x0, 1),
8184 : : i, true, false, in_code == COMPARE);
8185 : :
8186 : : /* If we narrowed the mode when dropping the subreg, then we lose. */
8187 : 121389 : if (GET_MODE_SIZE (inner_mode) < GET_MODE_SIZE (mode))
8188 : 40463 : new_rtx = NULL;
8189 : :
8190 : : /* If that didn't give anything, see if the AND simplifies on
8191 : : its own. */
8192 : 40463 : if (!new_rtx && i >= 0)
8193 : : {
8194 : 3571 : new_rtx = make_compound_operation (XEXP (x, 0), next_code);
8195 : 3571 : new_rtx = make_extraction (mode, new_rtx, 0, NULL_RTX, i,
8196 : : true, false, in_code == COMPARE);
8197 : : }
8198 : : }
8199 : : /* Same as previous, but for (xor/ior (lshiftrt...) (lshiftrt...)). */
8200 : 4874388 : else if ((GET_CODE (XEXP (x, 0)) == XOR
8201 : 4874388 : || GET_CODE (XEXP (x, 0)) == IOR)
8202 : 21770 : && GET_CODE (XEXP (XEXP (x, 0), 0)) == LSHIFTRT
8203 : 1575 : && GET_CODE (XEXP (XEXP (x, 0), 1)) == LSHIFTRT
8204 : 4874398 : && (i = exact_log2 (UINTVAL (XEXP (x, 1)) + 1)) >= 0)
8205 : : {
8206 : : /* Apply the distributive law, and then try to make extractions. */
8207 : 10 : new_rtx = gen_rtx_fmt_ee (GET_CODE (XEXP (x, 0)), mode,
8208 : : gen_rtx_AND (mode, XEXP (XEXP (x, 0), 0),
8209 : : XEXP (x, 1)),
8210 : : gen_rtx_AND (mode, XEXP (XEXP (x, 0), 1),
8211 : : XEXP (x, 1)));
8212 : 10 : new_rtx = make_compound_operation (new_rtx, in_code);
8213 : : }
8214 : :
8215 : : /* If we are have (and (rotate X C) M) and C is larger than the number
8216 : : of bits in M, this is an extraction. */
8217 : :
8218 : 4874378 : else if (GET_CODE (XEXP (x, 0)) == ROTATE
8219 : 1464 : && CONST_INT_P (XEXP (XEXP (x, 0), 1))
8220 : 1464 : && (i = exact_log2 (UINTVAL (XEXP (x, 1)) + 1)) >= 0
8221 : 4874408 : && i <= INTVAL (XEXP (XEXP (x, 0), 1)))
8222 : : {
8223 : 2 : new_rtx = make_compound_operation (XEXP (XEXP (x, 0), 0), next_code);
8224 : 2 : new_rtx = make_extraction (mode, new_rtx,
8225 : 2 : (GET_MODE_PRECISION (mode)
8226 : 2 : - INTVAL (XEXP (XEXP (x, 0), 1))),
8227 : : NULL_RTX, i, true, false,
8228 : : in_code == COMPARE);
8229 : : }
8230 : :
8231 : : /* On machines without logical shifts, if the operand of the AND is
8232 : : a logical shift and our mask turns off all the propagated sign
8233 : : bits, we can replace the logical shift with an arithmetic shift. */
8234 : 4874376 : else if (GET_CODE (XEXP (x, 0)) == LSHIFTRT
8235 : 78517 : && !have_insn_for (LSHIFTRT, mode)
8236 : 0 : && have_insn_for (ASHIFTRT, mode)
8237 : 0 : && CONST_INT_P (XEXP (XEXP (x, 0), 1))
8238 : 0 : && INTVAL (XEXP (XEXP (x, 0), 1)) >= 0
8239 : 0 : && INTVAL (XEXP (XEXP (x, 0), 1)) < HOST_BITS_PER_WIDE_INT
8240 : 4874376 : && mode_width <= HOST_BITS_PER_WIDE_INT)
8241 : : {
8242 : 0 : unsigned HOST_WIDE_INT mask = GET_MODE_MASK (mode);
8243 : :
8244 : 0 : mask >>= INTVAL (XEXP (XEXP (x, 0), 1));
8245 : 0 : if ((INTVAL (XEXP (x, 1)) & ~mask) == 0)
8246 : 0 : SUBST (XEXP (x, 0),
8247 : : gen_rtx_ASHIFTRT (mode,
8248 : : make_compound_operation (XEXP (XEXP (x,
8249 : : 0),
8250 : : 0),
8251 : : next_code),
8252 : : XEXP (XEXP (x, 0), 1)));
8253 : : }
8254 : :
8255 : : /* If the constant is one less than a power of two, this might be
8256 : : representable by an extraction even if no shift is present.
8257 : : If it doesn't end up being a ZERO_EXTEND, we will ignore it unless
8258 : : we are in a COMPARE. */
8259 : 4874376 : else if ((i = exact_log2 (UINTVAL (XEXP (x, 1)) + 1)) >= 0)
8260 : 2326666 : new_rtx = make_extraction (mode,
8261 : : make_compound_operation (XEXP (x, 0),
8262 : : next_code),
8263 : : 0, NULL_RTX, i,
8264 : : true, false, in_code == COMPARE);
8265 : :
8266 : : /* If we are in a comparison and this is an AND with a power of two,
8267 : : convert this into the appropriate bit extract. */
8268 : 2547710 : else if (in_code == COMPARE
8269 : 468813 : && (i = exact_log2 (UINTVAL (XEXP (x, 1)))) >= 0
8270 : 2618992 : && (equality_comparison || i < GET_MODE_PRECISION (mode) - 1))
8271 : 71282 : new_rtx = make_extraction (mode,
8272 : : make_compound_operation (XEXP (x, 0),
8273 : : next_code),
8274 : : i, NULL_RTX, 1, true, false, true);
8275 : :
8276 : : /* If the one operand is a paradoxical subreg of a register or memory and
8277 : : the constant (limited to the smaller mode) has only zero bits where
8278 : : the sub expression has known zero bits, this can be expressed as
8279 : : a zero_extend. */
8280 : 2476428 : else if (GET_CODE (XEXP (x, 0)) == SUBREG)
8281 : : {
8282 : 69942 : rtx sub;
8283 : :
8284 : 69942 : sub = XEXP (XEXP (x, 0), 0);
8285 : 69942 : machine_mode sub_mode = GET_MODE (sub);
8286 : 69942 : int sub_width;
8287 : 30681 : if ((REG_P (sub) || MEM_P (sub))
8288 : 39903 : && GET_MODE_PRECISION (sub_mode).is_constant (&sub_width)
8289 : 39903 : && sub_width < mode_width
8290 : 69942 : && (!WORD_REGISTER_OPERATIONS
8291 : : || sub_width >= BITS_PER_WORD
8292 : : /* On WORD_REGISTER_OPERATIONS targets the bits
8293 : : beyond sub_mode aren't considered undefined,
8294 : : so optimize only if it is a MEM load when MEM loads
8295 : : zero extend, because then the upper bits are all zero. */
8296 : : || (MEM_P (sub)
8297 : : && load_extend_op (sub_mode) == ZERO_EXTEND)))
8298 : : {
8299 : 28807 : unsigned HOST_WIDE_INT mode_mask = GET_MODE_MASK (sub_mode);
8300 : 28807 : unsigned HOST_WIDE_INT mask;
8301 : :
8302 : : /* Original AND constant with all the known zero bits set. */
8303 : 28807 : mask = UINTVAL (XEXP (x, 1)) | (~nonzero_bits (sub, sub_mode));
8304 : 28807 : if ((mask & mode_mask) == mode_mask)
8305 : : {
8306 : 22652 : new_rtx = make_compound_operation (sub, next_code);
8307 : 22652 : new_rtx = make_extraction (mode, new_rtx, 0, 0, sub_width,
8308 : : true, false, in_code == COMPARE);
8309 : : }
8310 : : }
8311 : : }
8312 : :
8313 : : break;
8314 : :
8315 : 1825367 : case LSHIFTRT:
8316 : : /* If the sign bit is known to be zero, replace this with an
8317 : : arithmetic shift. */
8318 : 1825367 : if (have_insn_for (ASHIFTRT, mode)
8319 : 1825367 : && ! have_insn_for (LSHIFTRT, mode)
8320 : 0 : && mode_width <= HOST_BITS_PER_WIDE_INT
8321 : 1825367 : && (nonzero_bits (XEXP (x, 0), mode) & (1 << (mode_width - 1))) == 0)
8322 : : {
8323 : 0 : new_rtx = gen_rtx_ASHIFTRT (mode,
8324 : : make_compound_operation (XEXP (x, 0),
8325 : : next_code),
8326 : : XEXP (x, 1));
8327 : 0 : break;
8328 : : }
8329 : :
8330 : : /* fall through */
8331 : :
8332 : 4284282 : case ASHIFTRT:
8333 : 4284282 : lhs = XEXP (x, 0);
8334 : 4284282 : rhs = XEXP (x, 1);
8335 : :
8336 : : /* If we have (ashiftrt (ashift foo C1) C2) with C2 >= C1,
8337 : : this is a SIGN_EXTRACT. */
8338 : 4284282 : if (CONST_INT_P (rhs)
8339 : 4141928 : && GET_CODE (lhs) == ASHIFT
8340 : 1199788 : && CONST_INT_P (XEXP (lhs, 1))
8341 : 1194560 : && INTVAL (rhs) >= INTVAL (XEXP (lhs, 1))
8342 : 932985 : && INTVAL (XEXP (lhs, 1)) >= 0
8343 : 932981 : && INTVAL (rhs) < mode_width)
8344 : : {
8345 : 932981 : new_rtx = make_compound_operation (XEXP (lhs, 0), next_code);
8346 : 932981 : new_rtx = make_extraction (mode, new_rtx,
8347 : 932981 : INTVAL (rhs) - INTVAL (XEXP (lhs, 1)),
8348 : 932981 : NULL_RTX, mode_width - INTVAL (rhs),
8349 : : code == LSHIFTRT, false,
8350 : : in_code == COMPARE);
8351 : 932981 : break;
8352 : : }
8353 : :
8354 : : /* See if we have operations between an ASHIFTRT and an ASHIFT.
8355 : : If so, try to merge the shifts into a SIGN_EXTEND. We could
8356 : : also do this for some cases of SIGN_EXTRACT, but it doesn't
8357 : : seem worth the effort; the case checked for occurs on Alpha. */
8358 : :
8359 : 3351301 : if (!OBJECT_P (lhs)
8360 : 1557295 : && ! (GET_CODE (lhs) == SUBREG
8361 : 135595 : && (OBJECT_P (SUBREG_REG (lhs))))
8362 : 1488822 : && CONST_INT_P (rhs)
8363 : 1472124 : && INTVAL (rhs) >= 0
8364 : 1472124 : && INTVAL (rhs) < HOST_BITS_PER_WIDE_INT
8365 : 1467462 : && INTVAL (rhs) < mode_width
8366 : 4818763 : && (new_rtx = extract_left_shift (mode, lhs, INTVAL (rhs))) != 0)
8367 : 270770 : new_rtx = make_extraction (mode, make_compound_operation (new_rtx,
8368 : : next_code),
8369 : 270770 : 0, NULL_RTX, mode_width - INTVAL (rhs),
8370 : : code == LSHIFTRT, false, in_code == COMPARE);
8371 : :
8372 : : break;
8373 : :
8374 : 8750592 : case SUBREG:
8375 : : /* Call ourselves recursively on the inner expression. If we are
8376 : : narrowing the object and it has a different RTL code from
8377 : : what it originally did, do this SUBREG as a force_to_mode. */
8378 : 8750592 : {
8379 : 8750592 : rtx inner = SUBREG_REG (x), simplified;
8380 : 8750592 : enum rtx_code subreg_code = in_code;
8381 : :
8382 : : /* If the SUBREG is masking of a logical right shift,
8383 : : make an extraction. */
8384 : 8750592 : if (GET_CODE (inner) == LSHIFTRT
8385 : 8763243 : && is_a <scalar_int_mode> (GET_MODE (inner), &inner_mode)
8386 : 620178 : && GET_MODE_SIZE (mode) < GET_MODE_SIZE (inner_mode)
8387 : 302767 : && CONST_INT_P (XEXP (inner, 1))
8388 : 298288 : && UINTVAL (XEXP (inner, 1)) < GET_MODE_PRECISION (inner_mode)
8389 : 9048880 : && subreg_lowpart_p (x))
8390 : : {
8391 : 297438 : new_rtx = make_compound_operation (XEXP (inner, 0), next_code);
8392 : 297438 : int width = GET_MODE_PRECISION (inner_mode)
8393 : 297438 : - INTVAL (XEXP (inner, 1));
8394 : 297438 : if (width > mode_width)
8395 : : width = mode_width;
8396 : 297438 : new_rtx = make_extraction (mode, new_rtx, 0, XEXP (inner, 1),
8397 : : width, true, false, in_code == COMPARE);
8398 : 297438 : break;
8399 : : }
8400 : :
8401 : : /* If in_code is COMPARE, it isn't always safe to pass it through
8402 : : to the recursive make_compound_operation call. */
8403 : 8453154 : if (subreg_code == COMPARE
8404 : 8453154 : && (!subreg_lowpart_p (x)
8405 : 177603 : || GET_CODE (inner) == SUBREG
8406 : : /* (subreg:SI (and:DI (reg:DI) (const_int 0x800000000)) 0)
8407 : : is (const_int 0), rather than
8408 : : (subreg:SI (lshiftrt:DI (reg:DI) (const_int 35)) 0).
8409 : : Similarly (subreg:QI (and:SI (reg:SI) (const_int 0x80)) 0)
8410 : : for non-equality comparisons against 0 is not equivalent
8411 : : to (subreg:QI (lshiftrt:SI (reg:SI) (const_int 7)) 0). */
8412 : 177603 : || (GET_CODE (inner) == AND
8413 : 1003 : && CONST_INT_P (XEXP (inner, 1))
8414 : 321 : && partial_subreg_p (x)
8415 : 642 : && exact_log2 (UINTVAL (XEXP (inner, 1)))
8416 : 321 : >= GET_MODE_BITSIZE (mode) - 1)))
8417 : : subreg_code = SET;
8418 : :
8419 : 8453154 : tem = make_compound_operation (inner, subreg_code);
8420 : :
8421 : 8453154 : simplified
8422 : 8453154 : = simplify_subreg (mode, tem, GET_MODE (inner), SUBREG_BYTE (x));
8423 : 8453154 : if (simplified)
8424 : 17221 : tem = simplified;
8425 : :
8426 : 8453154 : if (GET_CODE (tem) != GET_CODE (inner)
8427 : 24612 : && partial_subreg_p (x)
8428 : 8475778 : && subreg_lowpart_p (x))
8429 : : {
8430 : 22598 : rtx newer
8431 : 22598 : = force_to_mode (tem, mode, HOST_WIDE_INT_M1U, false);
8432 : :
8433 : : /* If we have something other than a SUBREG, we might have
8434 : : done an expansion, so rerun ourselves. */
8435 : 22598 : if (GET_CODE (newer) != SUBREG)
8436 : 9223 : newer = make_compound_operation (newer, in_code);
8437 : :
8438 : : /* force_to_mode can expand compounds. If it just re-expanded
8439 : : the compound, use gen_lowpart to convert to the desired
8440 : : mode. */
8441 : 22598 : if (rtx_equal_p (newer, x)
8442 : : /* Likewise if it re-expanded the compound only partially.
8443 : : This happens for SUBREG of ZERO_EXTRACT if they extract
8444 : : the same number of bits. */
8445 : 22598 : || (GET_CODE (newer) == SUBREG
8446 : 13186 : && (GET_CODE (SUBREG_REG (newer)) == LSHIFTRT
8447 : 13186 : || GET_CODE (SUBREG_REG (newer)) == ASHIFTRT)
8448 : 3701 : && GET_CODE (inner) == AND
8449 : 3535 : && rtx_equal_p (SUBREG_REG (newer), XEXP (inner, 0))))
8450 : 5122 : return gen_lowpart (GET_MODE (x), tem);
8451 : :
8452 : 17476 : return newer;
8453 : : }
8454 : :
8455 : 8430556 : if (simplified)
8456 : : return tem;
8457 : : }
8458 : : break;
8459 : :
8460 : : default:
8461 : : break;
8462 : : }
8463 : :
8464 : 9485245 : if (new_rtx)
8465 : 5195312 : *x_ptr = gen_lowpart (mode, new_rtx);
8466 : 202221386 : *next_code_ptr = next_code;
8467 : 202221386 : return NULL_RTX;
8468 : : }
8469 : :
8470 : : /* Look at the expression rooted at X. Look for expressions
8471 : : equivalent to ZERO_EXTRACT, SIGN_EXTRACT, ZERO_EXTEND, SIGN_EXTEND.
8472 : : Form these expressions.
8473 : :
8474 : : Return the new rtx, usually just X.
8475 : :
8476 : : Also, for machines like the VAX that don't have logical shift insns,
8477 : : try to convert logical to arithmetic shift operations in cases where
8478 : : they are equivalent. This undoes the canonicalizations to logical
8479 : : shifts done elsewhere.
8480 : :
8481 : : We try, as much as possible, to re-use rtl expressions to save memory.
8482 : :
8483 : : IN_CODE says what kind of expression we are processing. Normally, it is
8484 : : SET. In a memory address it is MEM. When processing the arguments of
8485 : : a comparison or a COMPARE against zero, it is COMPARE, or EQ if more
8486 : : precisely it is an equality comparison against zero. */
8487 : :
8488 : : rtx
8489 : 424081114 : make_compound_operation (rtx x, enum rtx_code in_code)
8490 : : {
8491 : 424081114 : enum rtx_code code = GET_CODE (x);
8492 : 424081114 : const char *fmt;
8493 : 424081114 : int i, j;
8494 : 424081114 : enum rtx_code next_code;
8495 : 424081114 : rtx new_rtx, tem;
8496 : :
8497 : : /* Select the code to be used in recursive calls. Once we are inside an
8498 : : address, we stay there. If we have a comparison, set to COMPARE,
8499 : : but once inside, go back to our default of SET. */
8500 : :
8501 : 424081114 : next_code = (code == MEM ? MEM
8502 : 399874775 : : ((code == COMPARE || COMPARISON_P (x))
8503 : 417403092 : && XEXP (x, 1) == const0_rtx) ? COMPARE
8504 : 392811409 : : in_code == COMPARE || in_code == EQ ? SET : in_code);
8505 : :
8506 : 424081114 : scalar_int_mode mode;
8507 : 424081114 : if (is_a <scalar_int_mode> (GET_MODE (x), &mode))
8508 : : {
8509 : 251541019 : rtx new_rtx = make_compound_operation_int (mode, &x, in_code,
8510 : : &next_code);
8511 : 251541019 : if (new_rtx)
8512 : : return new_rtx;
8513 : 202221386 : code = GET_CODE (x);
8514 : : }
8515 : :
8516 : : /* Now recursively process each operand of this operation. We need to
8517 : : handle ZERO_EXTEND specially so that we don't lose track of the
8518 : : inner mode. */
8519 : 374761481 : if (code == ZERO_EXTEND)
8520 : : {
8521 : 3187639 : new_rtx = make_compound_operation (XEXP (x, 0), next_code);
8522 : 6375278 : tem = simplify_unary_operation (ZERO_EXTEND, GET_MODE (x),
8523 : 3187639 : new_rtx, GET_MODE (XEXP (x, 0)));
8524 : 3187639 : if (tem)
8525 : : return tem;
8526 : 3175420 : SUBST (XEXP (x, 0), new_rtx);
8527 : 3175420 : return x;
8528 : : }
8529 : :
8530 : 371573842 : fmt = GET_RTX_FORMAT (code);
8531 : 862745153 : for (i = 0; i < GET_RTX_LENGTH (code); i++)
8532 : 491171311 : if (fmt[i] == 'e')
8533 : : {
8534 : 188198833 : new_rtx = make_compound_operation (XEXP (x, i), next_code);
8535 : 188198833 : SUBST (XEXP (x, i), new_rtx);
8536 : : }
8537 : 302972478 : else if (fmt[i] == 'E')
8538 : 22089255 : for (j = 0; j < XVECLEN (x, i); j++)
8539 : : {
8540 : 16071956 : new_rtx = make_compound_operation (XVECEXP (x, i, j), next_code);
8541 : 16071956 : SUBST (XVECEXP (x, i, j), new_rtx);
8542 : : }
8543 : :
8544 : 371573842 : maybe_swap_commutative_operands (x);
8545 : 371573842 : return x;
8546 : : }
8547 : :
8548 : : /* Given M see if it is a value that would select a field of bits
8549 : : within an item, but not the entire word. Return -1 if not.
8550 : : Otherwise, return the starting position of the field, where 0 is the
8551 : : low-order bit.
8552 : :
8553 : : *PLEN is set to the length of the field. */
8554 : :
8555 : : static int
8556 : 7945 : get_pos_from_mask (unsigned HOST_WIDE_INT m, unsigned HOST_WIDE_INT *plen)
8557 : : {
8558 : : /* Get the bit number of the first 1 bit from the right, -1 if none. */
8559 : 7945 : int pos = m ? ctz_hwi (m) : -1;
8560 : 7945 : int len = 0;
8561 : :
8562 : 7945 : if (pos >= 0)
8563 : : /* Now shift off the low-order zero bits and see if we have a
8564 : : power of two minus 1. */
8565 : 7945 : len = exact_log2 ((m >> pos) + 1);
8566 : :
8567 : 6566 : if (len <= 0)
8568 : : pos = -1;
8569 : :
8570 : 7945 : *plen = len;
8571 : 7945 : return pos;
8572 : : }
8573 : :
8574 : : /* If X refers to a register that equals REG in value, replace these
8575 : : references with REG. */
8576 : : static rtx
8577 : 12596 : canon_reg_for_combine (rtx x, rtx reg)
8578 : : {
8579 : 12596 : rtx op0, op1, op2;
8580 : 12596 : const char *fmt;
8581 : 12596 : int i;
8582 : 12596 : bool copied;
8583 : :
8584 : 12596 : enum rtx_code code = GET_CODE (x);
8585 : 12596 : switch (GET_RTX_CLASS (code))
8586 : : {
8587 : 0 : case RTX_UNARY:
8588 : 0 : op0 = canon_reg_for_combine (XEXP (x, 0), reg);
8589 : 0 : if (op0 != XEXP (x, 0))
8590 : 0 : return simplify_gen_unary (GET_CODE (x), GET_MODE (x), op0,
8591 : 0 : GET_MODE (reg));
8592 : : break;
8593 : :
8594 : 2951 : case RTX_BIN_ARITH:
8595 : 2951 : case RTX_COMM_ARITH:
8596 : 2951 : op0 = canon_reg_for_combine (XEXP (x, 0), reg);
8597 : 2951 : op1 = canon_reg_for_combine (XEXP (x, 1), reg);
8598 : 2951 : if (op0 != XEXP (x, 0) || op1 != XEXP (x, 1))
8599 : 0 : return simplify_gen_binary (GET_CODE (x), GET_MODE (x), op0, op1);
8600 : : break;
8601 : :
8602 : 13 : case RTX_COMPARE:
8603 : 13 : case RTX_COMM_COMPARE:
8604 : 13 : op0 = canon_reg_for_combine (XEXP (x, 0), reg);
8605 : 13 : op1 = canon_reg_for_combine (XEXP (x, 1), reg);
8606 : 13 : if (op0 != XEXP (x, 0) || op1 != XEXP (x, 1))
8607 : 0 : return simplify_gen_relational (GET_CODE (x), GET_MODE (x),
8608 : 0 : GET_MODE (op0), op0, op1);
8609 : : break;
8610 : :
8611 : 0 : case RTX_TERNARY:
8612 : 0 : case RTX_BITFIELD_OPS:
8613 : 0 : op0 = canon_reg_for_combine (XEXP (x, 0), reg);
8614 : 0 : op1 = canon_reg_for_combine (XEXP (x, 1), reg);
8615 : 0 : op2 = canon_reg_for_combine (XEXP (x, 2), reg);
8616 : 0 : if (op0 != XEXP (x, 0) || op1 != XEXP (x, 1) || op2 != XEXP (x, 2))
8617 : 0 : return simplify_gen_ternary (GET_CODE (x), GET_MODE (x),
8618 : 0 : GET_MODE (op0), op0, op1, op2);
8619 : : /* FALLTHRU */
8620 : :
8621 : 5950 : case RTX_OBJ:
8622 : 5950 : if (REG_P (x))
8623 : : {
8624 : 5944 : if (rtx_equal_p (get_last_value (reg), x)
8625 : 5944 : || rtx_equal_p (reg, get_last_value (x)))
8626 : 0 : return reg;
8627 : : else
8628 : : break;
8629 : : }
8630 : :
8631 : : /* fall through */
8632 : :
8633 : 3688 : default:
8634 : 3688 : fmt = GET_RTX_FORMAT (code);
8635 : 3688 : copied = false;
8636 : 7588 : for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
8637 : 3900 : if (fmt[i] == 'e')
8638 : : {
8639 : 212 : rtx op = canon_reg_for_combine (XEXP (x, i), reg);
8640 : 212 : if (op != XEXP (x, i))
8641 : : {
8642 : 0 : if (!copied)
8643 : : {
8644 : 0 : copied = true;
8645 : 0 : x = copy_rtx (x);
8646 : : }
8647 : 0 : XEXP (x, i) = op;
8648 : : }
8649 : : }
8650 : 3688 : else if (fmt[i] == 'E')
8651 : : {
8652 : : int j;
8653 : 0 : for (j = 0; j < XVECLEN (x, i); j++)
8654 : : {
8655 : 0 : rtx op = canon_reg_for_combine (XVECEXP (x, i, j), reg);
8656 : 0 : if (op != XVECEXP (x, i, j))
8657 : : {
8658 : 0 : if (!copied)
8659 : : {
8660 : 0 : copied = true;
8661 : 0 : x = copy_rtx (x);
8662 : : }
8663 : 0 : XVECEXP (x, i, j) = op;
8664 : : }
8665 : : }
8666 : : }
8667 : :
8668 : : break;
8669 : : }
8670 : :
8671 : : return x;
8672 : : }
8673 : :
8674 : : /* Return X converted to MODE. If the value is already truncated to
8675 : : MODE we can just return a subreg even though in the general case we
8676 : : would need an explicit truncation. */
8677 : :
8678 : : static rtx
8679 : 111779087 : gen_lowpart_or_truncate (machine_mode mode, rtx x)
8680 : : {
8681 : 111779087 : if (!CONST_INT_P (x)
8682 : 106132922 : && partial_subreg_p (mode, GET_MODE (x))
8683 : 111779087 : && !TRULY_NOOP_TRUNCATION_MODES_P (mode, GET_MODE (x))
8684 : 111779087 : && !(REG_P (x) && reg_truncated_to_mode (mode, x)))
8685 : : {
8686 : : /* Bit-cast X into an integer mode. */
8687 : 0 : if (!SCALAR_INT_MODE_P (GET_MODE (x)))
8688 : 0 : x = gen_lowpart (int_mode_for_mode (GET_MODE (x)).require (), x);
8689 : 0 : x = simplify_gen_unary (TRUNCATE, int_mode_for_mode (mode).require (),
8690 : 0 : x, GET_MODE (x));
8691 : : }
8692 : :
8693 : 111779087 : return gen_lowpart (mode, x);
8694 : : }
8695 : :
8696 : : /* See if X can be simplified knowing that we will only refer to it in
8697 : : MODE and will only refer to those bits that are nonzero in MASK.
8698 : : If other bits are being computed or if masking operations are done
8699 : : that select a superset of the bits in MASK, they can sometimes be
8700 : : ignored.
8701 : :
8702 : : Return a possibly simplified expression, but always convert X to
8703 : : MODE. If X is a CONST_INT, AND the CONST_INT with MASK.
8704 : :
8705 : : If JUST_SELECT is true, don't optimize by noticing that bits in MASK
8706 : : are all off in X. This is used when X will be complemented, by either
8707 : : NOT, NEG, or XOR. */
8708 : :
8709 : : static rtx
8710 : 83449971 : force_to_mode (rtx x, machine_mode mode, unsigned HOST_WIDE_INT mask,
8711 : : bool just_select)
8712 : : {
8713 : 83449971 : enum rtx_code code = GET_CODE (x);
8714 : 83449971 : bool next_select = just_select || code == XOR || code == NOT || code == NEG;
8715 : 83449971 : machine_mode op_mode;
8716 : 83449971 : unsigned HOST_WIDE_INT nonzero;
8717 : :
8718 : : /* If this is a CALL or ASM_OPERANDS, don't do anything. Some of the
8719 : : code below will do the wrong thing since the mode of such an
8720 : : expression is VOIDmode.
8721 : :
8722 : : Also do nothing if X is a CLOBBER; this can happen if X was
8723 : : the return value from a call to gen_lowpart. */
8724 : 83449971 : if (code == CALL || code == ASM_OPERANDS || code == CLOBBER)
8725 : : return x;
8726 : :
8727 : : /* We want to perform the operation in its present mode unless we know
8728 : : that the operation is valid in MODE, in which case we do the operation
8729 : : in MODE. */
8730 : 166737446 : op_mode = ((GET_MODE_CLASS (mode) == GET_MODE_CLASS (GET_MODE (x))
8731 : 77258324 : && have_insn_for (code, mode))
8732 : 130041569 : ? mode : GET_MODE (x));
8733 : :
8734 : : /* It is not valid to do a right-shift in a narrower mode
8735 : : than the one it came in with. */
8736 : 83368723 : if ((code == LSHIFTRT || code == ASHIFTRT)
8737 : 83368723 : && partial_subreg_p (mode, GET_MODE (x)))
8738 : 435004 : op_mode = GET_MODE (x);
8739 : :
8740 : : /* Truncate MASK to fit OP_MODE. */
8741 : 83368723 : if (op_mode)
8742 : 77287568 : mask &= GET_MODE_MASK (op_mode);
8743 : :
8744 : : /* Determine what bits of X are guaranteed to be (non)zero. */
8745 : 83368723 : nonzero = nonzero_bits (x, mode);
8746 : :
8747 : : /* If none of the bits in X are needed, return a zero. */
8748 : 83368723 : if (!just_select && (nonzero & mask) == 0 && !side_effects_p (x))
8749 : 528696 : x = const0_rtx;
8750 : :
8751 : : /* If X is a CONST_INT, return a new one. Do this here since the
8752 : : test below will fail. */
8753 : 83368723 : if (CONST_INT_P (x))
8754 : : {
8755 : 6166084 : if (SCALAR_INT_MODE_P (mode))
8756 : 6166084 : return gen_int_mode (INTVAL (x) & mask, mode);
8757 : : else
8758 : : {
8759 : 0 : x = GEN_INT (INTVAL (x) & mask);
8760 : 0 : return gen_lowpart_common (mode, x);
8761 : : }
8762 : : }
8763 : :
8764 : : /* If X is narrower than MODE and we want all the bits in X's mode, just
8765 : : get X in the proper mode. */
8766 : 77202639 : if (paradoxical_subreg_p (mode, GET_MODE (x))
8767 : 77202639 : && (GET_MODE_MASK (GET_MODE (x)) & ~mask) == 0)
8768 : 2776034 : return gen_lowpart (mode, x);
8769 : :
8770 : : /* We can ignore the effect of a SUBREG if it narrows the mode or
8771 : : if the constant masks to zero all the bits the mode doesn't have. */
8772 : 74426605 : if (GET_CODE (x) == SUBREG
8773 : 6596522 : && subreg_lowpart_p (x)
8774 : 80862889 : && (partial_subreg_p (x)
8775 : 4519645 : || (mask
8776 : 4519645 : & GET_MODE_MASK (GET_MODE (x))
8777 : 4519645 : & ~GET_MODE_MASK (GET_MODE (SUBREG_REG (x)))) == 0))
8778 : 6413240 : return force_to_mode (SUBREG_REG (x), mode, mask, next_select);
8779 : :
8780 : 68013365 : scalar_int_mode int_mode, xmode;
8781 : 68013365 : if (is_a <scalar_int_mode> (mode, &int_mode)
8782 : 68013365 : && is_a <scalar_int_mode> (GET_MODE (x), &xmode))
8783 : : /* OP_MODE is either MODE or XMODE, so it must be a scalar
8784 : : integer too. */
8785 : 67984514 : return force_int_to_mode (x, int_mode, xmode,
8786 : : as_a <scalar_int_mode> (op_mode),
8787 : 67984514 : mask, just_select);
8788 : :
8789 : 28851 : return gen_lowpart_or_truncate (mode, x);
8790 : : }
8791 : :
8792 : : /* Subroutine of force_to_mode that handles cases in which both X and
8793 : : the result are scalar integers. MODE is the mode of the result,
8794 : : XMODE is the mode of X, and OP_MODE says which of MODE or XMODE
8795 : : is preferred for simplified versions of X. The other arguments
8796 : : are as for force_to_mode. */
8797 : :
8798 : : static rtx
8799 : 67984514 : force_int_to_mode (rtx x, scalar_int_mode mode, scalar_int_mode xmode,
8800 : : scalar_int_mode op_mode, unsigned HOST_WIDE_INT mask,
8801 : : bool just_select)
8802 : : {
8803 : 67984514 : enum rtx_code code = GET_CODE (x);
8804 : 67984514 : bool next_select = just_select || code == XOR || code == NOT || code == NEG;
8805 : 67984514 : unsigned HOST_WIDE_INT fuller_mask;
8806 : 67984514 : rtx op0, op1, temp;
8807 : 67984514 : poly_int64 const_op0;
8808 : :
8809 : : /* When we have an arithmetic operation, or a shift whose count we
8810 : : do not know, we need to assume that all bits up to the highest-order
8811 : : bit in MASK will be needed. This is how we form such a mask. */
8812 : 67984514 : if (mask & (HOST_WIDE_INT_1U << (HOST_BITS_PER_WIDE_INT - 1)))
8813 : : fuller_mask = HOST_WIDE_INT_M1U;
8814 : : else
8815 : 75362688 : fuller_mask = ((HOST_WIDE_INT_1U << (floor_log2 (mask) + 1)) - 1);
8816 : :
8817 : 67984514 : switch (code)
8818 : : {
8819 : : case CLOBBER:
8820 : : /* If X is a (clobber (const_int)), return it since we know we are
8821 : : generating something that won't match. */
8822 : : return x;
8823 : :
8824 : 348173 : case SIGN_EXTEND:
8825 : 348173 : case ZERO_EXTEND:
8826 : 348173 : case ZERO_EXTRACT:
8827 : 348173 : case SIGN_EXTRACT:
8828 : 348173 : x = expand_compound_operation (x);
8829 : 348173 : if (GET_CODE (x) != code)
8830 : 212925 : return force_to_mode (x, mode, mask, next_select);
8831 : : break;
8832 : :
8833 : 141 : case TRUNCATE:
8834 : : /* Similarly for a truncate. */
8835 : 141 : return force_to_mode (XEXP (x, 0), mode, mask, next_select);
8836 : :
8837 : 3552748 : case AND:
8838 : : /* If this is an AND with a constant, convert it into an AND
8839 : : whose constant is the AND of that constant with MASK. If it
8840 : : remains an AND of MASK, delete it since it is redundant. */
8841 : :
8842 : 3552748 : if (CONST_INT_P (XEXP (x, 1)))
8843 : : {
8844 : 5700434 : x = simplify_and_const_int (x, op_mode, XEXP (x, 0),
8845 : 2850217 : mask & INTVAL (XEXP (x, 1)));
8846 : 2850217 : xmode = op_mode;
8847 : :
8848 : : /* If X is still an AND, see if it is an AND with a mask that
8849 : : is just some low-order bits. If so, and it is MASK, we don't
8850 : : need it. */
8851 : :
8852 : 2800447 : if (GET_CODE (x) == AND && CONST_INT_P (XEXP (x, 1))
8853 : 5650664 : && (INTVAL (XEXP (x, 1)) & GET_MODE_MASK (xmode)) == mask)
8854 : 74731 : x = XEXP (x, 0);
8855 : :
8856 : : /* If it remains an AND, try making another AND with the bits
8857 : : in the mode mask that aren't in MASK turned on. If the
8858 : : constant in the AND is wide enough, this might make a
8859 : : cheaper constant. */
8860 : :
8861 : 2725718 : if (GET_CODE (x) == AND && CONST_INT_P (XEXP (x, 1))
8862 : 2725716 : && GET_MODE_MASK (xmode) != mask
8863 : 2932558 : && HWI_COMPUTABLE_MODE_P (xmode))
8864 : : {
8865 : 82341 : unsigned HOST_WIDE_INT cval
8866 : 82341 : = UINTVAL (XEXP (x, 1)) | (GET_MODE_MASK (xmode) & ~mask);
8867 : 82341 : rtx y;
8868 : :
8869 : 82341 : y = simplify_gen_binary (AND, xmode, XEXP (x, 0),
8870 : 82341 : gen_int_mode (cval, xmode));
8871 : 82341 : if (set_src_cost (y, xmode, optimize_this_for_speed_p)
8872 : 82341 : < set_src_cost (x, xmode, optimize_this_for_speed_p))
8873 : 67685691 : x = y;
8874 : : }
8875 : :
8876 : : break;
8877 : : }
8878 : :
8879 : 702531 : goto binop;
8880 : :
8881 : 9508160 : case PLUS:
8882 : : /* In (and (plus FOO C1) M), if M is a mask that just turns off
8883 : : low-order bits (as in an alignment operation) and FOO is already
8884 : : aligned to that boundary, mask C1 to that boundary as well.
8885 : : This may eliminate that PLUS and, later, the AND. */
8886 : :
8887 : 9508160 : {
8888 : 9508160 : unsigned int width = GET_MODE_PRECISION (mode);
8889 : 9508160 : unsigned HOST_WIDE_INT smask = mask;
8890 : :
8891 : : /* If MODE is narrower than HOST_WIDE_INT and mask is a negative
8892 : : number, sign extend it. */
8893 : :
8894 : 9508160 : if (width < HOST_BITS_PER_WIDE_INT
8895 : 3097782 : && (smask & (HOST_WIDE_INT_1U << (width - 1))) != 0)
8896 : 2766170 : smask |= HOST_WIDE_INT_M1U << width;
8897 : :
8898 : 9508160 : if (CONST_INT_P (XEXP (x, 1))
8899 : 3585303 : && pow2p_hwi (- smask)
8900 : 3055929 : && (nonzero_bits (XEXP (x, 0), mode) & ~smask) == 0
8901 : 12126826 : && (INTVAL (XEXP (x, 1)) & ~smask) != 0)
8902 : 12510 : return force_to_mode (plus_constant (xmode, XEXP (x, 0),
8903 : 12510 : (INTVAL (XEXP (x, 1)) & smask)),
8904 : : mode, smask, next_select);
8905 : : }
8906 : :
8907 : : /* fall through */
8908 : :
8909 : 11327451 : case MULT:
8910 : : /* Substituting into the operands of a widening MULT is not likely to
8911 : : create RTL matching a machine insn. */
8912 : 11327451 : if (code == MULT
8913 : 1831801 : && (GET_CODE (XEXP (x, 0)) == ZERO_EXTEND
8914 : 1831801 : || GET_CODE (XEXP (x, 0)) == SIGN_EXTEND)
8915 : 89305 : && (GET_CODE (XEXP (x, 1)) == ZERO_EXTEND
8916 : 89305 : || GET_CODE (XEXP (x, 1)) == SIGN_EXTEND)
8917 : 38539 : && REG_P (XEXP (XEXP (x, 0), 0))
8918 : 29546 : && REG_P (XEXP (XEXP (x, 1), 0)))
8919 : 21364 : return gen_lowpart_or_truncate (mode, x);
8920 : :
8921 : : /* For PLUS, MINUS and MULT, we need any bits less significant than the
8922 : : most significant bit in MASK since carries from those bits will
8923 : : affect the bits we are interested in. */
8924 : 11306087 : mask = fuller_mask;
8925 : 11306087 : goto binop;
8926 : :
8927 : 2298029 : case MINUS:
8928 : : /* If X is (minus C Y) where C's least set bit is larger than any bit
8929 : : in the mask, then we may replace with (neg Y). */
8930 : 2298029 : if (poly_int_rtx_p (XEXP (x, 0), &const_op0)
8931 : 168883 : && known_alignment (poly_uint64 (const_op0)) > mask)
8932 : : {
8933 : 44 : x = simplify_gen_unary (NEG, xmode, XEXP (x, 1), xmode);
8934 : 44 : return force_to_mode (x, mode, mask, next_select);
8935 : : }
8936 : :
8937 : : /* Similarly, if C contains every bit in the fuller_mask, then we may
8938 : : replace with (not Y). */
8939 : 2297985 : if (CONST_INT_P (XEXP (x, 0))
8940 : 168839 : && ((UINTVAL (XEXP (x, 0)) | fuller_mask) == UINTVAL (XEXP (x, 0))))
8941 : : {
8942 : 502 : x = simplify_gen_unary (NOT, xmode, XEXP (x, 1), xmode);
8943 : 502 : return force_to_mode (x, mode, mask, next_select);
8944 : : }
8945 : :
8946 : 2297483 : mask = fuller_mask;
8947 : 2297483 : goto binop;
8948 : :
8949 : 2520905 : case IOR:
8950 : 2520905 : case XOR:
8951 : : /* If X is (ior (lshiftrt FOO C1) C2), try to commute the IOR and
8952 : : LSHIFTRT so we end up with an (and (lshiftrt (ior ...) ...) ...)
8953 : : operation which may be a bitfield extraction. Ensure that the
8954 : : constant we form is not wider than the mode of X. */
8955 : :
8956 : 2520905 : if (GET_CODE (XEXP (x, 0)) == LSHIFTRT
8957 : 65913 : && CONST_INT_P (XEXP (XEXP (x, 0), 1))
8958 : 55920 : && INTVAL (XEXP (XEXP (x, 0), 1)) >= 0
8959 : 55920 : && INTVAL (XEXP (XEXP (x, 0), 1)) < HOST_BITS_PER_WIDE_INT
8960 : 55920 : && CONST_INT_P (XEXP (x, 1))
8961 : 4846 : && ((INTVAL (XEXP (XEXP (x, 0), 1))
8962 : 9692 : + floor_log2 (INTVAL (XEXP (x, 1))))
8963 : 4846 : < GET_MODE_PRECISION (xmode))
8964 : 2520905 : && (UINTVAL (XEXP (x, 1))
8965 : 2889 : & ~nonzero_bits (XEXP (x, 0), xmode)) == 0)
8966 : : {
8967 : 5056 : temp = gen_int_mode ((INTVAL (XEXP (x, 1)) & mask)
8968 : 2528 : << INTVAL (XEXP (XEXP (x, 0), 1)),
8969 : : xmode);
8970 : 5056 : temp = simplify_gen_binary (GET_CODE (x), xmode,
8971 : 2528 : XEXP (XEXP (x, 0), 0), temp);
8972 : 5056 : x = simplify_gen_binary (LSHIFTRT, xmode, temp,
8973 : 2528 : XEXP (XEXP (x, 0), 1));
8974 : 2528 : return force_to_mode (x, mode, mask, next_select);
8975 : : }
8976 : :
8977 : 16824478 : binop:
8978 : : /* For most binary operations, just propagate into the operation and
8979 : : change the mode if we have an operation of that mode. */
8980 : :
8981 : 16824478 : op0 = force_to_mode (XEXP (x, 0), mode, mask, next_select);
8982 : 16824478 : op1 = force_to_mode (XEXP (x, 1), mode, mask, next_select);
8983 : :
8984 : : /* If we ended up truncating both operands, truncate the result of the
8985 : : operation instead. */
8986 : 16824478 : if (GET_CODE (op0) == TRUNCATE
8987 : 0 : && GET_CODE (op1) == TRUNCATE)
8988 : : {
8989 : 0 : op0 = XEXP (op0, 0);
8990 : 0 : op1 = XEXP (op1, 0);
8991 : : }
8992 : :
8993 : 16824478 : op0 = gen_lowpart_or_truncate (op_mode, op0);
8994 : 16824478 : op1 = gen_lowpart_or_truncate (op_mode, op1);
8995 : :
8996 : 16824478 : if (op_mode != xmode || op0 != XEXP (x, 0) || op1 != XEXP (x, 1))
8997 : : {
8998 : 2251027 : x = simplify_gen_binary (code, op_mode, op0, op1);
8999 : 2251027 : xmode = op_mode;
9000 : : }
9001 : : break;
9002 : :
9003 : 4091980 : case ASHIFT:
9004 : : /* For left shifts, do the same, but just for the first operand.
9005 : : However, we cannot do anything with shifts where we cannot
9006 : : guarantee that the counts are smaller than the size of the mode
9007 : : because such a count will have a different meaning in a
9008 : : wider mode. */
9009 : :
9010 : 3893308 : if (! (CONST_INT_P (XEXP (x, 1))
9011 : 3893333 : && INTVAL (XEXP (x, 1)) >= 0
9012 : 3893308 : && INTVAL (XEXP (x, 1)) < GET_MODE_PRECISION (mode))
9013 : 4150105 : && ! (GET_MODE (XEXP (x, 1)) != VOIDmode
9014 : 198647 : && (nonzero_bits (XEXP (x, 1), GET_MODE (XEXP (x, 1)))
9015 : 198647 : < (unsigned HOST_WIDE_INT) GET_MODE_PRECISION (mode))))
9016 : : break;
9017 : :
9018 : : /* If the shift count is a constant and we can do arithmetic in
9019 : : the mode of the shift, refine which bits we need. Otherwise, use the
9020 : : conservative form of the mask. */
9021 : 3906054 : if (CONST_INT_P (XEXP (x, 1))
9022 : 3835208 : && INTVAL (XEXP (x, 1)) >= 0
9023 : 3835208 : && INTVAL (XEXP (x, 1)) < GET_MODE_PRECISION (op_mode)
9024 : 7741262 : && HWI_COMPUTABLE_MODE_P (op_mode))
9025 : 3832212 : mask >>= INTVAL (XEXP (x, 1));
9026 : : else
9027 : : mask = fuller_mask;
9028 : :
9029 : 3906054 : op0 = gen_lowpart_or_truncate (op_mode,
9030 : : force_to_mode (XEXP (x, 0), mode,
9031 : : mask, next_select));
9032 : :
9033 : 3906054 : if (op_mode != xmode || op0 != XEXP (x, 0))
9034 : : {
9035 : 881907 : x = simplify_gen_binary (code, op_mode, op0, XEXP (x, 1));
9036 : 881907 : xmode = op_mode;
9037 : : }
9038 : : break;
9039 : :
9040 : 3001291 : case LSHIFTRT:
9041 : : /* Here we can only do something if the shift count is a constant,
9042 : : this shift constant is valid for the host, and we can do arithmetic
9043 : : in OP_MODE. */
9044 : :
9045 : 3001291 : if (CONST_INT_P (XEXP (x, 1))
9046 : 2899860 : && INTVAL (XEXP (x, 1)) >= 0
9047 : 2899859 : && INTVAL (XEXP (x, 1)) < HOST_BITS_PER_WIDE_INT
9048 : 5901136 : && HWI_COMPUTABLE_MODE_P (op_mode))
9049 : : {
9050 : 2895756 : rtx inner = XEXP (x, 0);
9051 : 2895756 : unsigned HOST_WIDE_INT inner_mask;
9052 : :
9053 : : /* Select the mask of the bits we need for the shift operand. */
9054 : 2895756 : inner_mask = mask << INTVAL (XEXP (x, 1));
9055 : :
9056 : : /* We can only change the mode of the shift if we can do arithmetic
9057 : : in the mode of the shift and INNER_MASK is no wider than the
9058 : : width of X's mode. */
9059 : 2895756 : if ((inner_mask & ~GET_MODE_MASK (xmode)) != 0)
9060 : 316377 : op_mode = xmode;
9061 : :
9062 : 2895756 : inner = force_to_mode (inner, op_mode, inner_mask, next_select);
9063 : :
9064 : 2895756 : if (xmode != op_mode || inner != XEXP (x, 0))
9065 : : {
9066 : 667890 : x = simplify_gen_binary (LSHIFTRT, op_mode, inner, XEXP (x, 1));
9067 : 667890 : xmode = op_mode;
9068 : : }
9069 : : }
9070 : :
9071 : : /* If we have (and (lshiftrt FOO C1) C2) where the combination of the
9072 : : shift and AND produces only copies of the sign bit (C2 is one less
9073 : : than a power of two), we can do this with just a shift. */
9074 : :
9075 : 3001291 : if (GET_CODE (x) == LSHIFTRT
9076 : 3001216 : && CONST_INT_P (XEXP (x, 1))
9077 : : /* The shift puts one of the sign bit copies in the least significant
9078 : : bit. */
9079 : 5799570 : && ((INTVAL (XEXP (x, 1))
9080 : 2899785 : + num_sign_bit_copies (XEXP (x, 0), GET_MODE (XEXP (x, 0))))
9081 : 2899785 : >= GET_MODE_PRECISION (xmode))
9082 : 240073 : && pow2p_hwi (mask + 1)
9083 : : /* Number of bits left after the shift must be more than the mask
9084 : : needs. */
9085 : 70119 : && ((INTVAL (XEXP (x, 1)) + exact_log2 (mask + 1))
9086 : 70119 : <= GET_MODE_PRECISION (xmode))
9087 : : /* Must be more sign bit copies than the mask needs. */
9088 : 3026953 : && ((int) num_sign_bit_copies (XEXP (x, 0), GET_MODE (XEXP (x, 0)))
9089 : 25662 : >= exact_log2 (mask + 1)))
9090 : : {
9091 : 25662 : int nbits = GET_MODE_PRECISION (xmode) - exact_log2 (mask + 1);
9092 : 25662 : x = simplify_gen_binary (LSHIFTRT, xmode, XEXP (x, 0),
9093 : 25662 : gen_int_shift_amount (xmode, nbits));
9094 : : }
9095 : 3001291 : goto shiftrt;
9096 : :
9097 : 1958418 : case ASHIFTRT:
9098 : : /* If we are just looking for the sign bit, we don't need this shift at
9099 : : all, even if it has a variable count. */
9100 : 1958418 : if (val_signbit_p (xmode, mask))
9101 : 673 : return force_to_mode (XEXP (x, 0), mode, mask, next_select);
9102 : :
9103 : : /* If this is a shift by a constant, get a mask that contains those bits
9104 : : that are not copies of the sign bit. We then have two cases: If
9105 : : MASK only includes those bits, this can be a logical shift, which may
9106 : : allow simplifications. If MASK is a single-bit field not within
9107 : : those bits, we are requesting a copy of the sign bit and hence can
9108 : : shift the sign bit to the appropriate location. */
9109 : :
9110 : 1957745 : if (CONST_INT_P (XEXP (x, 1)) && INTVAL (XEXP (x, 1)) >= 0
9111 : 1920633 : && INTVAL (XEXP (x, 1)) < HOST_BITS_PER_WIDE_INT)
9112 : : {
9113 : 1920546 : unsigned HOST_WIDE_INT nonzero;
9114 : 1920546 : int i;
9115 : :
9116 : : /* If the considered data is wider than HOST_WIDE_INT, we can't
9117 : : represent a mask for all its bits in a single scalar.
9118 : : But we only care about the lower bits, so calculate these. */
9119 : :
9120 : 1920546 : if (GET_MODE_PRECISION (xmode) > HOST_BITS_PER_WIDE_INT)
9121 : : {
9122 : 399 : nonzero = HOST_WIDE_INT_M1U;
9123 : :
9124 : : /* GET_MODE_PRECISION (GET_MODE (x)) - INTVAL (XEXP (x, 1))
9125 : : is the number of bits a full-width mask would have set.
9126 : : We need only shift if these are fewer than nonzero can
9127 : : hold. If not, we must keep all bits set in nonzero. */
9128 : :
9129 : 399 : if (GET_MODE_PRECISION (xmode) - INTVAL (XEXP (x, 1))
9130 : : < HOST_BITS_PER_WIDE_INT)
9131 : 0 : nonzero >>= INTVAL (XEXP (x, 1))
9132 : 0 : + HOST_BITS_PER_WIDE_INT
9133 : 0 : - GET_MODE_PRECISION (xmode);
9134 : : }
9135 : : else
9136 : : {
9137 : 1920147 : nonzero = GET_MODE_MASK (xmode);
9138 : 1920147 : nonzero >>= INTVAL (XEXP (x, 1));
9139 : : }
9140 : :
9141 : 1920546 : if ((mask & ~nonzero) == 0)
9142 : : {
9143 : 42254 : x = simplify_shift_const (NULL_RTX, LSHIFTRT, xmode,
9144 : : XEXP (x, 0), INTVAL (XEXP (x, 1)));
9145 : 42254 : if (GET_CODE (x) != ASHIFTRT)
9146 : 42254 : return force_to_mode (x, mode, mask, next_select);
9147 : : }
9148 : :
9149 : 1878292 : else if ((i = exact_log2 (mask)) >= 0)
9150 : : {
9151 : 72 : x = simplify_shift_const
9152 : 144 : (NULL_RTX, LSHIFTRT, xmode, XEXP (x, 0),
9153 : 72 : GET_MODE_PRECISION (xmode) - 1 - i);
9154 : :
9155 : 72 : if (GET_CODE (x) != ASHIFTRT)
9156 : 72 : return force_to_mode (x, mode, mask, next_select);
9157 : : }
9158 : : }
9159 : :
9160 : : /* If MASK is 1, convert this to an LSHIFTRT. This can be done
9161 : : even if the shift count isn't a constant. */
9162 : 1915419 : if (mask == 1)
9163 : 3106 : x = simplify_gen_binary (LSHIFTRT, xmode, XEXP (x, 0), XEXP (x, 1));
9164 : :
9165 : 1912313 : shiftrt:
9166 : :
9167 : : /* If this is a zero- or sign-extension operation that just affects bits
9168 : : we don't care about, remove it. Be sure the call above returned
9169 : : something that is still a shift. */
9170 : :
9171 : 4916710 : if ((GET_CODE (x) == LSHIFTRT || GET_CODE (x) == ASHIFTRT)
9172 : 4916635 : && CONST_INT_P (XEXP (x, 1))
9173 : 4778092 : && INTVAL (XEXP (x, 1)) >= 0
9174 : 4778091 : && (INTVAL (XEXP (x, 1))
9175 : 9556182 : <= GET_MODE_PRECISION (xmode) - (floor_log2 (mask) + 1))
9176 : 1636698 : && GET_CODE (XEXP (x, 0)) == ASHIFT
9177 : 4917682 : && XEXP (XEXP (x, 0), 1) == XEXP (x, 1))
9178 : 764 : return force_to_mode (XEXP (XEXP (x, 0), 0), mode, mask, next_select);
9179 : :
9180 : : break;
9181 : :
9182 : 36096 : case ROTATE:
9183 : 36096 : case ROTATERT:
9184 : : /* If the shift count is constant and we can do computations
9185 : : in the mode of X, compute where the bits we care about are.
9186 : : Otherwise, we can't do anything. Don't change the mode of
9187 : : the shift or propagate MODE into the shift, though. */
9188 : 36096 : if (CONST_INT_P (XEXP (x, 1))
9189 : 27684 : && INTVAL (XEXP (x, 1)) >= 0)
9190 : : {
9191 : 27682 : temp = simplify_binary_operation (code == ROTATE ? ROTATERT : ROTATE,
9192 : 27682 : xmode, gen_int_mode (mask, xmode),
9193 : : XEXP (x, 1));
9194 : 27682 : if (temp && CONST_INT_P (temp))
9195 : 27682 : x = simplify_gen_binary (code, xmode,
9196 : : force_to_mode (XEXP (x, 0), xmode,
9197 : 27682 : INTVAL (temp), next_select),
9198 : : XEXP (x, 1));
9199 : : }
9200 : : break;
9201 : :
9202 : 148802 : case NEG:
9203 : : /* If we just want the low-order bit, the NEG isn't needed since it
9204 : : won't change the low-order bit. */
9205 : 148802 : if (mask == 1)
9206 : 386 : return force_to_mode (XEXP (x, 0), mode, mask, just_select);
9207 : :
9208 : : /* We need any bits less significant than the most significant bit in
9209 : : MASK since carries from those bits will affect the bits we are
9210 : : interested in. */
9211 : 148416 : mask = fuller_mask;
9212 : 148416 : goto unop;
9213 : :
9214 : 379499 : case NOT:
9215 : : /* (not FOO) is (xor FOO CONST), so if FOO is an LSHIFTRT, we can do the
9216 : : same as the XOR case above. Ensure that the constant we form is not
9217 : : wider than the mode of X. */
9218 : :
9219 : 379499 : if (GET_CODE (XEXP (x, 0)) == LSHIFTRT
9220 : 9830 : && CONST_INT_P (XEXP (XEXP (x, 0), 1))
9221 : 9255 : && INTVAL (XEXP (XEXP (x, 0), 1)) >= 0
9222 : 18510 : && (INTVAL (XEXP (XEXP (x, 0), 1)) + floor_log2 (mask)
9223 : 9255 : < GET_MODE_PRECISION (xmode))
9224 : 384159 : && INTVAL (XEXP (XEXP (x, 0), 1)) < HOST_BITS_PER_WIDE_INT)
9225 : : {
9226 : 4660 : temp = gen_int_mode (mask << INTVAL (XEXP (XEXP (x, 0), 1)), xmode);
9227 : 4660 : temp = simplify_gen_binary (XOR, xmode, XEXP (XEXP (x, 0), 0), temp);
9228 : 9320 : x = simplify_gen_binary (LSHIFTRT, xmode,
9229 : 4660 : temp, XEXP (XEXP (x, 0), 1));
9230 : :
9231 : 4660 : return force_to_mode (x, mode, mask, next_select);
9232 : : }
9233 : :
9234 : : /* (and (not FOO) CONST) is (not (or FOO (not CONST))), so we must
9235 : : use the full mask inside the NOT. */
9236 : : mask = fuller_mask;
9237 : :
9238 : 523255 : unop:
9239 : 523255 : op0 = gen_lowpart_or_truncate (op_mode,
9240 : : force_to_mode (XEXP (x, 0), mode, mask,
9241 : : next_select));
9242 : 523255 : if (op_mode != xmode || op0 != XEXP (x, 0))
9243 : : {
9244 : 53759 : x = simplify_gen_unary (code, op_mode, op0, op_mode);
9245 : 53759 : xmode = op_mode;
9246 : : }
9247 : : break;
9248 : :
9249 : 456529 : case NE:
9250 : : /* (and (ne FOO 0) CONST) can be (and FOO CONST) if CONST is included
9251 : : in STORE_FLAG_VALUE and FOO has a single bit that might be nonzero,
9252 : : which is equal to STORE_FLAG_VALUE. */
9253 : 456529 : if ((mask & ~STORE_FLAG_VALUE) == 0
9254 : 3443 : && XEXP (x, 1) == const0_rtx
9255 : 3424 : && GET_MODE (XEXP (x, 0)) == mode
9256 : 9 : && pow2p_hwi (nonzero_bits (XEXP (x, 0), mode))
9257 : 456529 : && (nonzero_bits (XEXP (x, 0), mode)
9258 : : == (unsigned HOST_WIDE_INT) STORE_FLAG_VALUE))
9259 : 0 : return force_to_mode (XEXP (x, 0), mode, mask, next_select);
9260 : :
9261 : : break;
9262 : :
9263 : 1160461 : case IF_THEN_ELSE:
9264 : : /* We have no way of knowing if the IF_THEN_ELSE can itself be
9265 : : written in a narrower mode. We play it safe and do not do so. */
9266 : :
9267 : 1160461 : op0 = gen_lowpart_or_truncate (xmode,
9268 : : force_to_mode (XEXP (x, 1), mode,
9269 : : mask, next_select));
9270 : 1160461 : op1 = gen_lowpart_or_truncate (xmode,
9271 : : force_to_mode (XEXP (x, 2), mode,
9272 : : mask, next_select));
9273 : 1160461 : if (op0 != XEXP (x, 1) || op1 != XEXP (x, 2))
9274 : 172001 : x = simplify_gen_ternary (IF_THEN_ELSE, xmode,
9275 : 172001 : GET_MODE (XEXP (x, 0)), XEXP (x, 0),
9276 : : op0, op1);
9277 : : break;
9278 : :
9279 : : default:
9280 : : break;
9281 : : }
9282 : :
9283 : : /* Ensure we return a value of the proper mode. */
9284 : 67685691 : return gen_lowpart_or_truncate (mode, x);
9285 : : }
9286 : :
9287 : : /* Return nonzero if X is an expression that has one of two values depending on
9288 : : whether some other value is zero or nonzero. In that case, we return the
9289 : : value that is being tested, *PTRUE is set to the value if the rtx being
9290 : : returned has a nonzero value, and *PFALSE is set to the other alternative.
9291 : :
9292 : : If we return zero, we set *PTRUE and *PFALSE to X. */
9293 : :
9294 : : static rtx
9295 : 219925768 : if_then_else_cond (rtx x, rtx *ptrue, rtx *pfalse)
9296 : : {
9297 : 219925768 : machine_mode mode = GET_MODE (x);
9298 : 219925768 : enum rtx_code code = GET_CODE (x);
9299 : 219925768 : rtx cond0, cond1, true0, true1, false0, false1;
9300 : 219925768 : unsigned HOST_WIDE_INT nz;
9301 : 219925768 : scalar_int_mode int_mode;
9302 : :
9303 : : /* If we are comparing a value against zero, we are done. */
9304 : 219925768 : if ((code == NE || code == EQ)
9305 : 2286903 : && XEXP (x, 1) == const0_rtx)
9306 : : {
9307 : 1465770 : *ptrue = (code == NE) ? const_true_rtx : const0_rtx;
9308 : 1465770 : *pfalse = (code == NE) ? const0_rtx : const_true_rtx;
9309 : 1465770 : return XEXP (x, 0);
9310 : : }
9311 : :
9312 : : /* If this is a unary operation whose operand has one of two values, apply
9313 : : our opcode to compute those values. */
9314 : 218459998 : else if (UNARY_P (x)
9315 : 218459998 : && (cond0 = if_then_else_cond (XEXP (x, 0), &true0, &false0)) != 0)
9316 : : {
9317 : 381369 : *ptrue = simplify_gen_unary (code, mode, true0, GET_MODE (XEXP (x, 0)));
9318 : 762738 : *pfalse = simplify_gen_unary (code, mode, false0,
9319 : 381369 : GET_MODE (XEXP (x, 0)));
9320 : 381369 : return cond0;
9321 : : }
9322 : :
9323 : : /* If this is a COMPARE, do nothing, since the IF_THEN_ELSE we would
9324 : : make can't possibly match and would suppress other optimizations. */
9325 : 218078629 : else if (code == COMPARE)
9326 : : ;
9327 : :
9328 : : /* If this is a binary operation, see if either side has only one of two
9329 : : values. If either one does or if both do and they are conditional on
9330 : : the same value, compute the new true and false values. */
9331 : 214239893 : else if (BINARY_P (x))
9332 : : {
9333 : 79737010 : rtx op0 = XEXP (x, 0);
9334 : 79737010 : rtx op1 = XEXP (x, 1);
9335 : 79737010 : cond0 = if_then_else_cond (op0, &true0, &false0);
9336 : 79737010 : cond1 = if_then_else_cond (op1, &true1, &false1);
9337 : :
9338 : 547905 : if ((cond0 != 0 && cond1 != 0 && !rtx_equal_p (cond0, cond1))
9339 : 80260501 : && (REG_P (op0) || REG_P (op1)))
9340 : : {
9341 : : /* Try to enable a simplification by undoing work done by
9342 : : if_then_else_cond if it converted a REG into something more
9343 : : complex. */
9344 : 380765 : if (REG_P (op0))
9345 : : {
9346 : 90502 : cond0 = 0;
9347 : 90502 : true0 = false0 = op0;
9348 : : }
9349 : : else
9350 : : {
9351 : 290263 : cond1 = 0;
9352 : 290263 : true1 = false1 = op1;
9353 : : }
9354 : : }
9355 : :
9356 : 79737010 : if ((cond0 != 0 || cond1 != 0)
9357 : 79737010 : && ! (cond0 != 0 && cond1 != 0 && !rtx_equal_p (cond0, cond1)))
9358 : : {
9359 : : /* If if_then_else_cond returned zero, then true/false are the
9360 : : same rtl. We must copy one of them to prevent invalid rtl
9361 : : sharing. */
9362 : 3770799 : if (cond0 == 0)
9363 : 1195586 : true0 = copy_rtx (true0);
9364 : 2575213 : else if (cond1 == 0)
9365 : 2550799 : true1 = copy_rtx (true1);
9366 : :
9367 : 3770799 : if (COMPARISON_P (x))
9368 : : {
9369 : 242998 : *ptrue = simplify_gen_relational (code, mode, VOIDmode,
9370 : : true0, true1);
9371 : 242998 : *pfalse = simplify_gen_relational (code, mode, VOIDmode,
9372 : : false0, false1);
9373 : : }
9374 : : else
9375 : : {
9376 : 3527801 : *ptrue = simplify_gen_binary (code, mode, true0, true1);
9377 : 3527801 : *pfalse = simplify_gen_binary (code, mode, false0, false1);
9378 : : }
9379 : :
9380 : 4966385 : return cond0 ? cond0 : cond1;
9381 : : }
9382 : :
9383 : : /* See if we have PLUS, IOR, XOR, MINUS or UMAX, where one of the
9384 : : operands is zero when the other is nonzero, and vice-versa,
9385 : : and STORE_FLAG_VALUE is 1 or -1. */
9386 : :
9387 : 75966211 : if ((STORE_FLAG_VALUE == 1 || STORE_FLAG_VALUE == -1)
9388 : 75966211 : && (code == PLUS || code == IOR || code == XOR || code == MINUS
9389 : : || code == UMAX)
9390 : 31968720 : && GET_CODE (XEXP (x, 0)) == MULT && GET_CODE (XEXP (x, 1)) == MULT)
9391 : : {
9392 : 35313 : rtx op0 = XEXP (XEXP (x, 0), 1);
9393 : 35313 : rtx op1 = XEXP (XEXP (x, 1), 1);
9394 : :
9395 : 35313 : cond0 = XEXP (XEXP (x, 0), 0);
9396 : 35313 : cond1 = XEXP (XEXP (x, 1), 0);
9397 : :
9398 : 35313 : if (COMPARISON_P (cond0)
9399 : 155 : && COMPARISON_P (cond1)
9400 : 0 : && SCALAR_INT_MODE_P (mode)
9401 : 0 : && ((GET_CODE (cond0) == reversed_comparison_code (cond1, NULL)
9402 : 0 : && rtx_equal_p (XEXP (cond0, 0), XEXP (cond1, 0))
9403 : 0 : && rtx_equal_p (XEXP (cond0, 1), XEXP (cond1, 1)))
9404 : 0 : || ((swap_condition (GET_CODE (cond0))
9405 : 0 : == reversed_comparison_code (cond1, NULL))
9406 : 0 : && rtx_equal_p (XEXP (cond0, 0), XEXP (cond1, 1))
9407 : 0 : && rtx_equal_p (XEXP (cond0, 1), XEXP (cond1, 0))))
9408 : 35313 : && ! side_effects_p (x))
9409 : : {
9410 : 0 : *ptrue = simplify_gen_binary (MULT, mode, op0, const_true_rtx);
9411 : 0 : *pfalse = simplify_gen_binary (MULT, mode,
9412 : : (code == MINUS
9413 : 0 : ? simplify_gen_unary (NEG, mode,
9414 : : op1, mode)
9415 : : : op1),
9416 : : const_true_rtx);
9417 : 0 : return cond0;
9418 : : }
9419 : : }
9420 : :
9421 : : /* Similarly for MULT, AND and UMIN, except that for these the result
9422 : : is always zero. */
9423 : 75966211 : if ((STORE_FLAG_VALUE == 1 || STORE_FLAG_VALUE == -1)
9424 : 75966211 : && (code == MULT || code == AND || code == UMIN)
9425 : 10643254 : && GET_CODE (XEXP (x, 0)) == MULT && GET_CODE (XEXP (x, 1)) == MULT)
9426 : : {
9427 : 886 : cond0 = XEXP (XEXP (x, 0), 0);
9428 : 886 : cond1 = XEXP (XEXP (x, 1), 0);
9429 : :
9430 : 886 : if (COMPARISON_P (cond0)
9431 : 0 : && COMPARISON_P (cond1)
9432 : 0 : && ((GET_CODE (cond0) == reversed_comparison_code (cond1, NULL)
9433 : 0 : && rtx_equal_p (XEXP (cond0, 0), XEXP (cond1, 0))
9434 : 0 : && rtx_equal_p (XEXP (cond0, 1), XEXP (cond1, 1)))
9435 : 0 : || ((swap_condition (GET_CODE (cond0))
9436 : 0 : == reversed_comparison_code (cond1, NULL))
9437 : 0 : && rtx_equal_p (XEXP (cond0, 0), XEXP (cond1, 1))
9438 : 0 : && rtx_equal_p (XEXP (cond0, 1), XEXP (cond1, 0))))
9439 : 886 : && ! side_effects_p (x))
9440 : : {
9441 : 0 : *ptrue = *pfalse = const0_rtx;
9442 : 0 : return cond0;
9443 : : }
9444 : : }
9445 : : }
9446 : :
9447 : 134502883 : else if (code == IF_THEN_ELSE)
9448 : : {
9449 : : /* If we have IF_THEN_ELSE already, extract the condition and
9450 : : canonicalize it if it is NE or EQ. */
9451 : 392174 : cond0 = XEXP (x, 0);
9452 : 392174 : *ptrue = XEXP (x, 1), *pfalse = XEXP (x, 2);
9453 : 392174 : if (GET_CODE (cond0) == NE && XEXP (cond0, 1) == const0_rtx)
9454 : 108616 : return XEXP (cond0, 0);
9455 : 283558 : else if (GET_CODE (cond0) == EQ && XEXP (cond0, 1) == const0_rtx)
9456 : : {
9457 : 21401 : *ptrue = XEXP (x, 2), *pfalse = XEXP (x, 1);
9458 : 21401 : return XEXP (cond0, 0);
9459 : : }
9460 : : else
9461 : : return cond0;
9462 : : }
9463 : :
9464 : : /* If X is a SUBREG, we can narrow both the true and false values
9465 : : if the inner expression, if there is a condition. */
9466 : 134110709 : else if (code == SUBREG
9467 : 134110709 : && (cond0 = if_then_else_cond (SUBREG_REG (x), &true0,
9468 : : &false0)) != 0)
9469 : : {
9470 : 665132 : true0 = simplify_gen_subreg (mode, true0,
9471 : 332566 : GET_MODE (SUBREG_REG (x)), SUBREG_BYTE (x));
9472 : 665132 : false0 = simplify_gen_subreg (mode, false0,
9473 : 332566 : GET_MODE (SUBREG_REG (x)), SUBREG_BYTE (x));
9474 : 332566 : if (true0 && false0)
9475 : : {
9476 : 332566 : *ptrue = true0;
9477 : 332566 : *pfalse = false0;
9478 : 332566 : return cond0;
9479 : : }
9480 : : }
9481 : :
9482 : : /* If X is a constant, this isn't special and will cause confusions
9483 : : if we treat it as such. Likewise if it is equivalent to a constant. */
9484 : 133778143 : else if (CONSTANT_P (x)
9485 : 133778143 : || ((cond0 = get_last_value (x)) != 0 && CONSTANT_P (cond0)))
9486 : : ;
9487 : :
9488 : : /* If we're in BImode, canonicalize on 0 and STORE_FLAG_VALUE, as that
9489 : : will be least confusing to the rest of the compiler. */
9490 : 89338399 : else if (mode == BImode)
9491 : : {
9492 : 0 : *ptrue = GEN_INT (STORE_FLAG_VALUE), *pfalse = const0_rtx;
9493 : 0 : return x;
9494 : : }
9495 : :
9496 : : /* If X is known to be either 0 or -1, those are the true and
9497 : : false values when testing X. */
9498 : 89338399 : else if (x == constm1_rtx || x == const0_rtx
9499 : 89338399 : || (is_a <scalar_int_mode> (mode, &int_mode)
9500 : 64117990 : && (num_sign_bit_copies (x, int_mode)
9501 : 64117990 : == GET_MODE_PRECISION (int_mode))))
9502 : : {
9503 : 736414 : *ptrue = constm1_rtx, *pfalse = const0_rtx;
9504 : 736414 : return x;
9505 : : }
9506 : :
9507 : : /* Likewise for 0 or a single bit. */
9508 : 88601985 : else if (HWI_COMPUTABLE_MODE_P (mode)
9509 : 59972290 : && pow2p_hwi (nz = nonzero_bits (x, mode)))
9510 : : {
9511 : 1672048 : *ptrue = gen_int_mode (nz, mode), *pfalse = const0_rtx;
9512 : 1672048 : return x;
9513 : : }
9514 : :
9515 : : /* Otherwise fail; show no condition with true and false values the same. */
9516 : 211174628 : *ptrue = *pfalse = x;
9517 : 211174628 : return 0;
9518 : : }
9519 : :
9520 : : /* Return the value of expression X given the fact that condition COND
9521 : : is known to be true when applied to REG as its first operand and VAL
9522 : : as its second. X is known to not be shared and so can be modified in
9523 : : place.
9524 : :
9525 : : We only handle the simplest cases, and specifically those cases that
9526 : : arise with IF_THEN_ELSE expressions. */
9527 : :
9528 : : static rtx
9529 : 529224 : known_cond (rtx x, enum rtx_code cond, rtx reg, rtx val)
9530 : : {
9531 : 529224 : enum rtx_code code = GET_CODE (x);
9532 : 529224 : const char *fmt;
9533 : 529224 : int i, j;
9534 : :
9535 : 529224 : if (side_effects_p (x))
9536 : : return x;
9537 : :
9538 : : /* If either operand of the condition is a floating point value,
9539 : : then we have to avoid collapsing an EQ comparison. */
9540 : 529224 : if (cond == EQ
9541 : 101886 : && rtx_equal_p (x, reg)
9542 : 67337 : && ! FLOAT_MODE_P (GET_MODE (x))
9543 : 596561 : && ! FLOAT_MODE_P (GET_MODE (val)))
9544 : : return val;
9545 : :
9546 : 461887 : if (cond == UNEQ && rtx_equal_p (x, reg))
9547 : : return val;
9548 : :
9549 : : /* If X is (abs REG) and we know something about REG's relationship
9550 : : with zero, we may be able to simplify this. */
9551 : :
9552 : 461887 : if (code == ABS && rtx_equal_p (XEXP (x, 0), reg) && val == const0_rtx)
9553 : 1 : switch (cond)
9554 : : {
9555 : 0 : case GE: case GT: case EQ:
9556 : 0 : return XEXP (x, 0);
9557 : 1 : case LT: case LE:
9558 : 2 : return simplify_gen_unary (NEG, GET_MODE (XEXP (x, 0)),
9559 : : XEXP (x, 0),
9560 : 1 : GET_MODE (XEXP (x, 0)));
9561 : : default:
9562 : : break;
9563 : : }
9564 : :
9565 : : /* The only other cases we handle are MIN, MAX, and comparisons if the
9566 : : operands are the same as REG and VAL. */
9567 : :
9568 : 461886 : else if (COMPARISON_P (x) || COMMUTATIVE_ARITH_P (x))
9569 : : {
9570 : 231881 : if (rtx_equal_p (XEXP (x, 0), val))
9571 : : {
9572 : 3 : std::swap (val, reg);
9573 : 3 : cond = swap_condition (cond);
9574 : : }
9575 : :
9576 : 231881 : if (rtx_equal_p (XEXP (x, 0), reg) && rtx_equal_p (XEXP (x, 1), val))
9577 : : {
9578 : 209462 : if (COMPARISON_P (x))
9579 : : {
9580 : 209258 : if (comparison_dominates_p (cond, code))
9581 : 471 : return VECTOR_MODE_P (GET_MODE (x)) ? x : const_true_rtx;
9582 : :
9583 : 208787 : code = reversed_comparison_code (x, NULL);
9584 : 208787 : if (code != UNKNOWN
9585 : 208787 : && comparison_dominates_p (cond, code))
9586 : 32 : return CONST0_RTX (GET_MODE (x));
9587 : : else
9588 : 208755 : return x;
9589 : : }
9590 : 204 : else if (code == SMAX || code == SMIN
9591 : 204 : || code == UMIN || code == UMAX)
9592 : : {
9593 : 38 : int unsignedp = (code == UMIN || code == UMAX);
9594 : :
9595 : : /* Do not reverse the condition when it is NE or EQ.
9596 : : This is because we cannot conclude anything about
9597 : : the value of 'SMAX (x, y)' when x is not equal to y,
9598 : : but we can when x equals y. */
9599 : 38 : if ((code == SMAX || code == UMAX)
9600 : 35 : && ! (cond == EQ || cond == NE))
9601 : 2 : cond = reverse_condition (cond);
9602 : :
9603 : 5 : switch (cond)
9604 : : {
9605 : 0 : case GE: case GT:
9606 : 0 : return unsignedp ? x : XEXP (x, 1);
9607 : 5 : case LE: case LT:
9608 : 5 : return unsignedp ? x : XEXP (x, 0);
9609 : 0 : case GEU: case GTU:
9610 : 0 : return unsignedp ? XEXP (x, 1) : x;
9611 : 0 : case LEU: case LTU:
9612 : 0 : return unsignedp ? XEXP (x, 0) : x;
9613 : : default:
9614 : : break;
9615 : : }
9616 : : }
9617 : : }
9618 : : }
9619 : 230005 : else if (code == SUBREG)
9620 : : {
9621 : 7364 : machine_mode inner_mode = GET_MODE (SUBREG_REG (x));
9622 : 7364 : rtx new_rtx, r = known_cond (SUBREG_REG (x), cond, reg, val);
9623 : :
9624 : 7364 : if (SUBREG_REG (x) != r)
9625 : : {
9626 : : /* We must simplify subreg here, before we lose track of the
9627 : : original inner_mode. */
9628 : 16 : new_rtx = simplify_subreg (GET_MODE (x), r,
9629 : 8 : inner_mode, SUBREG_BYTE (x));
9630 : 8 : if (new_rtx)
9631 : : return new_rtx;
9632 : : else
9633 : 8 : SUBST (SUBREG_REG (x), r);
9634 : : }
9635 : :
9636 : 7364 : return x;
9637 : : }
9638 : : /* We don't have to handle SIGN_EXTEND here, because even in the
9639 : : case of replacing something with a modeless CONST_INT, a
9640 : : CONST_INT is already (supposed to be) a valid sign extension for
9641 : : its narrower mode, which implies it's already properly
9642 : : sign-extended for the wider mode. Now, for ZERO_EXTEND, the
9643 : : story is different. */
9644 : 222641 : else if (code == ZERO_EXTEND)
9645 : : {
9646 : 863 : machine_mode inner_mode = GET_MODE (XEXP (x, 0));
9647 : 863 : rtx new_rtx, r = known_cond (XEXP (x, 0), cond, reg, val);
9648 : :
9649 : 863 : if (XEXP (x, 0) != r)
9650 : : {
9651 : : /* We must simplify the zero_extend here, before we lose
9652 : : track of the original inner_mode. */
9653 : 0 : new_rtx = simplify_unary_operation (ZERO_EXTEND, GET_MODE (x),
9654 : : r, inner_mode);
9655 : 0 : if (new_rtx)
9656 : : return new_rtx;
9657 : : else
9658 : 0 : SUBST (XEXP (x, 0), r);
9659 : : }
9660 : :
9661 : 863 : return x;
9662 : : }
9663 : :
9664 : 244396 : fmt = GET_RTX_FORMAT (code);
9665 : 563426 : for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
9666 : : {
9667 : 319030 : if (fmt[i] == 'e')
9668 : 151212 : SUBST (XEXP (x, i), known_cond (XEXP (x, i), cond, reg, val));
9669 : 167818 : else if (fmt[i] == 'E')
9670 : 8780 : for (j = XVECLEN (x, i) - 1; j >= 0; j--)
9671 : 7072 : SUBST (XVECEXP (x, i, j), known_cond (XVECEXP (x, i, j),
9672 : : cond, reg, val));
9673 : : }
9674 : :
9675 : : return x;
9676 : : }
9677 : :
9678 : : /* See if X and Y are equal for the purposes of seeing if we can rewrite an
9679 : : assignment as a field assignment. */
9680 : :
9681 : : static bool
9682 : 567629 : rtx_equal_for_field_assignment_p (rtx x, rtx y, bool widen_x)
9683 : : {
9684 : 567629 : if (widen_x && GET_MODE (x) != GET_MODE (y))
9685 : : {
9686 : 56606 : if (paradoxical_subreg_p (GET_MODE (x), GET_MODE (y)))
9687 : : return false;
9688 : 56606 : if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN)
9689 : : return false;
9690 : 56606 : x = adjust_address_nv (x, GET_MODE (y),
9691 : : byte_lowpart_offset (GET_MODE (y),
9692 : : GET_MODE (x)));
9693 : : }
9694 : :
9695 : 567629 : if (x == y || rtx_equal_p (x, y))
9696 : 8535 : return true;
9697 : :
9698 : 559094 : if (x == 0 || y == 0 || GET_MODE (x) != GET_MODE (y))
9699 : : return false;
9700 : :
9701 : : /* Check for a paradoxical SUBREG of a MEM compared with the MEM.
9702 : : Note that all SUBREGs of MEM are paradoxical; otherwise they
9703 : : would have been rewritten. */
9704 : 97130 : if (MEM_P (x) && GET_CODE (y) == SUBREG
9705 : 6220 : && MEM_P (SUBREG_REG (y))
9706 : 559094 : && rtx_equal_p (SUBREG_REG (y),
9707 : 0 : gen_lowpart (GET_MODE (SUBREG_REG (y)), x)))
9708 : : return true;
9709 : :
9710 : 59691 : if (MEM_P (y) && GET_CODE (x) == SUBREG
9711 : 5000 : && MEM_P (SUBREG_REG (x))
9712 : 559228 : && rtx_equal_p (SUBREG_REG (x),
9713 : 134 : gen_lowpart (GET_MODE (SUBREG_REG (x)), y)))
9714 : : return true;
9715 : :
9716 : : /* We used to see if get_last_value of X and Y were the same but that's
9717 : : not correct. In one direction, we'll cause the assignment to have
9718 : : the wrong destination and in the case, we'll import a register into this
9719 : : insn that might have already have been dead. So fail if none of the
9720 : : above cases are true. */
9721 : : return false;
9722 : : }
9723 : :
9724 : : /* See if X, a SET operation, can be rewritten as a bit-field assignment.
9725 : : Return that assignment if so.
9726 : :
9727 : : We only handle the most common cases. */
9728 : :
9729 : : static rtx
9730 : 43310737 : make_field_assignment (rtx x)
9731 : : {
9732 : 43310737 : rtx dest = SET_DEST (x);
9733 : 43310737 : rtx src = SET_SRC (x);
9734 : 43310737 : rtx assign;
9735 : 43310737 : rtx rhs, lhs;
9736 : 43310737 : HOST_WIDE_INT c1;
9737 : 43310737 : HOST_WIDE_INT pos;
9738 : 43310737 : unsigned HOST_WIDE_INT len;
9739 : 43310737 : rtx other;
9740 : :
9741 : : /* All the rules in this function are specific to scalar integers. */
9742 : 43310737 : scalar_int_mode mode;
9743 : 63715840 : if (!is_a <scalar_int_mode> (GET_MODE (dest), &mode))
9744 : : return x;
9745 : :
9746 : : /* If SRC was (and (not (ashift (const_int 1) POS)) DEST), this is
9747 : : a clear of a one-bit field. We will have changed it to
9748 : : (and (rotate (const_int -2) POS) DEST), so check for that. Also check
9749 : : for a SUBREG. */
9750 : :
9751 : 1246265 : if (GET_CODE (src) == AND && GET_CODE (XEXP (src, 0)) == ROTATE
9752 : 2281 : && CONST_INT_P (XEXP (XEXP (src, 0), 0))
9753 : 559 : && INTVAL (XEXP (XEXP (src, 0), 0)) == -2
9754 : 20412743 : && rtx_equal_for_field_assignment_p (dest, XEXP (src, 1)))
9755 : : {
9756 : 145 : assign = make_extraction (VOIDmode, dest, 0, XEXP (XEXP (src, 0), 1),
9757 : : 1, true, true, false);
9758 : 145 : if (assign != 0)
9759 : 142 : return gen_rtx_SET (assign, const0_rtx);
9760 : : return x;
9761 : : }
9762 : :
9763 : 1246120 : if (GET_CODE (src) == AND && GET_CODE (XEXP (src, 0)) == SUBREG
9764 : 93154 : && subreg_lowpart_p (XEXP (src, 0))
9765 : 93123 : && partial_subreg_p (XEXP (src, 0))
9766 : 24499 : && GET_CODE (SUBREG_REG (XEXP (src, 0))) == ROTATE
9767 : 113 : && CONST_INT_P (XEXP (SUBREG_REG (XEXP (src, 0)), 0))
9768 : 49 : && INTVAL (XEXP (SUBREG_REG (XEXP (src, 0)), 0)) == -2
9769 : 20412088 : && rtx_equal_for_field_assignment_p (dest, XEXP (src, 1)))
9770 : : {
9771 : 14 : assign = make_extraction (VOIDmode, dest, 0,
9772 : 7 : XEXP (SUBREG_REG (XEXP (src, 0)), 1),
9773 : : 1, true, true, false);
9774 : 7 : if (assign != 0)
9775 : 7 : return gen_rtx_SET (assign, const0_rtx);
9776 : : return x;
9777 : : }
9778 : :
9779 : : /* If SRC is (ior (ashift (const_int 1) POS) DEST), this is a set of a
9780 : : one-bit field. */
9781 : 1817346 : if (GET_CODE (src) == IOR && GET_CODE (XEXP (src, 0)) == ASHIFT
9782 : 441955 : && XEXP (XEXP (src, 0), 0) == const1_rtx
9783 : 20413778 : && rtx_equal_for_field_assignment_p (dest, XEXP (src, 1)))
9784 : : {
9785 : 489 : assign = make_extraction (VOIDmode, dest, 0, XEXP (XEXP (src, 0), 1),
9786 : : 1, true, true, false);
9787 : 489 : if (assign != 0)
9788 : 458 : return gen_rtx_SET (assign, const1_rtx);
9789 : : return x;
9790 : : }
9791 : :
9792 : : /* If DEST is already a field assignment, i.e. ZERO_EXTRACT, and the
9793 : : SRC is an AND with all bits of that field set, then we can discard
9794 : : the AND. */
9795 : 20411543 : if (GET_CODE (dest) == ZERO_EXTRACT
9796 : 2728 : && CONST_INT_P (XEXP (dest, 1))
9797 : 2728 : && GET_CODE (src) == AND
9798 : 18 : && CONST_INT_P (XEXP (src, 1)))
9799 : : {
9800 : 18 : HOST_WIDE_INT width = INTVAL (XEXP (dest, 1));
9801 : 18 : unsigned HOST_WIDE_INT and_mask = INTVAL (XEXP (src, 1));
9802 : 18 : unsigned HOST_WIDE_INT ze_mask;
9803 : :
9804 : 18 : if (width >= HOST_BITS_PER_WIDE_INT)
9805 : : ze_mask = -1;
9806 : : else
9807 : 18 : ze_mask = (HOST_WIDE_INT_1U << width) - 1;
9808 : :
9809 : : /* Complete overlap. We can remove the source AND. */
9810 : 18 : if ((and_mask & ze_mask) == ze_mask)
9811 : 18 : return gen_rtx_SET (dest, XEXP (src, 0));
9812 : :
9813 : : /* Partial overlap. We can reduce the source AND. */
9814 : 0 : if ((and_mask & ze_mask) != and_mask)
9815 : : {
9816 : 0 : src = gen_rtx_AND (mode, XEXP (src, 0),
9817 : : gen_int_mode (and_mask & ze_mask, mode));
9818 : 0 : return gen_rtx_SET (dest, src);
9819 : : }
9820 : : }
9821 : :
9822 : : /* The other case we handle is assignments into a constant-position
9823 : : field. They look like (ior/xor (and DEST C1) OTHER). If C1 represents
9824 : : a mask that has all one bits except for a group of zero bits and
9825 : : OTHER is known to have zeros where C1 has ones, this is such an
9826 : : assignment. Compute the position and length from C1. Shift OTHER
9827 : : to the appropriate position, force it to the required mode, and
9828 : : make the extraction. Check for the AND in both operands. */
9829 : :
9830 : : /* One or more SUBREGs might obscure the constant-position field
9831 : : assignment. The first one we are likely to encounter is an outer
9832 : : narrowing SUBREG, which we can just strip for the purposes of
9833 : : identifying the constant-field assignment. */
9834 : 20411525 : scalar_int_mode src_mode = mode;
9835 : 20411525 : if (GET_CODE (src) == SUBREG
9836 : 207792 : && subreg_lowpart_p (src)
9837 : 20599653 : && is_a <scalar_int_mode> (GET_MODE (SUBREG_REG (src)), &src_mode))
9838 : : src = SUBREG_REG (src);
9839 : :
9840 : 20411525 : if (GET_CODE (src) != IOR && GET_CODE (src) != XOR)
9841 : : return x;
9842 : :
9843 : 1979281 : rhs = expand_compound_operation (XEXP (src, 0));
9844 : 1979281 : lhs = expand_compound_operation (XEXP (src, 1));
9845 : :
9846 : 1979281 : if (GET_CODE (rhs) == AND
9847 : 794895 : && CONST_INT_P (XEXP (rhs, 1))
9848 : 2414293 : && rtx_equal_for_field_assignment_p (XEXP (rhs, 0), dest))
9849 : 7923 : c1 = INTVAL (XEXP (rhs, 1)), other = lhs;
9850 : : /* The second SUBREG that might get in the way is a paradoxical
9851 : : SUBREG around the first operand of the AND. We want to
9852 : : pretend the operand is as wide as the destination here. We
9853 : : do this by adjusting the MEM to wider mode for the sole
9854 : : purpose of the call to rtx_equal_for_field_assignment_p. Also
9855 : : note this trick only works for MEMs. */
9856 : 1971358 : else if (GET_CODE (rhs) == AND
9857 : 786972 : && paradoxical_subreg_p (XEXP (rhs, 0))
9858 : 63509 : && MEM_P (SUBREG_REG (XEXP (rhs, 0)))
9859 : 31156 : && CONST_INT_P (XEXP (rhs, 1))
9860 : 2002514 : && rtx_equal_for_field_assignment_p (SUBREG_REG (XEXP (rhs, 0)),
9861 : : dest, true))
9862 : 0 : c1 = INTVAL (XEXP (rhs, 1)), other = lhs;
9863 : 1971358 : else if (GET_CODE (lhs) == AND
9864 : 83556 : && CONST_INT_P (XEXP (lhs, 1))
9865 : 2045015 : && rtx_equal_for_field_assignment_p (XEXP (lhs, 0), dest))
9866 : 22 : c1 = INTVAL (XEXP (lhs, 1)), other = rhs;
9867 : : /* The second SUBREG that might get in the way is a paradoxical
9868 : : SUBREG around the first operand of the AND. We want to
9869 : : pretend the operand is as wide as the destination here. We
9870 : : do this by adjusting the MEM to wider mode for the sole
9871 : : purpose of the call to rtx_equal_for_field_assignment_p. Also
9872 : : note this trick only works for MEMs. */
9873 : 1971336 : else if (GET_CODE (lhs) == AND
9874 : 83534 : && paradoxical_subreg_p (XEXP (lhs, 0))
9875 : 36729 : && MEM_P (SUBREG_REG (XEXP (lhs, 0)))
9876 : 25450 : && CONST_INT_P (XEXP (lhs, 1))
9877 : 1996786 : && rtx_equal_for_field_assignment_p (SUBREG_REG (XEXP (lhs, 0)),
9878 : : dest, true))
9879 : 0 : c1 = INTVAL (XEXP (lhs, 1)), other = rhs;
9880 : : else
9881 : 1971336 : return x;
9882 : :
9883 : 7945 : pos = get_pos_from_mask ((~c1) & GET_MODE_MASK (mode), &len);
9884 : 7945 : if (pos < 0
9885 : 6566 : || pos + len > GET_MODE_PRECISION (mode)
9886 : 6566 : || GET_MODE_PRECISION (mode) > HOST_BITS_PER_WIDE_INT
9887 : 14507 : || (c1 & nonzero_bits (other, mode)) != 0)
9888 : 1477 : return x;
9889 : :
9890 : 6468 : assign = make_extraction (VOIDmode, dest, pos, NULL_RTX, len,
9891 : : true, true, false);
9892 : 6468 : if (assign == 0)
9893 : : return x;
9894 : :
9895 : : /* The mode to use for the source is the mode of the assignment, or of
9896 : : what is inside a possible STRICT_LOW_PART. */
9897 : 12912 : machine_mode new_mode = (GET_CODE (assign) == STRICT_LOW_PART
9898 : 6456 : ? GET_MODE (XEXP (assign, 0)) : GET_MODE (assign));
9899 : :
9900 : : /* Shift OTHER right POS places and make it the source, restricting it
9901 : : to the proper length and mode. */
9902 : :
9903 : 6456 : src = canon_reg_for_combine (simplify_shift_const (NULL_RTX, LSHIFTRT,
9904 : : src_mode, other, pos),
9905 : : dest);
9906 : 12912 : src = force_to_mode (src, new_mode,
9907 : : len >= HOST_BITS_PER_WIDE_INT
9908 : : ? HOST_WIDE_INT_M1U
9909 : 6456 : : (HOST_WIDE_INT_1U << len) - 1, false);
9910 : :
9911 : : /* If SRC is masked by an AND that does not make a difference in
9912 : : the value being stored, strip it. */
9913 : 6456 : if (GET_CODE (assign) == ZERO_EXTRACT
9914 : 6407 : && CONST_INT_P (XEXP (assign, 1))
9915 : 6407 : && INTVAL (XEXP (assign, 1)) < HOST_BITS_PER_WIDE_INT
9916 : 6407 : && GET_CODE (src) == AND
9917 : 0 : && CONST_INT_P (XEXP (src, 1))
9918 : 0 : && UINTVAL (XEXP (src, 1))
9919 : 0 : == (HOST_WIDE_INT_1U << INTVAL (XEXP (assign, 1))) - 1)
9920 : 0 : src = XEXP (src, 0);
9921 : :
9922 : 6456 : return gen_rtx_SET (assign, src);
9923 : : }
9924 : :
9925 : : /* See if X is of the form (+ (* a c) (* b c)) and convert to (* (+ a b) c)
9926 : : if so. */
9927 : :
9928 : : static rtx
9929 : 47499914 : apply_distributive_law (rtx x)
9930 : : {
9931 : 47499914 : enum rtx_code code = GET_CODE (x);
9932 : 47499914 : enum rtx_code inner_code;
9933 : 47499914 : rtx lhs, rhs, other;
9934 : 47499914 : rtx tem;
9935 : :
9936 : : /* Distributivity is not true for floating point as it can change the
9937 : : value. So we don't do it unless -funsafe-math-optimizations. */
9938 : 47499914 : if (FLOAT_MODE_P (GET_MODE (x))
9939 : 3476598 : && ! flag_unsafe_math_optimizations)
9940 : : return x;
9941 : :
9942 : : /* The outer operation can only be one of the following: */
9943 : 44442169 : if (code != IOR && code != AND && code != XOR
9944 : 44442169 : && code != PLUS && code != MINUS)
9945 : : return x;
9946 : :
9947 : 44428777 : lhs = XEXP (x, 0);
9948 : 44428777 : rhs = XEXP (x, 1);
9949 : :
9950 : : /* If either operand is a primitive we can't do anything, so get out
9951 : : fast. */
9952 : 44428777 : if (OBJECT_P (lhs) || OBJECT_P (rhs))
9953 : : return x;
9954 : :
9955 : 2798699 : lhs = expand_compound_operation (lhs);
9956 : 2798699 : rhs = expand_compound_operation (rhs);
9957 : 2798699 : inner_code = GET_CODE (lhs);
9958 : 2798699 : if (inner_code != GET_CODE (rhs))
9959 : : return x;
9960 : :
9961 : : /* See if the inner and outer operations distribute. */
9962 : 760462 : switch (inner_code)
9963 : : {
9964 : 253435 : case LSHIFTRT:
9965 : 253435 : case ASHIFTRT:
9966 : 253435 : case AND:
9967 : 253435 : case IOR:
9968 : : /* These all distribute except over PLUS. */
9969 : 253435 : if (code == PLUS || code == MINUS)
9970 : : return x;
9971 : : break;
9972 : :
9973 : 87904 : case MULT:
9974 : 87904 : if (code != PLUS && code != MINUS)
9975 : : return x;
9976 : : break;
9977 : :
9978 : : case ASHIFT:
9979 : : /* This is also a multiply, so it distributes over everything. */
9980 : : break;
9981 : :
9982 : : /* This used to handle SUBREG, but this turned out to be counter-
9983 : : productive, since (subreg (op ...)) usually is not handled by
9984 : : insn patterns, and this "optimization" therefore transformed
9985 : : recognizable patterns into unrecognizable ones. Therefore the
9986 : : SUBREG case was removed from here.
9987 : :
9988 : : It is possible that distributing SUBREG over arithmetic operations
9989 : : leads to an intermediate result than can then be optimized further,
9990 : : e.g. by moving the outer SUBREG to the other side of a SET as done
9991 : : in simplify_set. This seems to have been the original intent of
9992 : : handling SUBREGs here.
9993 : :
9994 : : However, with current GCC this does not appear to actually happen,
9995 : : at least on major platforms. If some case is found where removing
9996 : : the SUBREG case here prevents follow-on optimizations, distributing
9997 : : SUBREGs ought to be re-added at that place, e.g. in simplify_set. */
9998 : :
9999 : : default:
10000 : : return x;
10001 : : }
10002 : :
10003 : : /* Set LHS and RHS to the inner operands (A and B in the example
10004 : : above) and set OTHER to the common operand (C in the example).
10005 : : There is only one way to do this unless the inner operation is
10006 : : commutative. */
10007 : 266857 : if (COMMUTATIVE_ARITH_P (lhs)
10008 : 266857 : && rtx_equal_p (XEXP (lhs, 0), XEXP (rhs, 0)))
10009 : 2050 : other = XEXP (lhs, 0), lhs = XEXP (lhs, 1), rhs = XEXP (rhs, 1);
10010 : 264807 : else if (COMMUTATIVE_ARITH_P (lhs)
10011 : 264807 : && rtx_equal_p (XEXP (lhs, 0), XEXP (rhs, 1)))
10012 : 16 : other = XEXP (lhs, 0), lhs = XEXP (lhs, 1), rhs = XEXP (rhs, 0);
10013 : 264791 : else if (COMMUTATIVE_ARITH_P (lhs)
10014 : 264791 : && rtx_equal_p (XEXP (lhs, 1), XEXP (rhs, 0)))
10015 : 10856 : other = XEXP (lhs, 1), lhs = XEXP (lhs, 0), rhs = XEXP (rhs, 1);
10016 : 253935 : else if (rtx_equal_p (XEXP (lhs, 1), XEXP (rhs, 1)))
10017 : 58551 : other = XEXP (lhs, 1), lhs = XEXP (lhs, 0), rhs = XEXP (rhs, 0);
10018 : : else
10019 : : return x;
10020 : :
10021 : : /* Form the new inner operation, seeing if it simplifies first. */
10022 : 71473 : tem = simplify_gen_binary (code, GET_MODE (x), lhs, rhs);
10023 : :
10024 : : /* There is one exception to the general way of distributing:
10025 : : (a | c) ^ (b | c) -> (a ^ b) & ~c */
10026 : 71473 : if (code == XOR && inner_code == IOR)
10027 : : {
10028 : 72 : inner_code = AND;
10029 : 72 : other = simplify_gen_unary (NOT, GET_MODE (x), other, GET_MODE (x));
10030 : : }
10031 : :
10032 : : /* We may be able to continuing distributing the result, so call
10033 : : ourselves recursively on the inner operation before forming the
10034 : : outer operation, which we return. */
10035 : 71473 : return simplify_gen_binary (inner_code, GET_MODE (x),
10036 : 71473 : apply_distributive_law (tem), other);
10037 : : }
10038 : :
10039 : : /* See if X is of the form (* (+ A B) C), and if so convert to
10040 : : (+ (* A C) (* B C)) and try to simplify.
10041 : :
10042 : : Most of the time, this results in no change. However, if some of
10043 : : the operands are the same or inverses of each other, simplifications
10044 : : will result.
10045 : :
10046 : : For example, (and (ior A B) (not B)) can occur as the result of
10047 : : expanding a bit field assignment. When we apply the distributive
10048 : : law to this, we get (ior (and (A (not B))) (and (B (not B)))),
10049 : : which then simplifies to (and (A (not B))).
10050 : :
10051 : : Note that no checks happen on the validity of applying the inverse
10052 : : distributive law. This is pointless since we can do it in the
10053 : : few places where this routine is called.
10054 : :
10055 : : N is the index of the term that is decomposed (the arithmetic operation,
10056 : : i.e. (+ A B) in the first example above). !N is the index of the term that
10057 : : is distributed, i.e. of C in the first example above. */
10058 : : static rtx
10059 : 1603646 : distribute_and_simplify_rtx (rtx x, int n)
10060 : : {
10061 : 1603646 : machine_mode mode;
10062 : 1603646 : enum rtx_code outer_code, inner_code;
10063 : 1603646 : rtx decomposed, distributed, inner_op0, inner_op1, new_op0, new_op1, tmp;
10064 : :
10065 : : /* Distributivity is not true for floating point as it can change the
10066 : : value. So we don't do it unless -funsafe-math-optimizations. */
10067 : 1603646 : if (FLOAT_MODE_P (GET_MODE (x))
10068 : 97973 : && ! flag_unsafe_math_optimizations)
10069 : : return NULL_RTX;
10070 : :
10071 : 1509560 : decomposed = XEXP (x, n);
10072 : 1509560 : if (!ARITHMETIC_P (decomposed))
10073 : : return NULL_RTX;
10074 : :
10075 : 1509560 : mode = GET_MODE (x);
10076 : 1509560 : outer_code = GET_CODE (x);
10077 : 1509560 : distributed = XEXP (x, !n);
10078 : :
10079 : 1509560 : inner_code = GET_CODE (decomposed);
10080 : 1509560 : inner_op0 = XEXP (decomposed, 0);
10081 : 1509560 : inner_op1 = XEXP (decomposed, 1);
10082 : :
10083 : : /* Special case (and (xor B C) (not A)), which is equivalent to
10084 : : (xor (ior A B) (ior A C)) */
10085 : 1509560 : if (outer_code == AND && inner_code == XOR && GET_CODE (distributed) == NOT)
10086 : : {
10087 : 64 : distributed = XEXP (distributed, 0);
10088 : 64 : outer_code = IOR;
10089 : : }
10090 : :
10091 : 1509560 : if (n == 0)
10092 : : {
10093 : : /* Distribute the second term. */
10094 : 1460238 : new_op0 = simplify_gen_binary (outer_code, mode, inner_op0, distributed);
10095 : 1460238 : new_op1 = simplify_gen_binary (outer_code, mode, inner_op1, distributed);
10096 : : }
10097 : : else
10098 : : {
10099 : : /* Distribute the first term. */
10100 : 49322 : new_op0 = simplify_gen_binary (outer_code, mode, distributed, inner_op0);
10101 : 49322 : new_op1 = simplify_gen_binary (outer_code, mode, distributed, inner_op1);
10102 : : }
10103 : :
10104 : 1509560 : tmp = apply_distributive_law (simplify_gen_binary (inner_code, mode,
10105 : : new_op0, new_op1));
10106 : 1509560 : if (GET_CODE (tmp) != outer_code
10107 : 1509560 : && (set_src_cost (tmp, mode, optimize_this_for_speed_p)
10108 : 250329 : < set_src_cost (x, mode, optimize_this_for_speed_p)))
10109 : : return tmp;
10110 : :
10111 : : return NULL_RTX;
10112 : : }
10113 : :
10114 : : /* Simplify a logical `and' of VAROP with the constant CONSTOP, to be done
10115 : : in MODE. Return an equivalent form, if different from (and VAROP
10116 : : (const_int CONSTOP)). Otherwise, return NULL_RTX. */
10117 : :
10118 : : static rtx
10119 : 11503329 : simplify_and_const_int_1 (scalar_int_mode mode, rtx varop,
10120 : : unsigned HOST_WIDE_INT constop)
10121 : : {
10122 : 11503329 : unsigned HOST_WIDE_INT nonzero;
10123 : 11503329 : unsigned HOST_WIDE_INT orig_constop;
10124 : 11503329 : rtx orig_varop;
10125 : 11503329 : int i;
10126 : :
10127 : 11503329 : orig_varop = varop;
10128 : 11503329 : orig_constop = constop;
10129 : 11503329 : if (GET_CODE (varop) == CLOBBER)
10130 : : return NULL_RTX;
10131 : :
10132 : : /* Simplify VAROP knowing that we will be only looking at some of the
10133 : : bits in it.
10134 : :
10135 : : Note by passing in CONSTOP, we guarantee that the bits not set in
10136 : : CONSTOP are not significant and will never be examined. We must
10137 : : ensure that is the case by explicitly masking out those bits
10138 : : before returning. */
10139 : 11503329 : varop = force_to_mode (varop, mode, constop, false);
10140 : :
10141 : : /* If VAROP is a CLOBBER, we will fail so return it. */
10142 : 11503329 : if (GET_CODE (varop) == CLOBBER)
10143 : : return varop;
10144 : :
10145 : : /* If VAROP is a CONST_INT, then we need to apply the mask in CONSTOP
10146 : : to VAROP and return the new constant. */
10147 : 11503319 : if (CONST_INT_P (varop))
10148 : 302799 : return gen_int_mode (INTVAL (varop) & constop, mode);
10149 : :
10150 : : /* See what bits may be nonzero in VAROP. Unlike the general case of
10151 : : a call to nonzero_bits, here we don't care about bits outside
10152 : : MODE unless WORD_REGISTER_OPERATIONS is true. */
10153 : :
10154 : 11200520 : scalar_int_mode tmode = mode;
10155 : 11200520 : if (WORD_REGISTER_OPERATIONS && GET_MODE_BITSIZE (mode) < BITS_PER_WORD)
10156 : : tmode = word_mode;
10157 : 11200520 : nonzero = nonzero_bits (varop, tmode) & GET_MODE_MASK (tmode);
10158 : :
10159 : : /* Turn off all bits in the constant that are known to already be zero.
10160 : : Thus, if the AND isn't needed at all, we will have CONSTOP == NONZERO_BITS
10161 : : which is tested below. */
10162 : :
10163 : 11200520 : constop &= nonzero;
10164 : :
10165 : : /* If we don't have any bits left, return zero. */
10166 : 11200520 : if (constop == 0 && !side_effects_p (varop))
10167 : 0 : return const0_rtx;
10168 : :
10169 : : /* If VAROP is a NEG of something known to be zero or 1 and CONSTOP is
10170 : : a power of two, we can replace this with an ASHIFT. */
10171 : 32495 : if (GET_CODE (varop) == NEG && nonzero_bits (XEXP (varop, 0), tmode) == 1
10172 : 11206786 : && (i = exact_log2 (constop)) >= 0)
10173 : 117 : return simplify_shift_const (NULL_RTX, ASHIFT, mode, XEXP (varop, 0), i);
10174 : :
10175 : : /* If VAROP is an IOR or XOR, apply the AND to both branches of the IOR
10176 : : or XOR, then try to apply the distributive law. This may eliminate
10177 : : operations if either branch can be simplified because of the AND.
10178 : : It may also make some cases more complex, but those cases probably
10179 : : won't match a pattern either with or without this. */
10180 : :
10181 : 11200403 : if (GET_CODE (varop) == IOR || GET_CODE (varop) == XOR)
10182 : : {
10183 : 72233 : scalar_int_mode varop_mode = as_a <scalar_int_mode> (GET_MODE (varop));
10184 : 72233 : return
10185 : 72233 : gen_lowpart
10186 : 72233 : (mode,
10187 : : apply_distributive_law
10188 : 72233 : (simplify_gen_binary (GET_CODE (varop), varop_mode,
10189 : : simplify_and_const_int (NULL_RTX, varop_mode,
10190 : : XEXP (varop, 0),
10191 : : constop),
10192 : : simplify_and_const_int (NULL_RTX, varop_mode,
10193 : : XEXP (varop, 1),
10194 : : constop))));
10195 : : }
10196 : :
10197 : : /* If VAROP is PLUS, and the constant is a mask of low bits, distribute
10198 : : the AND and see if one of the operands simplifies to zero. If so, we
10199 : : may eliminate it. */
10200 : :
10201 : 11128170 : if (GET_CODE (varop) == PLUS
10202 : 11128170 : && pow2p_hwi (constop + 1))
10203 : : {
10204 : 430171 : rtx o0, o1;
10205 : :
10206 : 430171 : o0 = simplify_and_const_int (NULL_RTX, mode, XEXP (varop, 0), constop);
10207 : 430171 : o1 = simplify_and_const_int (NULL_RTX, mode, XEXP (varop, 1), constop);
10208 : 430171 : if (o0 == const0_rtx)
10209 : : return o1;
10210 : 430171 : if (o1 == const0_rtx)
10211 : : return o0;
10212 : : }
10213 : :
10214 : : /* Make a SUBREG if necessary. If we can't make it, fail. */
10215 : 11128081 : varop = gen_lowpart (mode, varop);
10216 : 11128081 : if (varop == NULL_RTX || GET_CODE (varop) == CLOBBER)
10217 : : return NULL_RTX;
10218 : :
10219 : : /* If we are only masking insignificant bits, return VAROP. */
10220 : 11128081 : if (constop == nonzero)
10221 : : return varop;
10222 : :
10223 : 10630842 : if (varop == orig_varop && constop == orig_constop)
10224 : : return NULL_RTX;
10225 : :
10226 : : /* Otherwise, return an AND. */
10227 : 5688224 : return simplify_gen_binary (AND, mode, varop, gen_int_mode (constop, mode));
10228 : : }
10229 : :
10230 : :
10231 : : /* We have X, a logical `and' of VAROP with the constant CONSTOP, to be done
10232 : : in MODE.
10233 : :
10234 : : Return an equivalent form, if different from X. Otherwise, return X. If
10235 : : X is zero, we are to always construct the equivalent form. */
10236 : :
10237 : : static rtx
10238 : 11503329 : simplify_and_const_int (rtx x, scalar_int_mode mode, rtx varop,
10239 : : unsigned HOST_WIDE_INT constop)
10240 : : {
10241 : 11503329 : rtx tem = simplify_and_const_int_1 (mode, varop, constop);
10242 : 11503329 : if (tem)
10243 : : return tem;
10244 : :
10245 : 4942618 : if (!x)
10246 : 1238759 : x = simplify_gen_binary (AND, GET_MODE (varop), varop,
10247 : 1238759 : gen_int_mode (constop, mode));
10248 : 4942618 : if (GET_MODE (x) != mode)
10249 : 0 : x = gen_lowpart (mode, x);
10250 : : return x;
10251 : : }
10252 : :
10253 : : /* Given a REG X of mode XMODE, compute which bits in X can be nonzero.
10254 : : We don't care about bits outside of those defined in MODE.
10255 : : We DO care about all the bits in MODE, even if XMODE is smaller than MODE.
10256 : :
10257 : : For most X this is simply GET_MODE_MASK (GET_MODE (MODE)), but if X is
10258 : : a shift, AND, or zero_extract, we can do better. */
10259 : :
10260 : : static rtx
10261 : 419024188 : reg_nonzero_bits_for_combine (const_rtx x, scalar_int_mode xmode,
10262 : : scalar_int_mode mode,
10263 : : unsigned HOST_WIDE_INT *nonzero)
10264 : : {
10265 : 419024188 : rtx tem;
10266 : 419024188 : reg_stat_type *rsp;
10267 : :
10268 : : /* If X is a register whose nonzero bits value is current, use it.
10269 : : Otherwise, if X is a register whose value we can find, use that
10270 : : value. Otherwise, use the previously-computed global nonzero bits
10271 : : for this register. */
10272 : :
10273 : 419024188 : rsp = ®_stat[REGNO (x)];
10274 : 419024188 : if (rsp->last_set_value != 0
10275 : 390515183 : && (rsp->last_set_mode == mode
10276 : 1410 : || (REGNO (x) >= FIRST_PSEUDO_REGISTER
10277 : 0 : && GET_MODE_CLASS (rsp->last_set_mode) == MODE_INT
10278 : 0 : && GET_MODE_CLASS (mode) == MODE_INT))
10279 : 809537961 : && ((rsp->last_set_label >= label_tick_ebb_start
10280 : 297753457 : && rsp->last_set_label < label_tick)
10281 : 371231743 : || (rsp->last_set_label == label_tick
10282 : 278471427 : && DF_INSN_LUID (rsp->last_set) < subst_low_luid)
10283 : 120227788 : || (REGNO (x) >= FIRST_PSEUDO_REGISTER
10284 : 120172235 : && REGNO (x) < reg_n_sets_max
10285 : 120172150 : && REG_N_SETS (REGNO (x)) == 1
10286 : 134283476 : && !REGNO_REG_SET_P
10287 : : (DF_LR_IN (ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb),
10288 : : REGNO (x)))))
10289 : : {
10290 : : /* Note that, even if the precision of last_set_mode is lower than that
10291 : : of mode, record_value_for_reg invoked nonzero_bits on the register
10292 : : with nonzero_bits_mode (because last_set_mode is necessarily integral
10293 : : and HWI_COMPUTABLE_MODE_P in this case) so bits in nonzero_bits_mode
10294 : : are all valid, hence in mode too since nonzero_bits_mode is defined
10295 : : to the largest HWI_COMPUTABLE_MODE_P mode. */
10296 : 337345587 : *nonzero &= rsp->last_set_nonzero_bits;
10297 : 337345587 : return NULL;
10298 : : }
10299 : :
10300 : 81678601 : tem = get_last_value (x);
10301 : 81678601 : if (tem)
10302 : : {
10303 : : if (SHORT_IMMEDIATES_SIGN_EXTEND)
10304 : : tem = sign_extend_short_imm (tem, xmode, GET_MODE_PRECISION (mode));
10305 : :
10306 : : return tem;
10307 : : }
10308 : :
10309 : 81678595 : if (nonzero_sign_valid && rsp->nonzero_bits)
10310 : : {
10311 : 49839003 : unsigned HOST_WIDE_INT mask = rsp->nonzero_bits;
10312 : :
10313 : 49839003 : if (GET_MODE_PRECISION (xmode) < GET_MODE_PRECISION (mode))
10314 : : /* We don't know anything about the upper bits. */
10315 : 0 : mask |= GET_MODE_MASK (mode) ^ GET_MODE_MASK (xmode);
10316 : :
10317 : 49839003 : *nonzero &= mask;
10318 : : }
10319 : :
10320 : : return NULL;
10321 : : }
10322 : :
10323 : : /* Given a reg X of mode XMODE, return the number of bits at the high-order
10324 : : end of X that are known to be equal to the sign bit. X will be used
10325 : : in mode MODE; the returned value will always be between 1 and the
10326 : : number of bits in MODE. */
10327 : :
10328 : : static rtx
10329 : 121813977 : reg_num_sign_bit_copies_for_combine (const_rtx x, scalar_int_mode xmode,
10330 : : scalar_int_mode mode,
10331 : : unsigned int *result)
10332 : : {
10333 : 121813977 : rtx tem;
10334 : 121813977 : reg_stat_type *rsp;
10335 : :
10336 : 121813977 : rsp = ®_stat[REGNO (x)];
10337 : 121813977 : if (rsp->last_set_value != 0
10338 : 111405705 : && rsp->last_set_mode == mode
10339 : 233219513 : && ((rsp->last_set_label >= label_tick_ebb_start
10340 : 85304018 : && rsp->last_set_label < label_tick)
10341 : 106309573 : || (rsp->last_set_label == label_tick
10342 : 80208055 : && DF_INSN_LUID (rsp->last_set) < subst_low_luid)
10343 : 33644475 : || (REGNO (x) >= FIRST_PSEUDO_REGISTER
10344 : 33634168 : && REGNO (x) < reg_n_sets_max
10345 : 33634127 : && REG_N_SETS (REGNO (x)) == 1
10346 : 37773562 : && !REGNO_REG_SET_P
10347 : : (DF_LR_IN (ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb),
10348 : : REGNO (x)))))
10349 : : {
10350 : 96626388 : *result = rsp->last_set_sign_bit_copies;
10351 : 96626388 : return NULL;
10352 : : }
10353 : :
10354 : 25187589 : tem = get_last_value (x);
10355 : 25187589 : if (tem != 0)
10356 : : return tem;
10357 : :
10358 : 16241759 : if (nonzero_sign_valid && rsp->sign_bit_copies != 0
10359 : 37508931 : && GET_MODE_PRECISION (xmode) == GET_MODE_PRECISION (mode))
10360 : 12321347 : *result = rsp->sign_bit_copies;
10361 : :
10362 : : return NULL;
10363 : : }
10364 : :
10365 : : /* Return the number of "extended" bits there are in X, when interpreted
10366 : : as a quantity in MODE whose signedness is indicated by UNSIGNEDP. For
10367 : : unsigned quantities, this is the number of high-order zero bits.
10368 : : For signed quantities, this is the number of copies of the sign bit
10369 : : minus 1. In both case, this function returns the number of "spare"
10370 : : bits. For example, if two quantities for which this function returns
10371 : : at least 1 are added, the addition is known not to overflow.
10372 : :
10373 : : This function will always return 0 unless called during combine, which
10374 : : implies that it must be called from a define_split. */
10375 : :
10376 : : unsigned int
10377 : 0 : extended_count (const_rtx x, machine_mode mode, bool unsignedp)
10378 : : {
10379 : 0 : if (nonzero_sign_valid == 0)
10380 : : return 0;
10381 : :
10382 : 0 : scalar_int_mode int_mode;
10383 : 0 : return (unsignedp
10384 : 0 : ? (is_a <scalar_int_mode> (mode, &int_mode)
10385 : 0 : && HWI_COMPUTABLE_MODE_P (int_mode)
10386 : 0 : ? (unsigned int) (GET_MODE_PRECISION (int_mode) - 1
10387 : 0 : - floor_log2 (nonzero_bits (x, int_mode)))
10388 : : : 0)
10389 : 0 : : num_sign_bit_copies (x, mode) - 1);
10390 : : }
10391 : :
10392 : : /* This function is called from `simplify_shift_const' to merge two
10393 : : outer operations. Specifically, we have already found that we need
10394 : : to perform operation *POP0 with constant *PCONST0 at the outermost
10395 : : position. We would now like to also perform OP1 with constant CONST1
10396 : : (with *POP0 being done last).
10397 : :
10398 : : Return true if we can do the operation and update *POP0 and *PCONST0 with
10399 : : the resulting operation. *PCOMP_P is set to true if we would need to
10400 : : complement the innermost operand, otherwise it is unchanged.
10401 : :
10402 : : MODE is the mode in which the operation will be done. No bits outside
10403 : : the width of this mode matter. It is assumed that the width of this mode
10404 : : is smaller than or equal to HOST_BITS_PER_WIDE_INT.
10405 : :
10406 : : If *POP0 or OP1 are UNKNOWN, it means no operation is required. Only NEG, PLUS,
10407 : : IOR, XOR, and AND are supported. We may set *POP0 to SET if the proper
10408 : : result is simply *PCONST0.
10409 : :
10410 : : If the resulting operation cannot be expressed as one operation, we
10411 : : return false and do not change *POP0, *PCONST0, and *PCOMP_P. */
10412 : :
10413 : : static bool
10414 : 3409926 : merge_outer_ops (enum rtx_code *pop0, HOST_WIDE_INT *pconst0,
10415 : : enum rtx_code op1, HOST_WIDE_INT const1,
10416 : : machine_mode mode, bool *pcomp_p)
10417 : : {
10418 : 3409926 : enum rtx_code op0 = *pop0;
10419 : 3409926 : HOST_WIDE_INT const0 = *pconst0;
10420 : :
10421 : 3409926 : const0 &= GET_MODE_MASK (mode);
10422 : 3409926 : const1 &= GET_MODE_MASK (mode);
10423 : :
10424 : : /* If OP0 is an AND, clear unimportant bits in CONST1. */
10425 : 3409926 : if (op0 == AND)
10426 : 29081 : const1 &= const0;
10427 : :
10428 : : /* If OP0 or OP1 is UNKNOWN, this is easy. Similarly if they are the same or
10429 : : if OP0 is SET. */
10430 : :
10431 : 3409926 : if (op1 == UNKNOWN || op0 == SET)
10432 : : return true;
10433 : :
10434 : 3409926 : else if (op0 == UNKNOWN)
10435 : : op0 = op1, const0 = const1;
10436 : :
10437 : 69095 : else if (op0 == op1)
10438 : : {
10439 : 28791 : switch (op0)
10440 : : {
10441 : 28786 : case AND:
10442 : 28786 : const0 &= const1;
10443 : 28786 : break;
10444 : 5 : case IOR:
10445 : 5 : const0 |= const1;
10446 : 5 : break;
10447 : 0 : case XOR:
10448 : 0 : const0 ^= const1;
10449 : 0 : break;
10450 : 0 : case PLUS:
10451 : 0 : const0 += const1;
10452 : 0 : break;
10453 : : case NEG:
10454 : 3388707 : op0 = UNKNOWN;
10455 : : break;
10456 : : default:
10457 : : break;
10458 : : }
10459 : : }
10460 : :
10461 : : /* Otherwise, if either is a PLUS or NEG, we can't do anything. */
10462 : 40304 : else if (op0 == PLUS || op1 == PLUS || op0 == NEG || op1 == NEG)
10463 : : return false;
10464 : :
10465 : : /* If the two constants aren't the same, we can't do anything. The
10466 : : remaining six cases can all be done. */
10467 : 19990 : else if (const0 != const1)
10468 : : return false;
10469 : :
10470 : : else
10471 : 19085 : switch (op0)
10472 : : {
10473 : 8 : case IOR:
10474 : 8 : if (op1 == AND)
10475 : : /* (a & b) | b == b */
10476 : 0 : op0 = SET;
10477 : : else /* op1 == XOR */
10478 : : /* (a ^ b) | b == a | b */
10479 : : {;}
10480 : : break;
10481 : :
10482 : 18904 : case XOR:
10483 : 18904 : if (op1 == AND)
10484 : : /* (a & b) ^ b == (~a) & b */
10485 : 18904 : op0 = AND, *pcomp_p = true;
10486 : : else /* op1 == IOR */
10487 : : /* (a | b) ^ b == a & ~b */
10488 : 0 : op0 = AND, const0 = ~const0;
10489 : : break;
10490 : :
10491 : 173 : case AND:
10492 : 173 : if (op1 == IOR)
10493 : : /* (a | b) & b == b */
10494 : : op0 = SET;
10495 : : else /* op1 == XOR */
10496 : : /* (a ^ b) & b) == (~a) & b */
10497 : 173 : *pcomp_p = true;
10498 : : break;
10499 : : default:
10500 : : break;
10501 : : }
10502 : :
10503 : : /* Check for NO-OP cases. */
10504 : 3388707 : const0 &= GET_MODE_MASK (mode);
10505 : 3388707 : if (const0 == 0
10506 : 19786 : && (op0 == IOR || op0 == XOR || op0 == PLUS))
10507 : : op0 = UNKNOWN;
10508 : 3386791 : else if (const0 == 0 && op0 == AND)
10509 : : op0 = SET;
10510 : 3386791 : else if ((unsigned HOST_WIDE_INT) const0 == GET_MODE_MASK (mode)
10511 : 13385 : && op0 == AND)
10512 : 3388707 : op0 = UNKNOWN;
10513 : :
10514 : 3388707 : *pop0 = op0;
10515 : :
10516 : : /* ??? Slightly redundant with the above mask, but not entirely.
10517 : : Moving this above means we'd have to sign-extend the mode mask
10518 : : for the final test. */
10519 : 3388707 : if (op0 != UNKNOWN && op0 != NEG)
10520 : 3361360 : *pconst0 = trunc_int_for_mode (const0, mode);
10521 : :
10522 : : return true;
10523 : : }
10524 : :
10525 : : /* A helper to simplify_shift_const_1 to determine the mode we can perform
10526 : : the shift in. The original shift operation CODE is performed on OP in
10527 : : ORIG_MODE. Return the wider mode MODE if we can perform the operation
10528 : : in that mode. Return ORIG_MODE otherwise. We can also assume that the
10529 : : result of the shift is subject to operation OUTER_CODE with operand
10530 : : OUTER_CONST. */
10531 : :
10532 : : static scalar_int_mode
10533 : 623713 : try_widen_shift_mode (enum rtx_code code, rtx op, int count,
10534 : : scalar_int_mode orig_mode, scalar_int_mode mode,
10535 : : enum rtx_code outer_code, HOST_WIDE_INT outer_const)
10536 : : {
10537 : 623713 : gcc_assert (GET_MODE_PRECISION (mode) > GET_MODE_PRECISION (orig_mode));
10538 : :
10539 : : /* In general we can't perform in wider mode for right shift and rotate. */
10540 : 623713 : switch (code)
10541 : : {
10542 : 157009 : case ASHIFTRT:
10543 : : /* We can still widen if the bits brought in from the left are identical
10544 : : to the sign bit of ORIG_MODE. */
10545 : 157009 : if (num_sign_bit_copies (op, mode)
10546 : 157009 : > (unsigned) (GET_MODE_PRECISION (mode)
10547 : 157009 : - GET_MODE_PRECISION (orig_mode)))
10548 : 296 : return mode;
10549 : 156713 : return orig_mode;
10550 : :
10551 : 79429 : case LSHIFTRT:
10552 : : /* Similarly here but with zero bits. */
10553 : 79429 : if (HWI_COMPUTABLE_MODE_P (mode)
10554 : 79429 : && (nonzero_bits (op, mode) & ~GET_MODE_MASK (orig_mode)) == 0)
10555 : 13934 : return mode;
10556 : :
10557 : : /* We can also widen if the bits brought in will be masked off. This
10558 : : operation is performed in ORIG_MODE. */
10559 : 65495 : if (outer_code == AND)
10560 : : {
10561 : 24898 : int care_bits = low_bitmask_len (orig_mode, outer_const);
10562 : :
10563 : 24898 : if (care_bits >= 0
10564 : 24898 : && GET_MODE_PRECISION (orig_mode) - care_bits >= count)
10565 : 24642 : return mode;
10566 : : }
10567 : : /* fall through */
10568 : :
10569 : 42229 : case ROTATE:
10570 : 42229 : return orig_mode;
10571 : :
10572 : 0 : case ROTATERT:
10573 : 0 : gcc_unreachable ();
10574 : :
10575 : 385899 : default:
10576 : 385899 : return mode;
10577 : : }
10578 : : }
10579 : :
10580 : : /* Simplify a shift of VAROP by ORIG_COUNT bits. CODE says what kind
10581 : : of shift. The result of the shift is RESULT_MODE. Return NULL_RTX
10582 : : if we cannot simplify it. Otherwise, return a simplified value.
10583 : :
10584 : : The shift is normally computed in the widest mode we find in VAROP, as
10585 : : long as it isn't a different number of words than RESULT_MODE. Exceptions
10586 : : are ASHIFTRT and ROTATE, which are always done in their original mode. */
10587 : :
10588 : : static rtx
10589 : 22872216 : simplify_shift_const_1 (enum rtx_code code, machine_mode result_mode,
10590 : : rtx varop, int orig_count)
10591 : : {
10592 : 22872216 : enum rtx_code orig_code = code;
10593 : 22872216 : rtx orig_varop = varop;
10594 : 22872216 : int count, log2;
10595 : 22872216 : machine_mode mode = result_mode;
10596 : 22872216 : machine_mode shift_mode;
10597 : 22872216 : scalar_int_mode tmode, inner_mode, int_mode, int_varop_mode, int_result_mode;
10598 : : /* We form (outer_op (code varop count) (outer_const)). */
10599 : 22872216 : enum rtx_code outer_op = UNKNOWN;
10600 : 22872216 : HOST_WIDE_INT outer_const = 0;
10601 : 22872216 : bool complement_p = false;
10602 : 22872216 : rtx new_rtx, x;
10603 : :
10604 : : /* Make sure and truncate the "natural" shift on the way in. We don't
10605 : : want to do this inside the loop as it makes it more difficult to
10606 : : combine shifts. */
10607 : 22872216 : if (SHIFT_COUNT_TRUNCATED)
10608 : : orig_count &= GET_MODE_UNIT_BITSIZE (mode) - 1;
10609 : :
10610 : : /* If we were given an invalid count, don't do anything except exactly
10611 : : what was requested. */
10612 : :
10613 : 45744338 : if (orig_count < 0 || orig_count >= (int) GET_MODE_UNIT_PRECISION (mode))
10614 : : return NULL_RTX;
10615 : :
10616 : : count = orig_count;
10617 : :
10618 : : /* Unless one of the branches of the `if' in this loop does a `continue',
10619 : : we will `break' the loop after the `if'. */
10620 : :
10621 : 26710697 : while (count != 0)
10622 : : {
10623 : : /* If we have an operand of (clobber (const_int 0)), fail. */
10624 : 23122271 : if (GET_CODE (varop) == CLOBBER)
10625 : 22872216 : return NULL_RTX;
10626 : :
10627 : : /* Convert ROTATERT to ROTATE. */
10628 : 23122271 : if (code == ROTATERT)
10629 : : {
10630 : 11111 : unsigned int bitsize = GET_MODE_UNIT_PRECISION (result_mode);
10631 : 11111 : code = ROTATE;
10632 : 11111 : count = bitsize - count;
10633 : : }
10634 : :
10635 : 23122271 : shift_mode = result_mode;
10636 : 23122271 : if (shift_mode != mode)
10637 : : {
10638 : : /* We only change the modes of scalar shifts. */
10639 : 315558 : int_mode = as_a <scalar_int_mode> (mode);
10640 : 315558 : int_result_mode = as_a <scalar_int_mode> (result_mode);
10641 : 315558 : shift_mode = try_widen_shift_mode (code, varop, count,
10642 : : int_result_mode, int_mode,
10643 : : outer_op, outer_const);
10644 : : }
10645 : :
10646 : 23122271 : scalar_int_mode shift_unit_mode;
10647 : 65489469 : if (!is_a <scalar_int_mode> (GET_MODE_INNER (shift_mode),
10648 : : &shift_unit_mode))
10649 : : return NULL_RTX;
10650 : :
10651 : : /* Handle cases where the count is greater than the size of the mode
10652 : : minus 1. For ASHIFT, use the size minus one as the count (this can
10653 : : occur when simplifying (lshiftrt (ashiftrt ..))). For rotates,
10654 : : take the count modulo the size. For other shifts, the result is
10655 : : zero.
10656 : :
10657 : : Since these shifts are being produced by the compiler by combining
10658 : : multiple operations, each of which are defined, we know what the
10659 : : result is supposed to be. */
10660 : :
10661 : 23122271 : if (count > (GET_MODE_PRECISION (shift_unit_mode) - 1))
10662 : : {
10663 : 8565 : if (code == ASHIFTRT)
10664 : 8559 : count = GET_MODE_PRECISION (shift_unit_mode) - 1;
10665 : 6 : else if (code == ROTATE || code == ROTATERT)
10666 : 6 : count %= GET_MODE_PRECISION (shift_unit_mode);
10667 : : else
10668 : : {
10669 : : /* We can't simply return zero because there may be an
10670 : : outer op. */
10671 : 0 : varop = const0_rtx;
10672 : 0 : count = 0;
10673 : 0 : break;
10674 : : }
10675 : : }
10676 : :
10677 : : /* If we discovered we had to complement VAROP, leave. Making a NOT
10678 : : here would cause an infinite loop. */
10679 : 23122271 : if (complement_p)
10680 : : break;
10681 : :
10682 : 23111724 : if (shift_mode == shift_unit_mode)
10683 : : {
10684 : : /* An arithmetic right shift of a quantity known to be -1 or 0
10685 : : is a no-op. */
10686 : 22555539 : if (code == ASHIFTRT
10687 : 22555539 : && (num_sign_bit_copies (varop, shift_unit_mode)
10688 : 4457574 : == GET_MODE_PRECISION (shift_unit_mode)))
10689 : : {
10690 : : count = 0;
10691 : : break;
10692 : : }
10693 : :
10694 : : /* If we are doing an arithmetic right shift and discarding all but
10695 : : the sign bit copies, this is equivalent to doing a shift by the
10696 : : bitsize minus one. Convert it into that shift because it will
10697 : : often allow other simplifications. */
10698 : :
10699 : 22555502 : if (code == ASHIFTRT
10700 : 22555502 : && (count + num_sign_bit_copies (varop, shift_unit_mode)
10701 : 4457537 : >= GET_MODE_PRECISION (shift_unit_mode)))
10702 : 279702 : count = GET_MODE_PRECISION (shift_unit_mode) - 1;
10703 : :
10704 : : /* We simplify the tests below and elsewhere by converting
10705 : : ASHIFTRT to LSHIFTRT if we know the sign bit is clear.
10706 : : `make_compound_operation' will convert it to an ASHIFTRT for
10707 : : those machines (such as VAX) that don't have an LSHIFTRT. */
10708 : 22555502 : if (code == ASHIFTRT
10709 : 4457537 : && HWI_COMPUTABLE_MODE_P (shift_unit_mode)
10710 : 26988218 : && val_signbit_known_clear_p (shift_unit_mode,
10711 : : nonzero_bits (varop,
10712 : : shift_unit_mode)))
10713 : : code = LSHIFTRT;
10714 : :
10715 : 22533631 : if (((code == LSHIFTRT
10716 : 5444689 : && HWI_COMPUTABLE_MODE_P (shift_unit_mode)
10717 : 5423998 : && !(nonzero_bits (varop, shift_unit_mode) >> count))
10718 : 22553720 : || (code == ASHIFT
10719 : 12645460 : && HWI_COMPUTABLE_MODE_P (shift_unit_mode)
10720 : 12166780 : && !((nonzero_bits (varop, shift_unit_mode) << count)
10721 : 12166780 : & GET_MODE_MASK (shift_unit_mode))))
10722 : 22537047 : && !side_effects_p (varop))
10723 : 3416 : varop = const0_rtx;
10724 : : }
10725 : :
10726 : 23111687 : switch (GET_CODE (varop))
10727 : : {
10728 : 501595 : case SIGN_EXTEND:
10729 : 501595 : case ZERO_EXTEND:
10730 : 501595 : case SIGN_EXTRACT:
10731 : 501595 : case ZERO_EXTRACT:
10732 : 501595 : new_rtx = expand_compound_operation (varop);
10733 : 501595 : if (new_rtx != varop)
10734 : : {
10735 : 55602 : varop = new_rtx;
10736 : 26766299 : continue;
10737 : : }
10738 : : break;
10739 : :
10740 : 242747 : case MEM:
10741 : : /* The following rules apply only to scalars. */
10742 : 242747 : if (shift_mode != shift_unit_mode)
10743 : : break;
10744 : 229454 : int_mode = as_a <scalar_int_mode> (mode);
10745 : :
10746 : : /* If we have (xshiftrt (mem ...) C) and C is MODE_WIDTH
10747 : : minus the width of a smaller mode, we can do this with a
10748 : : SIGN_EXTEND or ZERO_EXTEND from the narrower memory location. */
10749 : 233467 : if ((code == ASHIFTRT || code == LSHIFTRT)
10750 : 76892 : && ! mode_dependent_address_p (XEXP (varop, 0),
10751 : 76892 : MEM_ADDR_SPACE (varop))
10752 : 76892 : && ! MEM_VOLATILE_P (varop)
10753 : 304597 : && (int_mode_for_size (GET_MODE_BITSIZE (int_mode) - count, 1)
10754 : 225441 : .exists (&tmode)))
10755 : : {
10756 : 4013 : new_rtx = adjust_address_nv (varop, tmode,
10757 : : BYTES_BIG_ENDIAN ? 0
10758 : : : count / BITS_PER_UNIT);
10759 : :
10760 : 4013 : varop = gen_rtx_fmt_e (code == ASHIFTRT ? SIGN_EXTEND
10761 : : : ZERO_EXTEND, int_mode, new_rtx);
10762 : 4013 : count = 0;
10763 : 4013 : continue;
10764 : : }
10765 : : break;
10766 : :
10767 : 4469275 : case SUBREG:
10768 : : /* The following rules apply only to scalars. */
10769 : 4469275 : if (shift_mode != shift_unit_mode)
10770 : : break;
10771 : 4088131 : int_mode = as_a <scalar_int_mode> (mode);
10772 : 4088131 : int_varop_mode = as_a <scalar_int_mode> (GET_MODE (varop));
10773 : :
10774 : : /* If VAROP is a SUBREG, strip it as long as the inner operand has
10775 : : the same number of words as what we've seen so far. Then store
10776 : : the widest mode in MODE. */
10777 : 4088131 : if (subreg_lowpart_p (varop)
10778 : 26945684 : && is_int_mode (GET_MODE (SUBREG_REG (varop)), &inner_mode)
10779 : 8127814 : && GET_MODE_SIZE (inner_mode) > GET_MODE_SIZE (int_varop_mode)
10780 : 335021 : && (CEIL (GET_MODE_SIZE (inner_mode), UNITS_PER_WORD)
10781 : 319513 : == CEIL (GET_MODE_SIZE (int_mode), UNITS_PER_WORD))
10782 : 4396807 : && GET_MODE_CLASS (int_varop_mode) == MODE_INT)
10783 : : {
10784 : 308676 : varop = SUBREG_REG (varop);
10785 : 926028 : if (GET_MODE_SIZE (inner_mode) > GET_MODE_SIZE (int_mode))
10786 : 308676 : mode = inner_mode;
10787 : 308676 : continue;
10788 : : }
10789 : : break;
10790 : :
10791 : 437060 : case MULT:
10792 : : /* Some machines use MULT instead of ASHIFT because MULT
10793 : : is cheaper. But it is still better on those machines to
10794 : : merge two shifts into one. */
10795 : 437060 : if (CONST_INT_P (XEXP (varop, 1))
10796 : 437060 : && (log2 = exact_log2 (UINTVAL (XEXP (varop, 1)))) >= 0)
10797 : : {
10798 : 2 : rtx log2_rtx = gen_int_shift_amount (GET_MODE (varop), log2);
10799 : 2 : varop = simplify_gen_binary (ASHIFT, GET_MODE (varop),
10800 : : XEXP (varop, 0), log2_rtx);
10801 : 2 : continue;
10802 : 2 : }
10803 : : break;
10804 : :
10805 : 6953 : case UDIV:
10806 : : /* Similar, for when divides are cheaper. */
10807 : 6953 : if (CONST_INT_P (XEXP (varop, 1))
10808 : 6953 : && (log2 = exact_log2 (UINTVAL (XEXP (varop, 1)))) >= 0)
10809 : : {
10810 : 9 : rtx log2_rtx = gen_int_shift_amount (GET_MODE (varop), log2);
10811 : 9 : varop = simplify_gen_binary (LSHIFTRT, GET_MODE (varop),
10812 : : XEXP (varop, 0), log2_rtx);
10813 : 9 : continue;
10814 : 9 : }
10815 : : break;
10816 : :
10817 : 349518 : case ASHIFTRT:
10818 : : /* If we are extracting just the sign bit of an arithmetic
10819 : : right shift, that shift is not needed. However, the sign
10820 : : bit of a wider mode may be different from what would be
10821 : : interpreted as the sign bit in a narrower mode, so, if
10822 : : the result is narrower, don't discard the shift. */
10823 : 351544 : if (code == LSHIFTRT
10824 : 12190 : && count == (GET_MODE_UNIT_BITSIZE (result_mode) - 1)
10825 : 349518 : && (GET_MODE_UNIT_BITSIZE (result_mode)
10826 : 4078 : >= GET_MODE_UNIT_BITSIZE (GET_MODE (varop))))
10827 : : {
10828 : 2026 : varop = XEXP (varop, 0);
10829 : 2026 : continue;
10830 : : }
10831 : :
10832 : : /* fall through */
10833 : :
10834 : 5849944 : case LSHIFTRT:
10835 : 5849944 : case ASHIFT:
10836 : 5849944 : case ROTATE:
10837 : : /* The following rules apply only to scalars. */
10838 : 5849944 : if (shift_mode != shift_unit_mode)
10839 : : break;
10840 : 5842731 : int_mode = as_a <scalar_int_mode> (mode);
10841 : 5842731 : int_varop_mode = as_a <scalar_int_mode> (GET_MODE (varop));
10842 : 5842731 : int_result_mode = as_a <scalar_int_mode> (result_mode);
10843 : :
10844 : : /* Here we have two nested shifts. The result is usually the
10845 : : AND of a new shift with a mask. We compute the result below. */
10846 : 5842731 : if (CONST_INT_P (XEXP (varop, 1))
10847 : 5823178 : && INTVAL (XEXP (varop, 1)) >= 0
10848 : 5823175 : && INTVAL (XEXP (varop, 1)) < GET_MODE_PRECISION (int_varop_mode)
10849 : 5823175 : && HWI_COMPUTABLE_MODE_P (int_result_mode)
10850 : 11632890 : && HWI_COMPUTABLE_MODE_P (int_mode))
10851 : : {
10852 : 5790159 : enum rtx_code first_code = GET_CODE (varop);
10853 : 5790159 : unsigned int first_count = INTVAL (XEXP (varop, 1));
10854 : 5790159 : unsigned HOST_WIDE_INT mask;
10855 : 5790159 : rtx mask_rtx;
10856 : :
10857 : : /* We have one common special case. We can't do any merging if
10858 : : the inner code is an ASHIFTRT of a smaller mode. However, if
10859 : : we have (ashift:M1 (subreg:M1 (ashiftrt:M2 FOO C1) 0) C2)
10860 : : with C2 == GET_MODE_BITSIZE (M1) - GET_MODE_BITSIZE (M2),
10861 : : we can convert it to
10862 : : (ashiftrt:M1 (ashift:M1 (and:M1 (subreg:M1 FOO 0) C3) C2) C1).
10863 : : This simplifies certain SIGN_EXTEND operations. */
10864 : 5790159 : if (code == ASHIFT && first_code == ASHIFTRT
10865 : 5790159 : && count == (GET_MODE_PRECISION (int_result_mode)
10866 : 318652 : - GET_MODE_PRECISION (int_varop_mode)))
10867 : : {
10868 : : /* C3 has the low-order C1 bits zero. */
10869 : :
10870 : 0 : mask = GET_MODE_MASK (int_mode)
10871 : 0 : & ~((HOST_WIDE_INT_1U << first_count) - 1);
10872 : :
10873 : 0 : varop = simplify_and_const_int (NULL_RTX, int_result_mode,
10874 : : XEXP (varop, 0), mask);
10875 : 0 : varop = simplify_shift_const (NULL_RTX, ASHIFT,
10876 : : int_result_mode, varop, count);
10877 : 0 : count = first_count;
10878 : 0 : code = ASHIFTRT;
10879 : 0 : continue;
10880 : : }
10881 : :
10882 : : /* If this was (ashiftrt (ashift foo C1) C2) and FOO has more
10883 : : than C1 high-order bits equal to the sign bit, we can convert
10884 : : this to either an ASHIFT or an ASHIFTRT depending on the
10885 : : two counts.
10886 : :
10887 : : We cannot do this if VAROP's mode is not SHIFT_UNIT_MODE. */
10888 : :
10889 : 5791655 : if (code == ASHIFTRT && first_code == ASHIFT
10890 : 3104289 : && int_varop_mode == shift_unit_mode
10891 : 8824657 : && (num_sign_bit_copies (XEXP (varop, 0), shift_unit_mode)
10892 : : > first_count))
10893 : : {
10894 : 1496 : varop = XEXP (varop, 0);
10895 : 1496 : count -= first_count;
10896 : 1496 : if (count < 0)
10897 : : {
10898 : 0 : count = -count;
10899 : 0 : code = ASHIFT;
10900 : : }
10901 : :
10902 : 1496 : continue;
10903 : : }
10904 : :
10905 : : /* There are some cases we can't do. If CODE is ASHIFTRT,
10906 : : we can only do this if FIRST_CODE is also ASHIFTRT.
10907 : :
10908 : : We can't do the case when CODE is ROTATE and FIRST_CODE is
10909 : : ASHIFTRT.
10910 : :
10911 : : If the mode of this shift is not the mode of the outer shift,
10912 : : we can't do this if either shift is a right shift or ROTATE.
10913 : :
10914 : : Finally, we can't do any of these if the mode is too wide
10915 : : unless the codes are the same.
10916 : :
10917 : : Handle the case where the shift codes are the same
10918 : : first. */
10919 : :
10920 : 5788663 : if (code == first_code)
10921 : : {
10922 : 24785 : if (int_varop_mode != int_result_mode
10923 : 24785 : && (code == ASHIFTRT || code == LSHIFTRT
10924 : 370 : || code == ROTATE))
10925 : : break;
10926 : :
10927 : 24691 : count += first_count;
10928 : 24691 : varop = XEXP (varop, 0);
10929 : 24691 : continue;
10930 : : }
10931 : :
10932 : 5763878 : if (code == ASHIFTRT
10933 : 2661050 : || (code == ROTATE && first_code == ASHIFTRT)
10934 : 2661008 : || GET_MODE_PRECISION (int_mode) > HOST_BITS_PER_WIDE_INT
10935 : 8424886 : || (int_varop_mode != int_result_mode
10936 : 66102 : && (first_code == ASHIFTRT || first_code == LSHIFTRT
10937 : 66102 : || first_code == ROTATE
10938 : 27139 : || code == ROTATE)))
10939 : : break;
10940 : :
10941 : : /* To compute the mask to apply after the shift, shift the
10942 : : nonzero bits of the inner shift the same way the
10943 : : outer shift will. */
10944 : :
10945 : 2622045 : mask_rtx = gen_int_mode (nonzero_bits (varop, int_varop_mode),
10946 : : int_result_mode);
10947 : 2622045 : rtx count_rtx = gen_int_shift_amount (int_result_mode, count);
10948 : 2622045 : mask_rtx
10949 : 2622045 : = simplify_const_binary_operation (code, int_result_mode,
10950 : : mask_rtx, count_rtx);
10951 : :
10952 : : /* Give up if we can't compute an outer operation to use. */
10953 : 2622045 : if (mask_rtx == 0
10954 : 2622045 : || !CONST_INT_P (mask_rtx)
10955 : 5244090 : || ! merge_outer_ops (&outer_op, &outer_const, AND,
10956 : : INTVAL (mask_rtx),
10957 : : int_result_mode, &complement_p))
10958 : : break;
10959 : :
10960 : : /* If the shifts are in the same direction, we add the
10961 : : counts. Otherwise, we subtract them. */
10962 : 2602145 : if ((code == ASHIFTRT || code == LSHIFTRT)
10963 : 2602145 : == (first_code == ASHIFTRT || first_code == LSHIFTRT))
10964 : 8919 : count += first_count;
10965 : : else
10966 : 2593226 : count -= first_count;
10967 : :
10968 : : /* If COUNT is positive, the new shift is usually CODE,
10969 : : except for the two exceptions below, in which case it is
10970 : : FIRST_CODE. If the count is negative, FIRST_CODE should
10971 : : always be used */
10972 : 2602145 : if (count > 0
10973 : 545834 : && ((first_code == ROTATE && code == ASHIFT)
10974 : 545147 : || (first_code == ASHIFTRT && code == LSHIFTRT)))
10975 : : code = first_code;
10976 : 2593232 : else if (count < 0)
10977 : 312818 : code = first_code, count = -count;
10978 : :
10979 : 2602145 : varop = XEXP (varop, 0);
10980 : 2602145 : continue;
10981 : 2602145 : }
10982 : :
10983 : : /* If we have (A << B << C) for any shift, we can convert this to
10984 : : (A << C << B). This wins if A is a constant. Only try this if
10985 : : B is not a constant. */
10986 : :
10987 : 52572 : else if (GET_CODE (varop) == code
10988 : 4468 : && CONST_INT_P (XEXP (varop, 0))
10989 : 764 : && !CONST_INT_P (XEXP (varop, 1)))
10990 : : {
10991 : : /* For ((unsigned) (cstULL >> count)) >> cst2 we have to make
10992 : : sure the result will be masked. See PR70222. */
10993 : 764 : if (code == LSHIFTRT
10994 : 7 : && int_mode != int_result_mode
10995 : 771 : && !merge_outer_ops (&outer_op, &outer_const, AND,
10996 : 7 : GET_MODE_MASK (int_result_mode)
10997 : 7 : >> orig_count, int_result_mode,
10998 : : &complement_p))
10999 : : break;
11000 : : /* For ((int) (cstLL >> count)) >> cst2 just give up. Queuing
11001 : : up outer sign extension (often left and right shift) is
11002 : : hardly more efficient than the original. See PR70429.
11003 : : Similarly punt for rotates with different modes.
11004 : : See PR97386. */
11005 : 764 : if ((code == ASHIFTRT || code == ROTATE)
11006 : 764 : && int_mode != int_result_mode)
11007 : : break;
11008 : :
11009 : 750 : rtx count_rtx = gen_int_shift_amount (int_result_mode, count);
11010 : 750 : rtx new_rtx = simplify_const_binary_operation (code, int_mode,
11011 : : XEXP (varop, 0),
11012 : : count_rtx);
11013 : 750 : varop = gen_rtx_fmt_ee (code, int_mode, new_rtx, XEXP (varop, 1));
11014 : 750 : count = 0;
11015 : 750 : continue;
11016 : 750 : }
11017 : : break;
11018 : :
11019 : 47751 : case NOT:
11020 : : /* The following rules apply only to scalars. */
11021 : 47751 : if (shift_mode != shift_unit_mode)
11022 : : break;
11023 : :
11024 : : /* Make this fit the case below. */
11025 : 47749 : varop = gen_rtx_XOR (mode, XEXP (varop, 0), constm1_rtx);
11026 : 47749 : continue;
11027 : :
11028 : 761657 : case IOR:
11029 : 761657 : case AND:
11030 : 761657 : case XOR:
11031 : : /* The following rules apply only to scalars. */
11032 : 761657 : if (shift_mode != shift_unit_mode)
11033 : : break;
11034 : 760256 : int_varop_mode = as_a <scalar_int_mode> (GET_MODE (varop));
11035 : 760256 : int_result_mode = as_a <scalar_int_mode> (result_mode);
11036 : :
11037 : : /* If we have (xshiftrt (ior (plus X (const_int -1)) X) C)
11038 : : with C the size of VAROP - 1 and the shift is logical if
11039 : : STORE_FLAG_VALUE is 1 and arithmetic if STORE_FLAG_VALUE is -1,
11040 : : we have an (le X 0) operation. If we have an arithmetic shift
11041 : : and STORE_FLAG_VALUE is 1 or we have a logical shift with
11042 : : STORE_FLAG_VALUE of -1, we have a (neg (le X 0)) operation. */
11043 : :
11044 : 281673 : if (GET_CODE (varop) == IOR && GET_CODE (XEXP (varop, 0)) == PLUS
11045 : 1996 : && XEXP (XEXP (varop, 0), 1) == constm1_rtx
11046 : : && (STORE_FLAG_VALUE == 1 || STORE_FLAG_VALUE == -1)
11047 : 221 : && (code == LSHIFTRT || code == ASHIFTRT)
11048 : 221 : && count == (GET_MODE_PRECISION (int_varop_mode) - 1)
11049 : 760477 : && rtx_equal_p (XEXP (XEXP (varop, 0), 0), XEXP (varop, 1)))
11050 : : {
11051 : 53 : count = 0;
11052 : 53 : varop = gen_rtx_LE (int_varop_mode, XEXP (varop, 1),
11053 : : const0_rtx);
11054 : :
11055 : 53 : if (STORE_FLAG_VALUE == 1 ? code == ASHIFTRT : code == LSHIFTRT)
11056 : 53 : varop = gen_rtx_NEG (int_varop_mode, varop);
11057 : :
11058 : 53 : continue;
11059 : : }
11060 : :
11061 : : /* If we have (shift (logical)), move the logical to the outside
11062 : : to allow it to possibly combine with another logical and the
11063 : : shift to combine with another shift. This also canonicalizes to
11064 : : what a ZERO_EXTRACT looks like. Also, some machines have
11065 : : (and (shift)) insns. */
11066 : :
11067 : 1158546 : if (CONST_INT_P (XEXP (varop, 1))
11068 : : /* We can't do this if we have (ashiftrt (xor)) and the
11069 : : constant has its sign bit set in shift_unit_mode with
11070 : : shift_unit_mode wider than result_mode. */
11071 : 399369 : && !(code == ASHIFTRT && GET_CODE (varop) == XOR
11072 : 6255 : && int_result_mode != shift_unit_mode
11073 : 0 : && trunc_int_for_mode (INTVAL (XEXP (varop, 1)),
11074 : : shift_unit_mode) < 0)
11075 : 399369 : && (new_rtx = simplify_const_binary_operation
11076 : 399369 : (code, int_result_mode,
11077 : 399369 : gen_int_mode (INTVAL (XEXP (varop, 1)), int_result_mode),
11078 : 399369 : gen_int_shift_amount (int_result_mode, count))) != 0
11079 : 399369 : && CONST_INT_P (new_rtx)
11080 : 1159572 : && merge_outer_ops (&outer_op, &outer_const, GET_CODE (varop),
11081 : : INTVAL (new_rtx), int_result_mode,
11082 : : &complement_p))
11083 : : {
11084 : 398343 : varop = XEXP (varop, 0);
11085 : 398343 : continue;
11086 : : }
11087 : :
11088 : : /* If we can't do that, try to simplify the shift in each arm of the
11089 : : logical expression, make a new logical expression, and apply
11090 : : the inverse distributive law. This also can't be done for
11091 : : (ashiftrt (xor)) where we've widened the shift and the constant
11092 : : changes the sign bit. */
11093 : 361860 : if (CONST_INT_P (XEXP (varop, 1))
11094 : 361860 : && !(code == ASHIFTRT && GET_CODE (varop) == XOR
11095 : 0 : && int_result_mode != shift_unit_mode
11096 : 0 : && trunc_int_for_mode (INTVAL (XEXP (varop, 1)),
11097 : : shift_unit_mode) < 0))
11098 : : {
11099 : 1026 : rtx lhs = simplify_shift_const (NULL_RTX, code, shift_unit_mode,
11100 : : XEXP (varop, 0), count);
11101 : 1026 : rtx rhs = simplify_shift_const (NULL_RTX, code, shift_unit_mode,
11102 : : XEXP (varop, 1), count);
11103 : :
11104 : 1026 : varop = simplify_gen_binary (GET_CODE (varop), shift_unit_mode,
11105 : : lhs, rhs);
11106 : 1026 : varop = apply_distributive_law (varop);
11107 : :
11108 : 1026 : count = 0;
11109 : 1026 : continue;
11110 : 1026 : }
11111 : : break;
11112 : :
11113 : 29465 : case EQ:
11114 : : /* The following rules apply only to scalars. */
11115 : 29465 : if (shift_mode != shift_unit_mode)
11116 : : break;
11117 : 29465 : int_result_mode = as_a <scalar_int_mode> (result_mode);
11118 : :
11119 : : /* Convert (lshiftrt (eq FOO 0) C) to (xor FOO 1) if STORE_FLAG_VALUE
11120 : : says that the sign bit can be tested, FOO has mode MODE, C is
11121 : : GET_MODE_PRECISION (MODE) - 1, and FOO has only its low-order bit
11122 : : that may be nonzero. */
11123 : 29465 : if (code == LSHIFTRT
11124 : : && XEXP (varop, 1) == const0_rtx
11125 : : && GET_MODE (XEXP (varop, 0)) == int_result_mode
11126 : : && count == (GET_MODE_PRECISION (int_result_mode) - 1)
11127 : : && HWI_COMPUTABLE_MODE_P (int_result_mode)
11128 : : && STORE_FLAG_VALUE == -1
11129 : : && nonzero_bits (XEXP (varop, 0), int_result_mode) == 1
11130 : : && merge_outer_ops (&outer_op, &outer_const, XOR, 1,
11131 : : int_result_mode, &complement_p))
11132 : : {
11133 : : varop = XEXP (varop, 0);
11134 : : count = 0;
11135 : : continue;
11136 : : }
11137 : : break;
11138 : :
11139 : 27204 : case NEG:
11140 : : /* The following rules apply only to scalars. */
11141 : 27204 : if (shift_mode != shift_unit_mode)
11142 : : break;
11143 : 27072 : int_result_mode = as_a <scalar_int_mode> (result_mode);
11144 : :
11145 : : /* (lshiftrt (neg A) C) where A is either 0 or 1 and C is one less
11146 : : than the number of bits in the mode is equivalent to A. */
11147 : 27077 : if (code == LSHIFTRT
11148 : 5884 : && count == (GET_MODE_PRECISION (int_result_mode) - 1)
11149 : 29892 : && nonzero_bits (XEXP (varop, 0), int_result_mode) == 1)
11150 : : {
11151 : 5 : varop = XEXP (varop, 0);
11152 : 5 : count = 0;
11153 : 5 : continue;
11154 : : }
11155 : :
11156 : : /* NEG commutes with ASHIFT since it is multiplication. Move the
11157 : : NEG outside to allow shifts to combine. */
11158 : 44937 : if (code == ASHIFT
11159 : 27067 : && merge_outer_ops (&outer_op, &outer_const, NEG, 0,
11160 : : int_result_mode, &complement_p))
11161 : : {
11162 : 17870 : varop = XEXP (varop, 0);
11163 : 17870 : continue;
11164 : : }
11165 : : break;
11166 : :
11167 : 1828225 : case PLUS:
11168 : : /* The following rules apply only to scalars. */
11169 : 1828225 : if (shift_mode != shift_unit_mode)
11170 : : break;
11171 : 1784625 : int_result_mode = as_a <scalar_int_mode> (result_mode);
11172 : :
11173 : : /* (lshiftrt (plus A -1) C) where A is either 0 or 1 and C
11174 : : is one less than the number of bits in the mode is
11175 : : equivalent to (xor A 1). */
11176 : 1784625 : if (code == LSHIFTRT
11177 : 367454 : && count == (GET_MODE_PRECISION (int_result_mode) - 1)
11178 : 30142 : && XEXP (varop, 1) == constm1_rtx
11179 : 13294 : && nonzero_bits (XEXP (varop, 0), int_result_mode) == 1
11180 : 1784625 : && merge_outer_ops (&outer_op, &outer_const, XOR, 1,
11181 : : int_result_mode, &complement_p))
11182 : : {
11183 : 0 : count = 0;
11184 : 0 : varop = XEXP (varop, 0);
11185 : 0 : continue;
11186 : : }
11187 : :
11188 : : /* If we have (xshiftrt (plus FOO BAR) C), and the only bits
11189 : : that might be nonzero in BAR are those being shifted out and those
11190 : : bits are known zero in FOO, we can replace the PLUS with FOO.
11191 : : Similarly in the other operand order. This code occurs when
11192 : : we are computing the size of a variable-size array. */
11193 : :
11194 : 1787985 : if ((code == ASHIFTRT || code == LSHIFTRT)
11195 : 521766 : && count < HOST_BITS_PER_WIDE_INT
11196 : 521682 : && nonzero_bits (XEXP (varop, 1), int_result_mode) >> count == 0
11197 : 1911524 : && (nonzero_bits (XEXP (varop, 1), int_result_mode)
11198 : 126899 : & nonzero_bits (XEXP (varop, 0), int_result_mode)) == 0)
11199 : : {
11200 : 3360 : varop = XEXP (varop, 0);
11201 : 3360 : continue;
11202 : : }
11203 : 1781296 : else if ((code == ASHIFTRT || code == LSHIFTRT)
11204 : 518406 : && count < HOST_BITS_PER_WIDE_INT
11205 : 518322 : && HWI_COMPUTABLE_MODE_P (int_result_mode)
11206 : 517151 : && (nonzero_bits (XEXP (varop, 0), int_result_mode)
11207 : 517151 : >> count) == 0
11208 : 1843516 : && (nonzero_bits (XEXP (varop, 0), int_result_mode)
11209 : 62251 : & nonzero_bits (XEXP (varop, 1), int_result_mode)) == 0)
11210 : : {
11211 : 31 : varop = XEXP (varop, 1);
11212 : 31 : continue;
11213 : : }
11214 : :
11215 : : /* (ashift (plus foo C) N) is (plus (ashift foo N) C'). */
11216 : 2143705 : if (code == ASHIFT
11217 : 1253650 : && CONST_INT_P (XEXP (varop, 1))
11218 : 362760 : && (new_rtx = simplify_const_binary_operation
11219 : 362760 : (ASHIFT, int_result_mode,
11220 : 362760 : gen_int_mode (INTVAL (XEXP (varop, 1)), int_result_mode),
11221 : 362760 : gen_int_shift_amount (int_result_mode, count))) != 0
11222 : 362760 : && CONST_INT_P (new_rtx)
11223 : 2143994 : && merge_outer_ops (&outer_op, &outer_const, PLUS,
11224 : : INTVAL (new_rtx), int_result_mode,
11225 : : &complement_p))
11226 : : {
11227 : 362471 : varop = XEXP (varop, 0);
11228 : 362471 : continue;
11229 : : }
11230 : :
11231 : : /* Check for 'PLUS signbit', which is the canonical form of 'XOR
11232 : : signbit', and attempt to change the PLUS to an XOR and move it to
11233 : : the outer operation as is done above in the AND/IOR/XOR case
11234 : : leg for shift(logical). See details in logical handling above
11235 : : for reasoning in doing so. */
11236 : 1426634 : if (code == LSHIFTRT
11237 : 364143 : && CONST_INT_P (XEXP (varop, 1))
11238 : 251622 : && mode_signbit_p (int_result_mode, XEXP (varop, 1))
11239 : 7871 : && (new_rtx = simplify_const_binary_operation
11240 : 1418763 : (code, int_result_mode,
11241 : 7871 : gen_int_mode (INTVAL (XEXP (varop, 1)), int_result_mode),
11242 : 7871 : gen_int_shift_amount (int_result_mode, count))) != 0
11243 : 7871 : && CONST_INT_P (new_rtx)
11244 : 1426634 : && merge_outer_ops (&outer_op, &outer_const, XOR,
11245 : : INTVAL (new_rtx), int_result_mode,
11246 : : &complement_p))
11247 : : {
11248 : 7871 : varop = XEXP (varop, 0);
11249 : 7871 : continue;
11250 : : }
11251 : :
11252 : : break;
11253 : :
11254 : 522682 : case MINUS:
11255 : : /* The following rules apply only to scalars. */
11256 : 522682 : if (shift_mode != shift_unit_mode)
11257 : : break;
11258 : 512544 : int_varop_mode = as_a <scalar_int_mode> (GET_MODE (varop));
11259 : :
11260 : : /* If we have (xshiftrt (minus (ashiftrt X C)) X) C)
11261 : : with C the size of VAROP - 1 and the shift is logical if
11262 : : STORE_FLAG_VALUE is 1 and arithmetic if STORE_FLAG_VALUE is -1,
11263 : : we have a (gt X 0) operation. If the shift is arithmetic with
11264 : : STORE_FLAG_VALUE of 1 or logical with STORE_FLAG_VALUE == -1,
11265 : : we have a (neg (gt X 0)) operation. */
11266 : :
11267 : 512544 : if ((STORE_FLAG_VALUE == 1 || STORE_FLAG_VALUE == -1)
11268 : 512544 : && GET_CODE (XEXP (varop, 0)) == ASHIFTRT
11269 : 9613 : && count == (GET_MODE_PRECISION (int_varop_mode) - 1)
11270 : 45 : && (code == LSHIFTRT || code == ASHIFTRT)
11271 : 13 : && CONST_INT_P (XEXP (XEXP (varop, 0), 1))
11272 : 13 : && INTVAL (XEXP (XEXP (varop, 0), 1)) == count
11273 : 512544 : && rtx_equal_p (XEXP (XEXP (varop, 0), 0), XEXP (varop, 1)))
11274 : : {
11275 : 0 : count = 0;
11276 : 0 : varop = gen_rtx_GT (int_varop_mode, XEXP (varop, 1),
11277 : : const0_rtx);
11278 : :
11279 : 0 : if (STORE_FLAG_VALUE == 1 ? code == ASHIFTRT : code == LSHIFTRT)
11280 : 0 : varop = gen_rtx_NEG (int_varop_mode, varop);
11281 : :
11282 : 0 : continue;
11283 : : }
11284 : : break;
11285 : :
11286 : 676 : case TRUNCATE:
11287 : : /* Change (lshiftrt (truncate (lshiftrt))) to (truncate (lshiftrt))
11288 : : if the truncate does not affect the value. */
11289 : 676 : if (code == LSHIFTRT
11290 : 516 : && GET_CODE (XEXP (varop, 0)) == LSHIFTRT
11291 : 516 : && CONST_INT_P (XEXP (XEXP (varop, 0), 1))
11292 : 676 : && (INTVAL (XEXP (XEXP (varop, 0), 1))
11293 : 516 : >= (GET_MODE_UNIT_PRECISION (GET_MODE (XEXP (varop, 0)))
11294 : 1032 : - GET_MODE_UNIT_PRECISION (GET_MODE (varop)))))
11295 : : {
11296 : 516 : rtx varop_inner = XEXP (varop, 0);
11297 : 516 : int new_count = count + INTVAL (XEXP (varop_inner, 1));
11298 : 516 : rtx new_count_rtx = gen_int_shift_amount (GET_MODE (varop_inner),
11299 : : new_count);
11300 : 516 : varop_inner = gen_rtx_LSHIFTRT (GET_MODE (varop_inner),
11301 : : XEXP (varop_inner, 0),
11302 : : new_count_rtx);
11303 : 516 : varop = gen_rtx_TRUNCATE (GET_MODE (varop), varop_inner);
11304 : 516 : count = 0;
11305 : 516 : continue;
11306 : 516 : }
11307 : : break;
11308 : :
11309 : : default:
11310 : : break;
11311 : 47749 : }
11312 : :
11313 : : break;
11314 : : }
11315 : :
11316 : 22871992 : shift_mode = result_mode;
11317 : 22871992 : if (shift_mode != mode)
11318 : : {
11319 : : /* We only change the modes of scalar shifts. */
11320 : 308155 : int_mode = as_a <scalar_int_mode> (mode);
11321 : 308155 : int_result_mode = as_a <scalar_int_mode> (result_mode);
11322 : 308155 : shift_mode = try_widen_shift_mode (code, varop, count, int_result_mode,
11323 : : int_mode, outer_op, outer_const);
11324 : : }
11325 : :
11326 : : /* We have now finished analyzing the shift. The result should be
11327 : : a shift of type CODE with SHIFT_MODE shifting VAROP COUNT places. If
11328 : : OUTER_OP is non-UNKNOWN, it is an operation that needs to be applied
11329 : : to the result of the shift. OUTER_CONST is the relevant constant,
11330 : : but we must turn off all bits turned off in the shift. */
11331 : :
11332 : 22871992 : if (outer_op == UNKNOWN
11333 : 19540638 : && orig_code == code && orig_count == count
11334 : 19498483 : && varop == orig_varop
11335 : 19247026 : && shift_mode == GET_MODE (varop))
11336 : : return NULL_RTX;
11337 : :
11338 : : /* Make a SUBREG if necessary. If we can't make it, fail. */
11339 : 3627289 : varop = gen_lowpart (shift_mode, varop);
11340 : 3627289 : if (varop == NULL_RTX || GET_CODE (varop) == CLOBBER)
11341 : : return NULL_RTX;
11342 : :
11343 : : /* If we have an outer operation and we just made a shift, it is
11344 : : possible that we could have simplified the shift were it not
11345 : : for the outer operation. So try to do the simplification
11346 : : recursively. */
11347 : :
11348 : 3627289 : if (outer_op != UNKNOWN)
11349 : 3331354 : x = simplify_shift_const_1 (code, shift_mode, varop, count);
11350 : : else
11351 : : x = NULL_RTX;
11352 : :
11353 : 3331354 : if (x == NULL_RTX)
11354 : 3596126 : x = simplify_gen_binary (code, shift_mode, varop,
11355 : 3596126 : gen_int_shift_amount (shift_mode, count));
11356 : :
11357 : : /* If we were doing an LSHIFTRT in a wider mode than it was originally,
11358 : : turn off all the bits that the shift would have turned off. */
11359 : 3627289 : if (orig_code == LSHIFTRT && result_mode != shift_mode)
11360 : : /* We only change the modes of scalar shifts. */
11361 : 31681 : x = simplify_and_const_int (NULL_RTX, as_a <scalar_int_mode> (shift_mode),
11362 : 31681 : x, GET_MODE_MASK (result_mode) >> orig_count);
11363 : :
11364 : : /* Do the remainder of the processing in RESULT_MODE. */
11365 : 3627289 : x = gen_lowpart_or_truncate (result_mode, x);
11366 : :
11367 : : /* If COMPLEMENT_P is set, we have to complement X before doing the outer
11368 : : operation. */
11369 : 3627289 : if (complement_p)
11370 : 19077 : x = simplify_gen_unary (NOT, result_mode, x, result_mode);
11371 : :
11372 : 3627289 : if (outer_op != UNKNOWN)
11373 : : {
11374 : 3331354 : int_result_mode = as_a <scalar_int_mode> (result_mode);
11375 : :
11376 : 3331354 : if (GET_RTX_CLASS (outer_op) != RTX_UNARY
11377 : 3331354 : && GET_MODE_PRECISION (int_result_mode) < HOST_BITS_PER_WIDE_INT)
11378 : 1257732 : outer_const = trunc_int_for_mode (outer_const, int_result_mode);
11379 : :
11380 : 3331354 : if (outer_op == AND)
11381 : 2895929 : x = simplify_and_const_int (NULL_RTX, int_result_mode, x, outer_const);
11382 : 435425 : else if (outer_op == SET)
11383 : : {
11384 : : /* This means that we have determined that the result is
11385 : : equivalent to a constant. This should be rare. */
11386 : 0 : if (!side_effects_p (x))
11387 : 0 : x = GEN_INT (outer_const);
11388 : : }
11389 : 435425 : else if (GET_RTX_CLASS (outer_op) == RTX_UNARY)
11390 : 17870 : x = simplify_gen_unary (outer_op, int_result_mode, x, int_result_mode);
11391 : : else
11392 : 417555 : x = simplify_gen_binary (outer_op, int_result_mode, x,
11393 : : GEN_INT (outer_const));
11394 : : }
11395 : :
11396 : : return x;
11397 : : }
11398 : :
11399 : : /* Simplify a shift of VAROP by COUNT bits. CODE says what kind of shift.
11400 : : The result of the shift is RESULT_MODE. If we cannot simplify it,
11401 : : return X or, if it is NULL, synthesize the expression with
11402 : : simplify_gen_binary. Otherwise, return a simplified value.
11403 : :
11404 : : The shift is normally computed in the widest mode we find in VAROP, as
11405 : : long as it isn't a different number of words than RESULT_MODE. Exceptions
11406 : : are ASHIFTRT and ROTATE, which are always done in their original mode. */
11407 : :
11408 : : static rtx
11409 : 19540862 : simplify_shift_const (rtx x, enum rtx_code code, machine_mode result_mode,
11410 : : rtx varop, int count)
11411 : : {
11412 : 19540862 : rtx tem = simplify_shift_const_1 (code, result_mode, varop, count);
11413 : 19540862 : if (tem)
11414 : : return tem;
11415 : :
11416 : 15944736 : if (!x)
11417 : 4813528 : x = simplify_gen_binary (code, GET_MODE (varop), varop,
11418 : 4813528 : gen_int_shift_amount (GET_MODE (varop), count));
11419 : 15944736 : if (GET_MODE (x) != result_mode)
11420 : 0 : x = gen_lowpart (result_mode, x);
11421 : : return x;
11422 : : }
11423 : :
11424 : :
11425 : : /* A subroutine of recog_for_combine. See there for arguments and
11426 : : return value. */
11427 : :
11428 : : static int
11429 : 45553883 : recog_for_combine_1 (rtx *pnewpat, rtx_insn *insn, rtx *pnotes,
11430 : : unsigned old_nregs, unsigned new_nregs)
11431 : : {
11432 : 45553883 : rtx pat = *pnewpat;
11433 : 45553883 : rtx pat_without_clobbers;
11434 : 45553883 : int insn_code_number;
11435 : 45553883 : int num_clobbers_to_add = 0;
11436 : 45553883 : int i;
11437 : 45553883 : rtx notes = NULL_RTX;
11438 : 45553883 : rtx old_notes, old_pat;
11439 : 45553883 : int old_icode;
11440 : :
11441 : : /* If PAT is a PARALLEL, check to see if it contains the CLOBBER
11442 : : we use to indicate that something didn't match. If we find such a
11443 : : thing, force rejection. */
11444 : 45553883 : if (GET_CODE (pat) == PARALLEL)
11445 : 48993116 : for (i = XVECLEN (pat, 0) - 1; i >= 0; i--)
11446 : 33804937 : if (GET_CODE (XVECEXP (pat, 0, i)) == CLOBBER
11447 : 6981675 : && XEXP (XVECEXP (pat, 0, i), 0) == const0_rtx)
11448 : : return -1;
11449 : :
11450 : 45551976 : old_pat = PATTERN (insn);
11451 : 45551976 : old_notes = REG_NOTES (insn);
11452 : 45551976 : PATTERN (insn) = pat;
11453 : 45551976 : REG_NOTES (insn) = NULL_RTX;
11454 : :
11455 : 45551976 : insn_code_number = recog (pat, insn, &num_clobbers_to_add);
11456 : 45551976 : if (dump_file && (dump_flags & TDF_DETAILS))
11457 : : {
11458 : 277 : if (insn_code_number < 0)
11459 : 177 : fputs ("Failed to match this instruction:\n", dump_file);
11460 : : else
11461 : 100 : fputs ("Successfully matched this instruction:\n", dump_file);
11462 : 277 : print_rtl_single (dump_file, pat);
11463 : : }
11464 : :
11465 : : /* If it isn't, there is the possibility that we previously had an insn
11466 : : that clobbered some register as a side effect, but the combined
11467 : : insn doesn't need to do that. So try once more without the clobbers
11468 : : unless this represents an ASM insn. */
11469 : :
11470 : 36320945 : if (insn_code_number < 0 && ! check_asm_operands (pat)
11471 : 81871439 : && GET_CODE (pat) == PARALLEL)
11472 : : {
11473 : : int pos;
11474 : :
11475 : 47648501 : for (pos = 0, i = 0; i < XVECLEN (pat, 0); i++)
11476 : 32891908 : if (GET_CODE (XVECEXP (pat, 0, i)) != CLOBBER)
11477 : : {
11478 : 26287391 : if (i != pos)
11479 : 2215998 : SUBST (XVECEXP (pat, 0, pos), XVECEXP (pat, 0, i));
11480 : 26287391 : pos++;
11481 : : }
11482 : :
11483 : 14756593 : SUBST_INT (XVECLEN (pat, 0), pos);
11484 : :
11485 : 14756593 : if (pos == 1)
11486 : 4434942 : pat = XVECEXP (pat, 0, 0);
11487 : :
11488 : 14756593 : PATTERN (insn) = pat;
11489 : 14756593 : insn_code_number = recog (pat, insn, &num_clobbers_to_add);
11490 : 14756593 : if (dump_file && (dump_flags & TDF_DETAILS))
11491 : : {
11492 : 82 : if (insn_code_number < 0)
11493 : 81 : fputs ("Failed to match this instruction:\n", dump_file);
11494 : : else
11495 : 1 : fputs ("Successfully matched this instruction:\n", dump_file);
11496 : 82 : print_rtl_single (dump_file, pat);
11497 : : }
11498 : : }
11499 : :
11500 : 45551976 : pat_without_clobbers = pat;
11501 : :
11502 : 45551976 : PATTERN (insn) = old_pat;
11503 : 45551976 : REG_NOTES (insn) = old_notes;
11504 : :
11505 : : /* Recognize all noop sets, these will be killed by followup pass. */
11506 : 45551976 : if (insn_code_number < 0 && GET_CODE (pat) == SET && set_noop_p (pat))
11507 : 194316 : insn_code_number = NOOP_MOVE_INSN_CODE, num_clobbers_to_add = 0;
11508 : :
11509 : : /* If we had any clobbers to add, make a new pattern than contains
11510 : : them. Then check to make sure that all of them are dead. */
11511 : 45551976 : if (num_clobbers_to_add)
11512 : : {
11513 : 1530600 : rtx newpat = gen_rtx_PARALLEL (VOIDmode,
11514 : : rtvec_alloc (GET_CODE (pat) == PARALLEL
11515 : : ? (XVECLEN (pat, 0)
11516 : : + num_clobbers_to_add)
11517 : : : num_clobbers_to_add + 1));
11518 : :
11519 : 1530600 : if (GET_CODE (pat) == PARALLEL)
11520 : 1302 : for (i = 0; i < XVECLEN (pat, 0); i++)
11521 : 868 : XVECEXP (newpat, 0, i) = XVECEXP (pat, 0, i);
11522 : : else
11523 : 1530166 : XVECEXP (newpat, 0, 0) = pat;
11524 : :
11525 : 1530600 : add_clobbers (newpat, insn_code_number);
11526 : :
11527 : 2956730 : for (i = XVECLEN (newpat, 0) - num_clobbers_to_add;
11528 : 2956730 : i < XVECLEN (newpat, 0); i++)
11529 : : {
11530 : 1555006 : if (REG_P (XEXP (XVECEXP (newpat, 0, i), 0))
11531 : 1555006 : && ! reg_dead_at_p (XEXP (XVECEXP (newpat, 0, i), 0), insn))
11532 : : return -1;
11533 : 1426130 : if (GET_CODE (XEXP (XVECEXP (newpat, 0, i), 0)) != SCRATCH)
11534 : : {
11535 : 1377729 : gcc_assert (REG_P (XEXP (XVECEXP (newpat, 0, i), 0)));
11536 : 1377729 : notes = alloc_reg_note (REG_UNUSED,
11537 : : XEXP (XVECEXP (newpat, 0, i), 0), notes);
11538 : : }
11539 : : }
11540 : : pat = newpat;
11541 : : }
11542 : :
11543 : 45423100 : if (insn_code_number >= 0
11544 : 45423100 : && insn_code_number != NOOP_MOVE_INSN_CODE)
11545 : : {
11546 : : /* Create the reg dead notes if needed for the regs that were created via split. */
11547 : 9415433 : for (; old_nregs < new_nregs; old_nregs++)
11548 : 3420 : notes = alloc_reg_note (REG_DEAD, regno_reg_rtx[old_nregs], notes);
11549 : 9412013 : old_pat = PATTERN (insn);
11550 : 9412013 : old_notes = REG_NOTES (insn);
11551 : 9412013 : old_icode = INSN_CODE (insn);
11552 : 9412013 : PATTERN (insn) = pat;
11553 : 9412013 : REG_NOTES (insn) = notes;
11554 : 9412013 : INSN_CODE (insn) = insn_code_number;
11555 : :
11556 : : /* Allow targets to reject combined insn. */
11557 : 9412013 : if (!targetm.legitimate_combined_insn (insn))
11558 : : {
11559 : 3718 : if (dump_file && (dump_flags & TDF_DETAILS))
11560 : 0 : fputs ("Instruction not appropriate for target.",
11561 : : dump_file);
11562 : :
11563 : : /* Callers expect recog_for_combine to strip
11564 : : clobbers from the pattern on failure. */
11565 : : pat = pat_without_clobbers;
11566 : : notes = NULL_RTX;
11567 : :
11568 : : insn_code_number = -1;
11569 : : }
11570 : :
11571 : 9412013 : PATTERN (insn) = old_pat;
11572 : 9412013 : REG_NOTES (insn) = old_notes;
11573 : 9412013 : INSN_CODE (insn) = old_icode;
11574 : : }
11575 : :
11576 : 45423100 : *pnewpat = pat;
11577 : 45423100 : *pnotes = notes;
11578 : :
11579 : 45423100 : return insn_code_number;
11580 : : }
11581 : :
11582 : : /* Change every ZERO_EXTRACT and ZERO_EXTEND of a SUBREG that can be
11583 : : expressed as an AND and maybe an LSHIFTRT, to that formulation.
11584 : : Return whether anything was so changed. */
11585 : :
11586 : : static bool
11587 : 45499777 : change_zero_ext (rtx pat)
11588 : : {
11589 : 45499777 : bool changed = false;
11590 : 45499777 : rtx *src = &SET_SRC (pat);
11591 : :
11592 : 45499777 : subrtx_ptr_iterator::array_type array;
11593 : 312059649 : FOR_EACH_SUBRTX_PTR (iter, array, src, NONCONST)
11594 : : {
11595 : 266559872 : rtx x = **iter;
11596 : 266559872 : scalar_int_mode mode, inner_mode;
11597 : 266559872 : if (!is_a <scalar_int_mode> (GET_MODE (x), &mode))
11598 : 390492910 : continue;
11599 : 142626834 : int size;
11600 : :
11601 : 142626834 : if (GET_CODE (x) == ZERO_EXTRACT
11602 : 756307 : && CONST_INT_P (XEXP (x, 1))
11603 : 756287 : && CONST_INT_P (XEXP (x, 2))
11604 : 714550 : && is_a <scalar_int_mode> (GET_MODE (XEXP (x, 0)), &inner_mode)
11605 : 143341384 : && GET_MODE_PRECISION (inner_mode) <= GET_MODE_PRECISION (mode))
11606 : : {
11607 : 714544 : size = INTVAL (XEXP (x, 1));
11608 : :
11609 : 714544 : int start = INTVAL (XEXP (x, 2));
11610 : 714544 : if (BITS_BIG_ENDIAN)
11611 : : start = GET_MODE_PRECISION (inner_mode) - size - start;
11612 : :
11613 : 714544 : if (start != 0)
11614 : 608752 : x = gen_rtx_LSHIFTRT (inner_mode, XEXP (x, 0),
11615 : : gen_int_shift_amount (inner_mode, start));
11616 : : else
11617 : : x = XEXP (x, 0);
11618 : :
11619 : 714544 : if (mode != inner_mode)
11620 : : {
11621 : 335 : if (REG_P (x) && HARD_REGISTER_P (x)
11622 : 181795 : && !can_change_dest_mode (x, 0, mode))
11623 : 0 : continue;
11624 : :
11625 : 181795 : x = gen_lowpart_SUBREG (mode, x);
11626 : : }
11627 : : }
11628 : 141912290 : else if (GET_CODE (x) == ZERO_EXTEND
11629 : 2061537 : && GET_CODE (XEXP (x, 0)) == SUBREG
11630 : 365931 : && SCALAR_INT_MODE_P (GET_MODE (SUBREG_REG (XEXP (x, 0))))
11631 : 365082 : && !paradoxical_subreg_p (XEXP (x, 0))
11632 : 142277372 : && subreg_lowpart_p (XEXP (x, 0)))
11633 : : {
11634 : 257484 : inner_mode = as_a <scalar_int_mode> (GET_MODE (XEXP (x, 0)));
11635 : 257484 : size = GET_MODE_PRECISION (inner_mode);
11636 : 257484 : x = SUBREG_REG (XEXP (x, 0));
11637 : 257484 : if (GET_MODE (x) != mode)
11638 : : {
11639 : 31450 : if (REG_P (x) && HARD_REGISTER_P (x)
11640 : 32455 : && !can_change_dest_mode (x, 0, mode))
11641 : 0 : continue;
11642 : :
11643 : 32455 : x = gen_lowpart_SUBREG (mode, x);
11644 : : }
11645 : : }
11646 : 283309545 : else if (GET_CODE (x) == ZERO_EXTEND
11647 : 1804053 : && REG_P (XEXP (x, 0))
11648 : 944751 : && HARD_REGISTER_P (XEXP (x, 0))
11649 : 141654873 : && can_change_dest_mode (XEXP (x, 0), 0, mode))
11650 : : {
11651 : 67 : inner_mode = as_a <scalar_int_mode> (GET_MODE (XEXP (x, 0)));
11652 : 67 : size = GET_MODE_PRECISION (inner_mode);
11653 : 67 : x = gen_rtx_REG (mode, REGNO (XEXP (x, 0)));
11654 : : }
11655 : : else
11656 : 141654739 : continue;
11657 : :
11658 : 1425082 : if (!(GET_CODE (x) == LSHIFTRT
11659 : 452987 : && CONST_INT_P (XEXP (x, 1))
11660 : 452987 : && size + INTVAL (XEXP (x, 1)) == GET_MODE_PRECISION (mode)))
11661 : : {
11662 : 782293 : wide_int mask = wi::mask (size, false, GET_MODE_PRECISION (mode));
11663 : 782293 : x = gen_rtx_AND (mode, x, immed_wide_int_const (mask, mode));
11664 : 782293 : }
11665 : :
11666 : 972095 : SUBST (**iter, x);
11667 : 972095 : changed = true;
11668 : : }
11669 : :
11670 : 45499777 : if (changed)
11671 : 8790399 : FOR_EACH_SUBRTX_PTR (iter, array, src, NONCONST)
11672 : 7835616 : maybe_swap_commutative_operands (**iter);
11673 : :
11674 : 45499777 : rtx *dst = &SET_DEST (pat);
11675 : 45499777 : scalar_int_mode mode;
11676 : 45499777 : if (GET_CODE (*dst) == ZERO_EXTRACT
11677 : 11183 : && REG_P (XEXP (*dst, 0))
11678 : 387 : && is_a <scalar_int_mode> (GET_MODE (XEXP (*dst, 0)), &mode)
11679 : 387 : && CONST_INT_P (XEXP (*dst, 1))
11680 : 45500164 : && CONST_INT_P (XEXP (*dst, 2)))
11681 : : {
11682 : 235 : rtx reg = XEXP (*dst, 0);
11683 : 235 : int width = INTVAL (XEXP (*dst, 1));
11684 : 235 : int offset = INTVAL (XEXP (*dst, 2));
11685 : 235 : int reg_width = GET_MODE_PRECISION (mode);
11686 : 235 : if (BITS_BIG_ENDIAN)
11687 : : offset = reg_width - width - offset;
11688 : :
11689 : 235 : rtx x, y, z, w;
11690 : 235 : wide_int mask = wi::shifted_mask (offset, width, true, reg_width);
11691 : 235 : wide_int mask2 = wi::shifted_mask (offset, width, false, reg_width);
11692 : 235 : x = gen_rtx_AND (mode, reg, immed_wide_int_const (mask, mode));
11693 : 235 : if (offset)
11694 : 187 : y = gen_rtx_ASHIFT (mode, SET_SRC (pat), GEN_INT (offset));
11695 : : else
11696 : 48 : y = SET_SRC (pat);
11697 : 235 : z = gen_rtx_AND (mode, y, immed_wide_int_const (mask2, mode));
11698 : 235 : w = gen_rtx_IOR (mode, x, z);
11699 : 235 : SUBST (SET_DEST (pat), reg);
11700 : 235 : SUBST (SET_SRC (pat), w);
11701 : :
11702 : 235 : changed = true;
11703 : 235 : }
11704 : :
11705 : 45499777 : return changed;
11706 : 45499777 : }
11707 : :
11708 : : /* Like recog, but we receive the address of a pointer to a new pattern.
11709 : : We try to match the rtx that the pointer points to.
11710 : : If that fails, we may try to modify or replace the pattern,
11711 : : storing the replacement into the same pointer object.
11712 : :
11713 : : Modifications include deletion or addition of CLOBBERs. If the
11714 : : instruction will still not match, we change ZERO_EXTEND and ZERO_EXTRACT
11715 : : to the equivalent AND and perhaps LSHIFTRT patterns, and try with that
11716 : : (and undo if that fails).
11717 : :
11718 : : PNOTES is a pointer to a location where any REG_UNUSED notes added for
11719 : : the CLOBBERs are placed.
11720 : : If OLD_NREGS != NEW_NREGS, then PNOTES also includes REG_DEAD notes added.
11721 : :
11722 : : The value is the final insn code from the pattern ultimately matched,
11723 : : or -1. */
11724 : :
11725 : : static int
11726 : 44343701 : recog_for_combine (rtx *pnewpat, rtx_insn *insn, rtx *pnotes,
11727 : : unsigned int old_nregs, unsigned int new_nregs)
11728 : : {
11729 : 44343701 : rtx pat = *pnewpat;
11730 : 44343701 : int insn_code_number = recog_for_combine_1 (pnewpat, insn, pnotes,
11731 : : old_nregs, new_nregs);
11732 : 44343701 : if (insn_code_number >= 0 || check_asm_operands (pat))
11733 : 9458534 : return insn_code_number;
11734 : :
11735 : 34885167 : void *marker = get_undo_marker ();
11736 : 34885167 : bool changed = false;
11737 : :
11738 : 34885167 : if (GET_CODE (pat) == SET)
11739 : : {
11740 : : /* For an unrecognized single set of a constant, try placing it in
11741 : : the constant pool, if this function already uses one. */
11742 : 20637921 : rtx src = SET_SRC (pat);
11743 : 20637921 : if (CONSTANT_P (src)
11744 : 444487 : && !CONST_INT_P (src)
11745 : 399267 : && crtl->uses_const_pool)
11746 : : {
11747 : 351012 : machine_mode mode = GET_MODE (src);
11748 : 351012 : if (mode == VOIDmode)
11749 : 1137 : mode = GET_MODE (SET_DEST (pat));
11750 : 351012 : src = force_const_mem (mode, src);
11751 : 351012 : if (src)
11752 : : {
11753 : 351010 : SUBST (SET_SRC (pat), src);
11754 : 351010 : changed = true;
11755 : : }
11756 : : }
11757 : : else
11758 : 20286909 : changed = change_zero_ext (pat);
11759 : : }
11760 : 14247246 : else if (GET_CODE (pat) == PARALLEL)
11761 : : {
11762 : : int i;
11763 : 39731858 : for (i = 0; i < XVECLEN (pat, 0); i++)
11764 : : {
11765 : 25507157 : rtx set = XVECEXP (pat, 0, i);
11766 : 25507157 : if (GET_CODE (set) == SET)
11767 : 25212868 : changed |= change_zero_ext (set);
11768 : : }
11769 : : }
11770 : :
11771 : 34862620 : if (changed)
11772 : : {
11773 : 1210182 : insn_code_number = recog_for_combine_1 (pnewpat, insn, pnotes,
11774 : : old_nregs, new_nregs);
11775 : :
11776 : 1210182 : if (insn_code_number < 0)
11777 : 1064623 : undo_to_marker (marker);
11778 : : }
11779 : :
11780 : : return insn_code_number;
11781 : : }
11782 : :
11783 : : /* Like gen_lowpart_general but for use by combine. In combine it
11784 : : is not possible to create any new pseudoregs. However, it is
11785 : : safe to create invalid memory addresses, because combine will
11786 : : try to recognize them and all they will do is make the combine
11787 : : attempt fail.
11788 : :
11789 : : If for some reason this cannot do its job, an rtx
11790 : : (clobber (const_int 0)) is returned.
11791 : : An insn containing that will not be recognized. */
11792 : :
11793 : : static rtx
11794 : 147768835 : gen_lowpart_for_combine (machine_mode omode, rtx x)
11795 : : {
11796 : 147768835 : machine_mode imode = GET_MODE (x);
11797 : 147768835 : rtx result;
11798 : :
11799 : 147768835 : if (omode == imode)
11800 : : return x;
11801 : :
11802 : : /* We can only support MODE being wider than a word if X is a
11803 : : constant integer or has a mode the same size. */
11804 : 51101300 : if (maybe_gt (GET_MODE_SIZE (omode), UNITS_PER_WORD)
11805 : 24260788 : && ! (CONST_SCALAR_INT_P (x)
11806 : 9680148 : || known_eq (GET_MODE_SIZE (imode), GET_MODE_SIZE (omode))))
11807 : 2976457 : goto fail;
11808 : :
11809 : : /* X might be a paradoxical (subreg (mem)). In that case, gen_lowpart
11810 : : won't know what to do. So we will strip off the SUBREG here and
11811 : : process normally. */
11812 : 21284331 : if (GET_CODE (x) == SUBREG && MEM_P (SUBREG_REG (x)))
11813 : : {
11814 : 10791 : x = SUBREG_REG (x);
11815 : :
11816 : : /* For use in case we fall down into the address adjustments
11817 : : further below, we need to adjust the known mode and size of
11818 : : x; imode and isize, since we just adjusted x. */
11819 : 10791 : imode = GET_MODE (x);
11820 : :
11821 : 10791 : if (imode == omode)
11822 : : return x;
11823 : : }
11824 : :
11825 : 21276651 : result = gen_lowpart_common (omode, x);
11826 : :
11827 : 21276651 : if (result)
11828 : : return result;
11829 : :
11830 : 8387853 : if (MEM_P (x))
11831 : : {
11832 : : /* Refuse to work on a volatile memory ref or one with a mode-dependent
11833 : : address. */
11834 : 1894650 : if (MEM_VOLATILE_P (x)
11835 : 3750595 : || mode_dependent_address_p (XEXP (x, 0), MEM_ADDR_SPACE (x)))
11836 : 38735 : goto fail;
11837 : :
11838 : : /* If we want to refer to something bigger than the original memref,
11839 : : generate a paradoxical subreg instead. That will force a reload
11840 : : of the original memref X. */
11841 : 1855915 : if (paradoxical_subreg_p (omode, imode))
11842 : 1655336 : return gen_rtx_SUBREG (omode, x, 0);
11843 : :
11844 : 200579 : poly_int64 offset = byte_lowpart_offset (omode, imode);
11845 : 200579 : return adjust_address_nv (x, omode, offset);
11846 : : }
11847 : :
11848 : : /* If X is a comparison operator, rewrite it in a new mode. This
11849 : : probably won't match, but may allow further simplifications. */
11850 : 6493203 : else if (COMPARISON_P (x)
11851 : 127143 : && SCALAR_INT_MODE_P (imode)
11852 : 48005 : && SCALAR_INT_MODE_P (omode))
11853 : 47994 : return gen_rtx_fmt_ee (GET_CODE (x), omode, XEXP (x, 0), XEXP (x, 1));
11854 : :
11855 : : /* If we couldn't simplify X any other way, just enclose it in a
11856 : : SUBREG. Normally, this SUBREG won't match, but some patterns may
11857 : : include an explicit SUBREG or we may simplify it further in combine. */
11858 : : else
11859 : : {
11860 : 6445209 : rtx res;
11861 : :
11862 : 6445209 : if (imode == VOIDmode)
11863 : : {
11864 : 4 : imode = int_mode_for_mode (omode).require ();
11865 : 4 : x = gen_lowpart_common (imode, x);
11866 : 4 : if (x == NULL)
11867 : 0 : goto fail;
11868 : : }
11869 : 6445209 : res = lowpart_subreg (omode, x, imode);
11870 : 6445209 : if (res)
11871 : : return res;
11872 : : }
11873 : :
11874 : 12851 : fail:
11875 : 3028043 : return gen_rtx_CLOBBER (omode, const0_rtx);
11876 : : }
11877 : :
11878 : : /* Try to simplify a comparison between OP0 and a constant OP1,
11879 : : where CODE is the comparison code that will be tested, into a
11880 : : (CODE OP0 const0_rtx) form.
11881 : :
11882 : : The result is a possibly different comparison code to use.
11883 : : *POP0 and *POP1 may be updated. */
11884 : :
11885 : : static enum rtx_code
11886 : 13987055 : simplify_compare_const (enum rtx_code code, machine_mode mode,
11887 : : rtx *pop0, rtx *pop1)
11888 : : {
11889 : 13987055 : scalar_int_mode int_mode;
11890 : 13987055 : rtx op0 = *pop0;
11891 : 13987055 : HOST_WIDE_INT const_op = INTVAL (*pop1);
11892 : :
11893 : : /* Get the constant we are comparing against and turn off all bits
11894 : : not on in our mode. */
11895 : 13987055 : if (mode != VOIDmode)
11896 : 13754889 : const_op = trunc_int_for_mode (const_op, mode);
11897 : :
11898 : : /* If we are comparing against a constant power of two and the value
11899 : : being compared can only have that single bit nonzero (e.g., it was
11900 : : `and'ed with that bit), we can replace this with a comparison
11901 : : with zero. */
11902 : 13987055 : if (const_op
11903 : 3704768 : && (code == EQ || code == NE || code == GEU || code == LTU
11904 : : /* This optimization is incorrect for signed >= INT_MIN or
11905 : : < INT_MIN, those are always true or always false. */
11906 : 22141 : || ((code == GE || code == LT) && const_op > 0))
11907 : 2540640 : && is_a <scalar_int_mode> (mode, &int_mode)
11908 : 2540640 : && GET_MODE_PRECISION (int_mode) - 1 < HOST_BITS_PER_WIDE_INT
11909 : 2520509 : && pow2p_hwi (const_op & GET_MODE_MASK (int_mode))
11910 : 14813176 : && (nonzero_bits (op0, int_mode)
11911 : 826121 : == (unsigned HOST_WIDE_INT) (const_op & GET_MODE_MASK (int_mode))))
11912 : : {
11913 : 2834 : code = (code == EQ || code == GE || code == GEU ? NE : EQ);
11914 : : const_op = 0;
11915 : : }
11916 : :
11917 : : /* Similarly, if we are comparing a value known to be either -1 or
11918 : : 0 with -1, change it to the opposite comparison against zero. */
11919 : 13984221 : if (const_op == -1
11920 : 221721 : && (code == EQ || code == NE || code == GT || code == LE
11921 : : || code == GEU || code == LTU)
11922 : 14196429 : && is_a <scalar_int_mode> (mode, &int_mode)
11923 : 14203468 : && num_sign_bit_copies (op0, int_mode) == GET_MODE_PRECISION (int_mode))
11924 : : {
11925 : 9873 : code = (code == EQ || code == LE || code == GEU ? NE : EQ);
11926 : : const_op = 0;
11927 : : }
11928 : :
11929 : : /* Do some canonicalizations based on the comparison code. We prefer
11930 : : comparisons against zero and then prefer equality comparisons.
11931 : : If we can reduce the size of a constant, we will do that too. */
11932 : 13977182 : switch (code)
11933 : : {
11934 : 247831 : case LT:
11935 : : /* < C is equivalent to <= (C - 1) */
11936 : 247831 : if (const_op > 0)
11937 : : {
11938 : 4170 : const_op -= 1;
11939 : 4170 : code = LE;
11940 : : /* ... fall through to LE case below. */
11941 : 334018 : gcc_fallthrough ();
11942 : : }
11943 : : else
11944 : : break;
11945 : :
11946 : 334018 : case LE:
11947 : : /* <= C is equivalent to < (C + 1); we do this for C < 0 */
11948 : 334018 : if (const_op < 0)
11949 : : {
11950 : 42 : const_op += 1;
11951 : 42 : code = LT;
11952 : : }
11953 : :
11954 : : /* If we are doing a <= 0 comparison on a value known to have
11955 : : a zero sign bit, we can replace this with == 0. */
11956 : 333976 : else if (const_op == 0
11957 : 210794 : && is_a <scalar_int_mode> (mode, &int_mode)
11958 : 210794 : && GET_MODE_PRECISION (int_mode) - 1 < HOST_BITS_PER_WIDE_INT
11959 : 544770 : && (nonzero_bits (op0, int_mode)
11960 : 210794 : & (HOST_WIDE_INT_1U << (GET_MODE_PRECISION (int_mode) - 1)))
11961 : 210794 : == 0)
11962 : : code = EQ;
11963 : : break;
11964 : :
11965 : 231640 : case GE:
11966 : : /* >= C is equivalent to > (C - 1). */
11967 : 231640 : if (const_op > 0)
11968 : : {
11969 : 1503 : const_op -= 1;
11970 : 1503 : code = GT;
11971 : : /* ... fall through to GT below. */
11972 : 256272 : gcc_fallthrough ();
11973 : : }
11974 : : else
11975 : : break;
11976 : :
11977 : 256272 : case GT:
11978 : : /* > C is equivalent to >= (C + 1); we do this for C < 0. */
11979 : 256272 : if (const_op < 0)
11980 : : {
11981 : 134 : const_op += 1;
11982 : 134 : code = GE;
11983 : : }
11984 : :
11985 : : /* If we are doing a > 0 comparison on a value known to have
11986 : : a zero sign bit, we can replace this with != 0. */
11987 : 256138 : else if (const_op == 0
11988 : 140217 : && is_a <scalar_int_mode> (mode, &int_mode)
11989 : 140217 : && GET_MODE_PRECISION (int_mode) - 1 < HOST_BITS_PER_WIDE_INT
11990 : 396355 : && (nonzero_bits (op0, int_mode)
11991 : 140217 : & (HOST_WIDE_INT_1U << (GET_MODE_PRECISION (int_mode) - 1)))
11992 : 140217 : == 0)
11993 : : code = NE;
11994 : : break;
11995 : :
11996 : 69819 : case LTU:
11997 : : /* < C is equivalent to <= (C - 1). */
11998 : 69819 : if (const_op > 0)
11999 : : {
12000 : 60049 : const_op -= 1;
12001 : 60049 : code = LEU;
12002 : : /* ... fall through ... */
12003 : 60049 : gcc_fallthrough ();
12004 : : }
12005 : : /* (unsigned) < 0x80000000 is equivalent to >= 0. */
12006 : 9770 : else if (is_a <scalar_int_mode> (mode, &int_mode)
12007 : 9770 : && GET_MODE_PRECISION (int_mode) - 1 < HOST_BITS_PER_WIDE_INT
12008 : 9000 : && (((unsigned HOST_WIDE_INT) const_op & GET_MODE_MASK (int_mode))
12009 : 9000 : == HOST_WIDE_INT_1U << (GET_MODE_PRECISION (int_mode) - 1)))
12010 : : {
12011 : : const_op = 0;
12012 : : code = GE;
12013 : : break;
12014 : : }
12015 : : else
12016 : : break;
12017 : :
12018 : 555314 : case LEU:
12019 : : /* unsigned <= 0 is equivalent to == 0 */
12020 : 555314 : if (const_op == 0)
12021 : : code = EQ;
12022 : : /* (unsigned) <= 0x7fffffff is equivalent to >= 0. */
12023 : 555051 : else if (is_a <scalar_int_mode> (mode, &int_mode)
12024 : 555051 : && GET_MODE_PRECISION (int_mode) - 1 < HOST_BITS_PER_WIDE_INT
12025 : 553110 : && ((unsigned HOST_WIDE_INT) const_op
12026 : : == ((HOST_WIDE_INT_1U
12027 : 553110 : << (GET_MODE_PRECISION (int_mode) - 1)) - 1)))
12028 : : {
12029 : : const_op = 0;
12030 : : code = GE;
12031 : : }
12032 : : break;
12033 : :
12034 : 22536 : case GEU:
12035 : : /* >= C is equivalent to > (C - 1). */
12036 : 22536 : if (const_op > 1)
12037 : : {
12038 : 13905 : const_op -= 1;
12039 : 13905 : code = GTU;
12040 : : /* ... fall through ... */
12041 : 13905 : gcc_fallthrough ();
12042 : : }
12043 : :
12044 : : /* (unsigned) >= 0x80000000 is equivalent to < 0. */
12045 : 8631 : else if (is_a <scalar_int_mode> (mode, &int_mode)
12046 : 8631 : && GET_MODE_PRECISION (int_mode) - 1 < HOST_BITS_PER_WIDE_INT
12047 : 7440 : && (((unsigned HOST_WIDE_INT) const_op & GET_MODE_MASK (int_mode))
12048 : 7440 : == HOST_WIDE_INT_1U << (GET_MODE_PRECISION (int_mode) - 1)))
12049 : : {
12050 : : const_op = 0;
12051 : : code = LT;
12052 : : break;
12053 : : }
12054 : : else
12055 : : break;
12056 : :
12057 : 432694 : case GTU:
12058 : : /* unsigned > 0 is equivalent to != 0 */
12059 : 432694 : if (const_op == 0)
12060 : : code = NE;
12061 : : /* (unsigned) > 0x7fffffff is equivalent to < 0. */
12062 : 432694 : else if (is_a <scalar_int_mode> (mode, &int_mode)
12063 : 432694 : && GET_MODE_PRECISION (int_mode) - 1 < HOST_BITS_PER_WIDE_INT
12064 : 431683 : && ((unsigned HOST_WIDE_INT) const_op
12065 : : == (HOST_WIDE_INT_1U
12066 : 431683 : << (GET_MODE_PRECISION (int_mode) - 1)) - 1))
12067 : : {
12068 : : const_op = 0;
12069 : : code = LT;
12070 : : }
12071 : : break;
12072 : :
12073 : : default:
12074 : : break;
12075 : : }
12076 : :
12077 : : /* Narrow non-symmetric comparison of memory and constant as e.g.
12078 : : x0...x7 <= 0x3fffffffffffffff into x0 <= 0x3f where x0 is the most
12079 : : significant byte. Likewise, transform x0...x7 >= 0x4000000000000000 into
12080 : : x0 >= 0x40. */
12081 : 13414653 : if ((code == LEU || code == LTU || code == GEU || code == GTU)
12082 : 1002828 : && is_a <scalar_int_mode> (GET_MODE (op0), &int_mode)
12083 : 1002805 : && HWI_COMPUTABLE_MODE_P (int_mode)
12084 : 997892 : && MEM_P (op0)
12085 : 63831 : && !MEM_VOLATILE_P (op0)
12086 : : /* The optimization makes only sense for constants which are big enough
12087 : : so that we have a chance to chop off something at all. */
12088 : 63172 : && ((unsigned HOST_WIDE_INT) const_op & GET_MODE_MASK (int_mode)) > 0xff
12089 : : /* Ensure that we do not overflow during normalization. */
12090 : 16493 : && (code != GTU
12091 : 2400 : || ((unsigned HOST_WIDE_INT) const_op & GET_MODE_MASK (int_mode))
12092 : : < HOST_WIDE_INT_M1U)
12093 : 14003548 : && trunc_int_for_mode (const_op, int_mode) == const_op)
12094 : : {
12095 : 16493 : unsigned HOST_WIDE_INT n
12096 : 16493 : = (unsigned HOST_WIDE_INT) const_op & GET_MODE_MASK (int_mode);
12097 : 16493 : enum rtx_code adjusted_code;
12098 : :
12099 : : /* Normalize code to either LEU or GEU. */
12100 : 16493 : if (code == LTU)
12101 : : {
12102 : 332 : --n;
12103 : 332 : adjusted_code = LEU;
12104 : : }
12105 : 16161 : else if (code == GTU)
12106 : : {
12107 : 2400 : ++n;
12108 : 2400 : adjusted_code = GEU;
12109 : : }
12110 : : else
12111 : : adjusted_code = code;
12112 : :
12113 : 16493 : scalar_int_mode narrow_mode_iter;
12114 : 51805 : FOR_EACH_MODE_UNTIL (narrow_mode_iter, int_mode)
12115 : : {
12116 : 35925 : unsigned nbits = GET_MODE_PRECISION (int_mode)
12117 : 35925 : - GET_MODE_PRECISION (narrow_mode_iter);
12118 : 35925 : unsigned HOST_WIDE_INT mask = (HOST_WIDE_INT_1U << nbits) - 1;
12119 : 35925 : unsigned HOST_WIDE_INT lower_bits = n & mask;
12120 : 35925 : if ((adjusted_code == LEU && lower_bits == mask)
12121 : 35678 : || (adjusted_code == GEU && lower_bits == 0))
12122 : : {
12123 : 613 : n >>= nbits;
12124 : 613 : break;
12125 : : }
12126 : : }
12127 : :
12128 : 16493 : if (narrow_mode_iter < int_mode)
12129 : : {
12130 : 613 : if (dump_file && (dump_flags & TDF_DETAILS))
12131 : : {
12132 : 12 : fprintf (
12133 : : dump_file, "narrow comparison from mode %s to %s: (MEM %s "
12134 : : HOST_WIDE_INT_PRINT_HEX ") to (MEM %s "
12135 : 12 : HOST_WIDE_INT_PRINT_HEX ").\n", GET_MODE_NAME (int_mode),
12136 : 12 : GET_MODE_NAME (narrow_mode_iter), GET_RTX_NAME (code),
12137 : 12 : (unsigned HOST_WIDE_INT) const_op & GET_MODE_MASK (int_mode),
12138 : 12 : GET_RTX_NAME (adjusted_code), n);
12139 : : }
12140 : 613 : poly_int64 offset = (BYTES_BIG_ENDIAN
12141 : 613 : ? 0
12142 : 613 : : (GET_MODE_SIZE (int_mode)
12143 : 613 : - GET_MODE_SIZE (narrow_mode_iter)));
12144 : 613 : *pop0 = adjust_address_nv (op0, narrow_mode_iter, offset);
12145 : 613 : *pop1 = gen_int_mode (n, narrow_mode_iter);
12146 : 613 : return adjusted_code;
12147 : : }
12148 : : }
12149 : :
12150 : 13986442 : *pop1 = GEN_INT (const_op);
12151 : 13986442 : return code;
12152 : : }
12153 : :
12154 : : /* Simplify a comparison between *POP0 and *POP1 where CODE is the
12155 : : comparison code that will be tested.
12156 : :
12157 : : The result is a possibly different comparison code to use. *POP0 and
12158 : : *POP1 may be updated.
12159 : :
12160 : : It is possible that we might detect that a comparison is either always
12161 : : true or always false. However, we do not perform general constant
12162 : : folding in combine, so this knowledge isn't useful. Such tautologies
12163 : : should have been detected earlier. Hence we ignore all such cases. */
12164 : :
12165 : : static enum rtx_code
12166 : 20589616 : simplify_comparison (enum rtx_code code, rtx *pop0, rtx *pop1)
12167 : : {
12168 : 20589616 : rtx op0 = *pop0;
12169 : 20589616 : rtx op1 = *pop1;
12170 : 20589616 : rtx tem, tem1;
12171 : 20589616 : int i;
12172 : 20589616 : scalar_int_mode mode, inner_mode, tmode;
12173 : 20589616 : opt_scalar_int_mode tmode_iter;
12174 : :
12175 : : /* Try a few ways of applying the same transformation to both operands. */
12176 : 20589898 : while (1)
12177 : : {
12178 : : /* The test below this one won't handle SIGN_EXTENDs on these machines,
12179 : : so check specially. */
12180 : 20589898 : if (!WORD_REGISTER_OPERATIONS
12181 : 20589898 : && code != GTU && code != GEU && code != LTU && code != LEU
12182 : 17938516 : && GET_CODE (op0) == ASHIFTRT && GET_CODE (op1) == ASHIFTRT
12183 : 1657 : && GET_CODE (XEXP (op0, 0)) == ASHIFT
12184 : 1179 : && GET_CODE (XEXP (op1, 0)) == ASHIFT
12185 : 688 : && GET_CODE (XEXP (XEXP (op0, 0), 0)) == SUBREG
12186 : 611 : && GET_CODE (XEXP (XEXP (op1, 0), 0)) == SUBREG
12187 : 611 : && is_a <scalar_int_mode> (GET_MODE (op0), &mode)
12188 : : && (is_a <scalar_int_mode>
12189 : 611 : (GET_MODE (SUBREG_REG (XEXP (XEXP (op0, 0), 0))), &inner_mode))
12190 : 611 : && inner_mode == GET_MODE (SUBREG_REG (XEXP (XEXP (op1, 0), 0)))
12191 : 611 : && CONST_INT_P (XEXP (op0, 1))
12192 : 611 : && XEXP (op0, 1) == XEXP (op1, 1)
12193 : 86 : && XEXP (op0, 1) == XEXP (XEXP (op0, 0), 1)
12194 : 86 : && XEXP (op0, 1) == XEXP (XEXP (op1, 0), 1)
12195 : 86 : && (INTVAL (XEXP (op0, 1))
12196 : 86 : == (GET_MODE_PRECISION (mode)
12197 : 86 : - GET_MODE_PRECISION (inner_mode))))
12198 : : {
12199 : 86 : op0 = SUBREG_REG (XEXP (XEXP (op0, 0), 0));
12200 : 86 : op1 = SUBREG_REG (XEXP (XEXP (op1, 0), 0));
12201 : : }
12202 : :
12203 : : /* If both operands are the same constant shift, see if we can ignore the
12204 : : shift. We can if the shift is a rotate or if the bits shifted out of
12205 : : this shift are known to be zero for both inputs and if the type of
12206 : : comparison is compatible with the shift. */
12207 : 20589898 : if (GET_CODE (op0) == GET_CODE (op1)
12208 : 3007788 : && HWI_COMPUTABLE_MODE_P (GET_MODE (op0))
12209 : 2799229 : && ((GET_CODE (op0) == ROTATE && (code == NE || code == EQ))
12210 : 2799229 : || ((GET_CODE (op0) == LSHIFTRT || GET_CODE (op0) == ASHIFT)
12211 : 962 : && (code != GT && code != LT && code != GE && code != LE))
12212 : 2798322 : || (GET_CODE (op0) == ASHIFTRT
12213 : 1633 : && (code != GTU && code != LTU
12214 : 1578 : && code != GEU && code != LEU)))
12215 : 2478 : && CONST_INT_P (XEXP (op0, 1))
12216 : 2446 : && INTVAL (XEXP (op0, 1)) >= 0
12217 : 2446 : && INTVAL (XEXP (op0, 1)) < HOST_BITS_PER_WIDE_INT
12218 : 20592344 : && XEXP (op0, 1) == XEXP (op1, 1))
12219 : : {
12220 : 1146 : machine_mode mode = GET_MODE (op0);
12221 : 1146 : unsigned HOST_WIDE_INT mask = GET_MODE_MASK (mode);
12222 : 1146 : int shift_count = INTVAL (XEXP (op0, 1));
12223 : :
12224 : 1146 : if (GET_CODE (op0) == LSHIFTRT || GET_CODE (op0) == ASHIFTRT)
12225 : 737 : mask &= (mask >> shift_count) << shift_count;
12226 : 409 : else if (GET_CODE (op0) == ASHIFT)
12227 : 409 : mask = (mask & (mask << shift_count)) >> shift_count;
12228 : :
12229 : 1146 : if ((nonzero_bits (XEXP (op0, 0), mode) & ~mask) == 0
12230 : 1146 : && (nonzero_bits (XEXP (op1, 0), mode) & ~mask) == 0)
12231 : 131 : op0 = XEXP (op0, 0), op1 = XEXP (op1, 0);
12232 : : else
12233 : : break;
12234 : : }
12235 : :
12236 : : /* If both operands are AND's of a paradoxical SUBREG by constant, the
12237 : : SUBREGs are of the same mode, and, in both cases, the AND would
12238 : : be redundant if the comparison was done in the narrower mode,
12239 : : do the comparison in the narrower mode (e.g., we are AND'ing with 1
12240 : : and the operand's possibly nonzero bits are 0xffffff01; in that case
12241 : : if we only care about QImode, we don't need the AND). This case
12242 : : occurs if the output mode of an scc insn is not SImode and
12243 : : STORE_FLAG_VALUE == 1 (e.g., the 386).
12244 : :
12245 : : Similarly, check for a case where the AND's are ZERO_EXTEND
12246 : : operations from some narrower mode even though a SUBREG is not
12247 : : present. */
12248 : :
12249 : 20588752 : else if (GET_CODE (op0) == AND && GET_CODE (op1) == AND
12250 : 2463 : && CONST_INT_P (XEXP (op0, 1))
12251 : 2369 : && CONST_INT_P (XEXP (op1, 1)))
12252 : : {
12253 : 2354 : rtx inner_op0 = XEXP (op0, 0);
12254 : 2354 : rtx inner_op1 = XEXP (op1, 0);
12255 : 2354 : HOST_WIDE_INT c0 = INTVAL (XEXP (op0, 1));
12256 : 2354 : HOST_WIDE_INT c1 = INTVAL (XEXP (op1, 1));
12257 : 2354 : bool changed = false;
12258 : :
12259 : 2354 : if (paradoxical_subreg_p (inner_op0)
12260 : 814 : && GET_CODE (inner_op1) == SUBREG
12261 : 381 : && HWI_COMPUTABLE_MODE_P (GET_MODE (SUBREG_REG (inner_op0)))
12262 : 381 : && (GET_MODE (SUBREG_REG (inner_op0))
12263 : 381 : == GET_MODE (SUBREG_REG (inner_op1)))
12264 : 105 : && ((~c0) & nonzero_bits (SUBREG_REG (inner_op0),
12265 : : GET_MODE (SUBREG_REG (inner_op0)))) == 0
12266 : 2453 : && ((~c1) & nonzero_bits (SUBREG_REG (inner_op1),
12267 : 99 : GET_MODE (SUBREG_REG (inner_op1)))) == 0)
12268 : : {
12269 : 93 : op0 = SUBREG_REG (inner_op0);
12270 : 93 : op1 = SUBREG_REG (inner_op1);
12271 : :
12272 : : /* The resulting comparison is always unsigned since we masked
12273 : : off the original sign bit. */
12274 : 93 : code = unsigned_condition (code);
12275 : :
12276 : 93 : changed = true;
12277 : : }
12278 : :
12279 : 2261 : else if (c0 == c1)
12280 : 4799 : FOR_EACH_MODE_UNTIL (tmode,
12281 : : as_a <scalar_int_mode> (GET_MODE (op0)))
12282 : 2910 : if ((unsigned HOST_WIDE_INT) c0 == GET_MODE_MASK (tmode))
12283 : : {
12284 : 49 : op0 = gen_lowpart_or_truncate (tmode, inner_op0);
12285 : 49 : op1 = gen_lowpart_or_truncate (tmode, inner_op1);
12286 : 49 : code = unsigned_condition (code);
12287 : 49 : changed = true;
12288 : 49 : break;
12289 : : }
12290 : :
12291 : 2031 : if (! changed)
12292 : : break;
12293 : : }
12294 : :
12295 : : /* If both operands are NOT, we can strip off the outer operation
12296 : : and adjust the comparison code for swapped operands; similarly for
12297 : : NEG, except that this must be an equality comparison. */
12298 : 20586398 : else if ((GET_CODE (op0) == NOT && GET_CODE (op1) == NOT)
12299 : 20586397 : || (GET_CODE (op0) == NEG && GET_CODE (op1) == NEG
12300 : 8 : && (code == EQ || code == NE)))
12301 : 9 : op0 = XEXP (op0, 0), op1 = XEXP (op1, 0), code = swap_condition (code);
12302 : :
12303 : : else
12304 : : break;
12305 : : }
12306 : :
12307 : : /* If the first operand is a constant, swap the operands and adjust the
12308 : : comparison code appropriately, but don't do this if the second operand
12309 : : is already a constant integer. */
12310 : 20589616 : if (swap_commutative_operands_p (op0, op1))
12311 : : {
12312 : 1281440 : std::swap (op0, op1);
12313 : 1281440 : code = swap_condition (code);
12314 : : }
12315 : :
12316 : : /* We now enter a loop during which we will try to simplify the comparison.
12317 : : For the most part, we only are concerned with comparisons with zero,
12318 : : but some things may really be comparisons with zero but not start
12319 : : out looking that way. */
12320 : :
12321 : 21603415 : while (CONST_INT_P (op1))
12322 : : {
12323 : 14208781 : machine_mode raw_mode = GET_MODE (op0);
12324 : 14208781 : scalar_int_mode int_mode;
12325 : 14208781 : int equality_comparison_p;
12326 : 14208781 : int sign_bit_comparison_p;
12327 : 14208781 : int unsigned_comparison_p;
12328 : 14208781 : HOST_WIDE_INT const_op;
12329 : :
12330 : : /* We only want to handle integral modes. This catches VOIDmode,
12331 : : CCmode, and the floating-point modes. An exception is that we
12332 : : can handle VOIDmode if OP0 is a COMPARE or a comparison
12333 : : operation. */
12334 : :
12335 : 14208781 : if (GET_MODE_CLASS (raw_mode) != MODE_INT
12336 : 1206897 : && ! (raw_mode == VOIDmode
12337 : 232211 : && (GET_CODE (op0) == COMPARE || COMPARISON_P (op0))))
12338 : : break;
12339 : :
12340 : : /* Try to simplify the compare to constant, possibly changing the
12341 : : comparison op, and/or changing op1 to zero. */
12342 : 13234050 : code = simplify_compare_const (code, raw_mode, &op0, &op1);
12343 : 13234050 : const_op = INTVAL (op1);
12344 : :
12345 : : /* Compute some predicates to simplify code below. */
12346 : :
12347 : 13234050 : equality_comparison_p = (code == EQ || code == NE);
12348 : 13234050 : sign_bit_comparison_p = ((code == LT || code == GE) && const_op == 0);
12349 : 13234050 : unsigned_comparison_p = (code == LTU || code == LEU || code == GTU
12350 : 13234050 : || code == GEU);
12351 : :
12352 : : /* If this is a sign bit comparison and we can do arithmetic in
12353 : : MODE, say that we will only be needing the sign bit of OP0. */
12354 : 13234050 : if (sign_bit_comparison_p
12355 : 433410 : && is_a <scalar_int_mode> (raw_mode, &int_mode)
12356 : 13667460 : && HWI_COMPUTABLE_MODE_P (int_mode))
12357 : 433018 : op0 = force_to_mode (op0, int_mode,
12358 : : HOST_WIDE_INT_1U
12359 : 433018 : << (GET_MODE_PRECISION (int_mode) - 1), false);
12360 : :
12361 : 13234050 : if (COMPARISON_P (op0))
12362 : : {
12363 : : /* We can't do anything if OP0 is a condition code value, rather
12364 : : than an actual data value. */
12365 : 569635 : if (const_op != 0
12366 : 569635 : || GET_MODE_CLASS (GET_MODE (XEXP (op0, 0))) == MODE_CC)
12367 : : break;
12368 : :
12369 : : /* Get the two operands being compared. */
12370 : 74544 : if (GET_CODE (XEXP (op0, 0)) == COMPARE)
12371 : 0 : tem = XEXP (XEXP (op0, 0), 0), tem1 = XEXP (XEXP (op0, 0), 1);
12372 : : else
12373 : 74544 : tem = XEXP (op0, 0), tem1 = XEXP (op0, 1);
12374 : :
12375 : : /* Check for the cases where we simply want the result of the
12376 : : earlier test or the opposite of that result. */
12377 : 74544 : if (code == NE || code == EQ
12378 : 74544 : || (val_signbit_known_set_p (raw_mode, STORE_FLAG_VALUE)
12379 : 0 : && (code == LT || code == GE)))
12380 : : {
12381 : 74544 : enum rtx_code new_code;
12382 : 74544 : if (code == LT || code == NE)
12383 : 74544 : new_code = GET_CODE (op0);
12384 : : else
12385 : 0 : new_code = reversed_comparison_code (op0, NULL);
12386 : :
12387 : 74544 : if (new_code != UNKNOWN)
12388 : : {
12389 : 74544 : code = new_code;
12390 : 74544 : op0 = tem;
12391 : 74544 : op1 = tem1;
12392 : 21677959 : continue;
12393 : : }
12394 : : }
12395 : : break;
12396 : : }
12397 : :
12398 : 12664415 : if (raw_mode == VOIDmode)
12399 : : break;
12400 : 12664415 : scalar_int_mode mode = as_a <scalar_int_mode> (raw_mode);
12401 : :
12402 : : /* Now try cases based on the opcode of OP0. If none of the cases
12403 : : does a "continue", we exit this loop immediately after the
12404 : : switch. */
12405 : :
12406 : 12664415 : unsigned int mode_width = GET_MODE_PRECISION (mode);
12407 : 12664415 : unsigned HOST_WIDE_INT mask = GET_MODE_MASK (mode);
12408 : 12664415 : switch (GET_CODE (op0))
12409 : : {
12410 : 295717 : case ZERO_EXTRACT:
12411 : : /* If we are extracting a single bit from a variable position in
12412 : : a constant that has only a single bit set and are comparing it
12413 : : with zero, we can convert this into an equality comparison
12414 : : between the position and the location of the single bit. */
12415 : : /* Except we can't if SHIFT_COUNT_TRUNCATED is set, since we might
12416 : : have already reduced the shift count modulo the word size. */
12417 : 295717 : if (!SHIFT_COUNT_TRUNCATED
12418 : 295717 : && CONST_INT_P (XEXP (op0, 0))
12419 : 9222 : && XEXP (op0, 1) == const1_rtx
12420 : 9202 : && equality_comparison_p && const_op == 0
12421 : 304919 : && (i = exact_log2 (UINTVAL (XEXP (op0, 0)))) >= 0)
12422 : : {
12423 : 0 : if (BITS_BIG_ENDIAN)
12424 : : i = BITS_PER_WORD - 1 - i;
12425 : :
12426 : 0 : op0 = XEXP (op0, 2);
12427 : 0 : op1 = GEN_INT (i);
12428 : 0 : const_op = i;
12429 : :
12430 : : /* Result is nonzero iff shift count is equal to I. */
12431 : 0 : code = reverse_condition (code);
12432 : 0 : continue;
12433 : : }
12434 : :
12435 : : /* fall through */
12436 : :
12437 : 295721 : case SIGN_EXTRACT:
12438 : 295721 : tem = expand_compound_operation (op0);
12439 : 295721 : if (tem != op0)
12440 : : {
12441 : 260585 : op0 = tem;
12442 : 260585 : continue;
12443 : : }
12444 : : break;
12445 : :
12446 : 26568 : case NOT:
12447 : : /* If testing for equality, we can take the NOT of the constant. */
12448 : 36533 : if (equality_comparison_p
12449 : 26568 : && (tem = simplify_unary_operation (NOT, mode, op1, mode)) != 0)
12450 : : {
12451 : 9965 : op0 = XEXP (op0, 0);
12452 : 9965 : op1 = tem;
12453 : 9965 : continue;
12454 : : }
12455 : :
12456 : : /* If just looking at the sign bit, reverse the sense of the
12457 : : comparison. */
12458 : 16603 : if (sign_bit_comparison_p)
12459 : : {
12460 : 16258 : op0 = XEXP (op0, 0);
12461 : 16258 : code = (code == GE ? LT : GE);
12462 : 16258 : continue;
12463 : : }
12464 : : break;
12465 : :
12466 : 276387 : case NEG:
12467 : : /* If testing for equality, we can take the NEG of the constant. */
12468 : 548529 : if (equality_comparison_p
12469 : 276387 : && (tem = simplify_unary_operation (NEG, mode, op1, mode)) != 0)
12470 : : {
12471 : 272142 : op0 = XEXP (op0, 0);
12472 : 272142 : op1 = tem;
12473 : 272142 : continue;
12474 : : }
12475 : :
12476 : : /* The remaining cases only apply to comparisons with zero. */
12477 : 4245 : if (const_op != 0)
12478 : : break;
12479 : :
12480 : : /* When X is ABS or is known positive,
12481 : : (neg X) is < 0 if and only if X != 0. */
12482 : :
12483 : 3708 : if (sign_bit_comparison_p
12484 : 3670 : && (GET_CODE (XEXP (op0, 0)) == ABS
12485 : 3669 : || (mode_width <= HOST_BITS_PER_WIDE_INT
12486 : 3669 : && (nonzero_bits (XEXP (op0, 0), mode)
12487 : 3669 : & (HOST_WIDE_INT_1U << (mode_width - 1)))
12488 : 3669 : == 0)))
12489 : : {
12490 : 38 : op0 = XEXP (op0, 0);
12491 : 38 : code = (code == LT ? NE : EQ);
12492 : 38 : continue;
12493 : : }
12494 : :
12495 : : /* If we have NEG of something whose two high-order bits are the
12496 : : same, we know that "(-a) < 0" is equivalent to "a > 0". */
12497 : 3632 : if (num_sign_bit_copies (op0, mode) >= 2)
12498 : : {
12499 : 3 : op0 = XEXP (op0, 0);
12500 : 3 : code = swap_condition (code);
12501 : 3 : continue;
12502 : : }
12503 : : break;
12504 : :
12505 : 146 : case ROTATE:
12506 : : /* If we are testing equality and our count is a constant, we
12507 : : can perform the inverse operation on our RHS. */
12508 : 146 : if (equality_comparison_p && CONST_INT_P (XEXP (op0, 1))
12509 : 146 : && (tem = simplify_binary_operation (ROTATERT, mode,
12510 : : op1, XEXP (op0, 1))) != 0)
12511 : : {
12512 : 0 : op0 = XEXP (op0, 0);
12513 : 0 : op1 = tem;
12514 : 0 : continue;
12515 : : }
12516 : :
12517 : : /* If we are doing a < 0 or >= 0 comparison, it means we are testing
12518 : : a particular bit. Convert it to an AND of a constant of that
12519 : : bit. This will be converted into a ZERO_EXTRACT. */
12520 : 146 : if (const_op == 0 && sign_bit_comparison_p
12521 : 0 : && CONST_INT_P (XEXP (op0, 1))
12522 : 0 : && mode_width <= HOST_BITS_PER_WIDE_INT
12523 : 0 : && UINTVAL (XEXP (op0, 1)) < mode_width)
12524 : : {
12525 : 0 : op0 = simplify_and_const_int (NULL_RTX, mode, XEXP (op0, 0),
12526 : : (HOST_WIDE_INT_1U
12527 : : << (mode_width - 1
12528 : 0 : - INTVAL (XEXP (op0, 1)))));
12529 : 0 : code = (code == LT ? NE : EQ);
12530 : 0 : continue;
12531 : : }
12532 : :
12533 : : /* Fall through. */
12534 : :
12535 : 2775 : case ABS:
12536 : : /* ABS is ignorable inside an equality comparison with zero. */
12537 : 2775 : if (const_op == 0 && equality_comparison_p)
12538 : : {
12539 : 1 : op0 = XEXP (op0, 0);
12540 : 1 : continue;
12541 : : }
12542 : : break;
12543 : :
12544 : 1042 : case SIGN_EXTEND:
12545 : : /* Can simplify (compare (zero/sign_extend FOO) CONST) to
12546 : : (compare FOO CONST) if CONST fits in FOO's mode and we
12547 : : are either testing inequality or have an unsigned
12548 : : comparison with ZERO_EXTEND or a signed comparison with
12549 : : SIGN_EXTEND. But don't do it if we don't have a compare
12550 : : insn of the given mode, since we'd have to revert it
12551 : : later on, and then we wouldn't know whether to sign- or
12552 : : zero-extend. */
12553 : 1042 : if (is_int_mode (GET_MODE (XEXP (op0, 0)), &mode)
12554 : 1042 : && ! unsigned_comparison_p
12555 : 915 : && HWI_COMPUTABLE_MODE_P (mode)
12556 : 915 : && trunc_int_for_mode (const_op, mode) == const_op
12557 : 915 : && have_insn_for (COMPARE, mode))
12558 : : {
12559 : 915 : op0 = XEXP (op0, 0);
12560 : 915 : continue;
12561 : : }
12562 : : break;
12563 : :
12564 : 407603 : case SUBREG:
12565 : : /* Check for the case where we are comparing A - C1 with C2, that is
12566 : :
12567 : : (subreg:MODE (plus (A) (-C1))) op (C2)
12568 : :
12569 : : with C1 a constant, and try to lift the SUBREG, i.e. to do the
12570 : : comparison in the wider mode. One of the following two conditions
12571 : : must be true in order for this to be valid:
12572 : :
12573 : : 1. The mode extension results in the same bit pattern being added
12574 : : on both sides and the comparison is equality or unsigned. As
12575 : : C2 has been truncated to fit in MODE, the pattern can only be
12576 : : all 0s or all 1s.
12577 : :
12578 : : 2. The mode extension results in the sign bit being copied on
12579 : : each side.
12580 : :
12581 : : The difficulty here is that we have predicates for A but not for
12582 : : (A - C1) so we need to check that C1 is within proper bounds so
12583 : : as to perturbate A as little as possible. */
12584 : :
12585 : 407603 : if (mode_width <= HOST_BITS_PER_WIDE_INT
12586 : 407507 : && subreg_lowpart_p (op0)
12587 : 378716 : && is_a <scalar_int_mode> (GET_MODE (SUBREG_REG (op0)),
12588 : : &inner_mode)
12589 : 377427 : && GET_MODE_PRECISION (inner_mode) > mode_width
12590 : 377427 : && GET_CODE (SUBREG_REG (op0)) == PLUS
12591 : 407603 : && CONST_INT_P (XEXP (SUBREG_REG (op0), 1)))
12592 : : {
12593 : 0 : rtx a = XEXP (SUBREG_REG (op0), 0);
12594 : 0 : HOST_WIDE_INT c1 = -INTVAL (XEXP (SUBREG_REG (op0), 1));
12595 : :
12596 : 0 : if ((c1 > 0
12597 : 0 : && (unsigned HOST_WIDE_INT) c1
12598 : 0 : < HOST_WIDE_INT_1U << (mode_width - 1)
12599 : 0 : && (equality_comparison_p || unsigned_comparison_p)
12600 : : /* (A - C1) zero-extends if it is positive and sign-extends
12601 : : if it is negative, C2 both zero- and sign-extends. */
12602 : 0 : && (((nonzero_bits (a, inner_mode)
12603 : 0 : & ~GET_MODE_MASK (mode)) == 0
12604 : 0 : && const_op >= 0)
12605 : : /* (A - C1) sign-extends if it is positive and 1-extends
12606 : : if it is negative, C2 both sign- and 1-extends. */
12607 : 0 : || (num_sign_bit_copies (a, inner_mode)
12608 : 0 : > (unsigned int) (GET_MODE_PRECISION (inner_mode)
12609 : 0 : - mode_width)
12610 : 0 : && const_op < 0)))
12611 : 0 : || ((unsigned HOST_WIDE_INT) c1
12612 : 0 : < HOST_WIDE_INT_1U << (mode_width - 2)
12613 : : /* (A - C1) always sign-extends, like C2. */
12614 : 0 : && num_sign_bit_copies (a, inner_mode)
12615 : 0 : > (unsigned int) (GET_MODE_PRECISION (inner_mode)
12616 : 0 : - (mode_width - 1))))
12617 : : {
12618 : 0 : op0 = SUBREG_REG (op0);
12619 : 0 : continue;
12620 : : }
12621 : : }
12622 : :
12623 : : /* If the inner mode is narrower and we are extracting the low part,
12624 : : we can treat the SUBREG as if it were a ZERO_EXTEND ... */
12625 : 407603 : if (paradoxical_subreg_p (op0))
12626 : : {
12627 : : if (WORD_REGISTER_OPERATIONS
12628 : : && is_a <scalar_int_mode> (GET_MODE (SUBREG_REG (op0)),
12629 : : &inner_mode)
12630 : : && GET_MODE_PRECISION (inner_mode) < BITS_PER_WORD
12631 : : /* On WORD_REGISTER_OPERATIONS targets the bits
12632 : : beyond sub_mode aren't considered undefined,
12633 : : so optimize only if it is a MEM load when MEM loads
12634 : : zero extend, because then the upper bits are all zero. */
12635 : : && !(MEM_P (SUBREG_REG (op0))
12636 : : && load_extend_op (inner_mode) == ZERO_EXTEND))
12637 : : break;
12638 : : /* FALLTHROUGH to case ZERO_EXTEND */
12639 : : }
12640 : 407603 : else if (subreg_lowpart_p (op0)
12641 : 378812 : && GET_MODE_CLASS (mode) == MODE_INT
12642 : 378812 : && is_int_mode (GET_MODE (SUBREG_REG (op0)), &inner_mode)
12643 : 377427 : && (code == NE || code == EQ)
12644 : 282415 : && GET_MODE_PRECISION (inner_mode) <= HOST_BITS_PER_WIDE_INT
12645 : 276604 : && !paradoxical_subreg_p (op0)
12646 : 684207 : && (nonzero_bits (SUBREG_REG (op0), inner_mode)
12647 : 276604 : & ~GET_MODE_MASK (mode)) == 0)
12648 : : {
12649 : : /* Remove outer subregs that don't do anything. */
12650 : 93557 : tem = gen_lowpart (inner_mode, op1);
12651 : :
12652 : 93557 : if ((nonzero_bits (tem, inner_mode)
12653 : 93557 : & ~GET_MODE_MASK (mode)) == 0)
12654 : : {
12655 : 92444 : op0 = SUBREG_REG (op0);
12656 : 92444 : op1 = tem;
12657 : 92444 : continue;
12658 : : }
12659 : : break;
12660 : : }
12661 : : else
12662 : : break;
12663 : :
12664 : : /* FALLTHROUGH */
12665 : :
12666 : 38544 : case ZERO_EXTEND:
12667 : 38544 : if (is_int_mode (GET_MODE (XEXP (op0, 0)), &mode)
12668 : 38544 : && (unsigned_comparison_p || equality_comparison_p)
12669 : 38502 : && HWI_COMPUTABLE_MODE_P (mode)
12670 : 38502 : && (unsigned HOST_WIDE_INT) const_op <= GET_MODE_MASK (mode)
12671 : 38502 : && const_op >= 0
12672 : 38493 : && have_insn_for (COMPARE, mode))
12673 : : {
12674 : 38493 : op0 = XEXP (op0, 0);
12675 : 38493 : continue;
12676 : : }
12677 : : break;
12678 : :
12679 : 390151 : case PLUS:
12680 : : /* (eq (plus X A) B) -> (eq X (minus B A)). We can only do
12681 : : this for equality comparisons due to pathological cases involving
12682 : : overflows. */
12683 : 436491 : if (equality_comparison_p
12684 : 390151 : && (tem = simplify_binary_operation (MINUS, mode,
12685 : : op1, XEXP (op0, 1))) != 0)
12686 : : {
12687 : 46340 : op0 = XEXP (op0, 0);
12688 : 46340 : op1 = tem;
12689 : 46340 : continue;
12690 : : }
12691 : :
12692 : : /* (plus (abs X) (const_int -1)) is < 0 if and only if X == 0. */
12693 : 343811 : if (const_op == 0 && XEXP (op0, 1) == constm1_rtx
12694 : 12643 : && GET_CODE (XEXP (op0, 0)) == ABS && sign_bit_comparison_p)
12695 : : {
12696 : 0 : op0 = XEXP (XEXP (op0, 0), 0);
12697 : 0 : code = (code == LT ? EQ : NE);
12698 : 0 : continue;
12699 : : }
12700 : : break;
12701 : :
12702 : 136858 : case MINUS:
12703 : : /* We used to optimize signed comparisons against zero, but that
12704 : : was incorrect. Unsigned comparisons against zero (GTU, LEU)
12705 : : arrive here as equality comparisons, or (GEU, LTU) are
12706 : : optimized away. No need to special-case them. */
12707 : :
12708 : : /* (eq (minus A B) C) -> (eq A (plus B C)) or
12709 : : (eq B (minus A C)), whichever simplifies. We can only do
12710 : : this for equality comparisons due to pathological cases involving
12711 : : overflows. */
12712 : 165890 : if (equality_comparison_p
12713 : 136858 : && (tem = simplify_binary_operation (PLUS, mode,
12714 : : XEXP (op0, 1), op1)) != 0)
12715 : : {
12716 : 29032 : op0 = XEXP (op0, 0);
12717 : 29032 : op1 = tem;
12718 : 29032 : continue;
12719 : : }
12720 : :
12721 : 130483 : if (equality_comparison_p
12722 : 107826 : && (tem = simplify_binary_operation (MINUS, mode,
12723 : : XEXP (op0, 0), op1)) != 0)
12724 : : {
12725 : 22657 : op0 = XEXP (op0, 1);
12726 : 22657 : op1 = tem;
12727 : 22657 : continue;
12728 : : }
12729 : :
12730 : : /* The sign bit of (minus (ashiftrt X C) X), where C is the number
12731 : : of bits in X minus 1, is one iff X > 0. */
12732 : 16317 : if (sign_bit_comparison_p && GET_CODE (XEXP (op0, 0)) == ASHIFTRT
12733 : 510 : && CONST_INT_P (XEXP (XEXP (op0, 0), 1))
12734 : 510 : && UINTVAL (XEXP (XEXP (op0, 0), 1)) == mode_width - 1
12735 : 85187 : && rtx_equal_p (XEXP (XEXP (op0, 0), 0), XEXP (op0, 1)))
12736 : : {
12737 : 0 : op0 = XEXP (op0, 1);
12738 : 0 : code = (code == GE ? LE : GT);
12739 : 0 : continue;
12740 : : }
12741 : : break;
12742 : :
12743 : 5551 : case XOR:
12744 : : /* (eq (xor A B) C) -> (eq A (xor B C)). This is a simplification
12745 : : if C is zero or B is a constant. */
12746 : 5617 : if (equality_comparison_p
12747 : 5551 : && (tem = simplify_binary_operation (XOR, mode,
12748 : : XEXP (op0, 1), op1)) != 0)
12749 : : {
12750 : 66 : op0 = XEXP (op0, 0);
12751 : 66 : op1 = tem;
12752 : 66 : continue;
12753 : : }
12754 : : break;
12755 : :
12756 : :
12757 : 343665 : case IOR:
12758 : : /* The sign bit of (ior (plus X (const_int -1)) X) is nonzero
12759 : : iff X <= 0. */
12760 : 6072 : if (sign_bit_comparison_p && GET_CODE (XEXP (op0, 0)) == PLUS
12761 : 1270 : && XEXP (XEXP (op0, 0), 1) == constm1_rtx
12762 : 343713 : && rtx_equal_p (XEXP (XEXP (op0, 0), 0), XEXP (op0, 1)))
12763 : : {
12764 : 48 : op0 = XEXP (op0, 1);
12765 : 48 : code = (code == GE ? GT : LE);
12766 : 48 : continue;
12767 : : }
12768 : : break;
12769 : :
12770 : 1517635 : case AND:
12771 : : /* Convert (and (xshift 1 X) Y) to (and (lshiftrt Y X) 1). This
12772 : : will be converted to a ZERO_EXTRACT later. */
12773 : 1517635 : if (const_op == 0 && equality_comparison_p
12774 : 1385533 : && GET_CODE (XEXP (op0, 0)) == ASHIFT
12775 : 42719 : && XEXP (XEXP (op0, 0), 0) == const1_rtx)
12776 : : {
12777 : 6194 : op0 = gen_rtx_LSHIFTRT (mode, XEXP (op0, 1),
12778 : : XEXP (XEXP (op0, 0), 1));
12779 : 6194 : op0 = simplify_and_const_int (NULL_RTX, mode, op0, 1);
12780 : 6194 : continue;
12781 : : }
12782 : :
12783 : : /* If we are comparing (and (lshiftrt X C1) C2) for equality with
12784 : : zero and X is a comparison and C1 and C2 describe only bits set
12785 : : in STORE_FLAG_VALUE, we can compare with X. */
12786 : 1511441 : if (const_op == 0 && equality_comparison_p
12787 : 1379339 : && mode_width <= HOST_BITS_PER_WIDE_INT
12788 : 1375570 : && CONST_INT_P (XEXP (op0, 1))
12789 : 1088346 : && GET_CODE (XEXP (op0, 0)) == LSHIFTRT
12790 : 377732 : && CONST_INT_P (XEXP (XEXP (op0, 0), 1))
12791 : 364520 : && INTVAL (XEXP (XEXP (op0, 0), 1)) >= 0
12792 : 364520 : && INTVAL (XEXP (XEXP (op0, 0), 1)) < HOST_BITS_PER_WIDE_INT)
12793 : : {
12794 : 364520 : mask = ((INTVAL (XEXP (op0, 1)) & GET_MODE_MASK (mode))
12795 : 364520 : << INTVAL (XEXP (XEXP (op0, 0), 1)));
12796 : 364520 : if ((~STORE_FLAG_VALUE & mask) == 0
12797 : 364520 : && (COMPARISON_P (XEXP (XEXP (op0, 0), 0))
12798 : 0 : || ((tem = get_last_value (XEXP (XEXP (op0, 0), 0))) != 0
12799 : 0 : && COMPARISON_P (tem))))
12800 : : {
12801 : 0 : op0 = XEXP (XEXP (op0, 0), 0);
12802 : 0 : continue;
12803 : : }
12804 : : }
12805 : :
12806 : : /* If we are doing an equality comparison of an AND of a bit equal
12807 : : to the sign bit, replace this with a LT or GE comparison of
12808 : : the underlying value. */
12809 : 1511975 : if (equality_comparison_p
12810 : : && const_op == 0
12811 : 1379339 : && CONST_INT_P (XEXP (op0, 1))
12812 : 1088647 : && mode_width <= HOST_BITS_PER_WIDE_INT
12813 : 1511441 : && ((INTVAL (XEXP (op0, 1)) & GET_MODE_MASK (mode))
12814 : 1088346 : == HOST_WIDE_INT_1U << (mode_width - 1)))
12815 : : {
12816 : 534 : op0 = XEXP (op0, 0);
12817 : 534 : code = (code == EQ ? GE : LT);
12818 : 534 : continue;
12819 : : }
12820 : :
12821 : : /* If this AND operation is really a ZERO_EXTEND from a narrower
12822 : : mode, the constant fits within that mode, and this is either an
12823 : : equality or unsigned comparison, try to do this comparison in
12824 : : the narrower mode.
12825 : :
12826 : : Note that in:
12827 : :
12828 : : (ne:DI (and:DI (reg:DI 4) (const_int 0xffffffff)) (const_int 0))
12829 : : -> (ne:DI (reg:SI 4) (const_int 0))
12830 : :
12831 : : unless TARGET_TRULY_NOOP_TRUNCATION allows it or the register is
12832 : : known to hold a value of the required mode the
12833 : : transformation is invalid. */
12834 : 1527514 : if ((equality_comparison_p || unsigned_comparison_p)
12835 : 1497191 : && CONST_INT_P (XEXP (op0, 1))
12836 : 3449845 : && (i = exact_log2 ((UINTVAL (XEXP (op0, 1))
12837 : 1202759 : & GET_MODE_MASK (mode))
12838 : : + 1)) >= 0
12839 : 752786 : && const_op >> i == 0
12840 : 3744277 : && int_mode_for_size (i, 1).exists (&tmode))
12841 : : {
12842 : 16607 : op0 = gen_lowpart_or_truncate (tmode, XEXP (op0, 0));
12843 : 16607 : continue;
12844 : : }
12845 : :
12846 : : /* If this is (and:M1 (subreg:M1 X:M2 0) (const_int C1)) where C1
12847 : : fits in both M1 and M2 and the SUBREG is either paradoxical
12848 : : or represents the low part, permute the SUBREG and the AND
12849 : : and try again. */
12850 : 1494300 : if (GET_CODE (XEXP (op0, 0)) == SUBREG
12851 : 94790 : && CONST_INT_P (XEXP (op0, 1)))
12852 : : {
12853 : 89624 : unsigned HOST_WIDE_INT c1 = INTVAL (XEXP (op0, 1));
12854 : : /* Require an integral mode, to avoid creating something like
12855 : : (AND:SF ...). */
12856 : 116294 : if ((is_a <scalar_int_mode>
12857 : 89624 : (GET_MODE (SUBREG_REG (XEXP (op0, 0))), &tmode))
12858 : : /* It is unsafe to commute the AND into the SUBREG if the
12859 : : SUBREG is paradoxical and WORD_REGISTER_OPERATIONS is
12860 : : not defined. As originally written the upper bits
12861 : : have a defined value due to the AND operation.
12862 : : However, if we commute the AND inside the SUBREG then
12863 : : they no longer have defined values and the meaning of
12864 : : the code has been changed.
12865 : : Also C1 should not change value in the smaller mode,
12866 : : see PR67028 (a positive C1 can become negative in the
12867 : : smaller mode, so that the AND does no longer mask the
12868 : : upper bits). */
12869 : 89601 : && ((WORD_REGISTER_OPERATIONS
12870 : : && mode_width > GET_MODE_PRECISION (tmode)
12871 : : && mode_width <= BITS_PER_WORD
12872 : : && trunc_int_for_mode (c1, tmode) == (HOST_WIDE_INT) c1)
12873 : 89601 : || (mode_width <= GET_MODE_PRECISION (tmode)
12874 : 28458 : && subreg_lowpart_p (XEXP (op0, 0))))
12875 : 28434 : && mode_width <= HOST_BITS_PER_WIDE_INT
12876 : 28434 : && HWI_COMPUTABLE_MODE_P (tmode)
12877 : 28317 : && (c1 & ~mask) == 0
12878 : 26670 : && (c1 & ~GET_MODE_MASK (tmode)) == 0
12879 : 26670 : && c1 != mask
12880 : 26670 : && c1 != GET_MODE_MASK (tmode))
12881 : : {
12882 : 26670 : op0 = simplify_gen_binary (AND, tmode,
12883 : 26670 : SUBREG_REG (XEXP (op0, 0)),
12884 : 26670 : gen_int_mode (c1, tmode));
12885 : 26670 : op0 = gen_lowpart (mode, op0);
12886 : 26670 : continue;
12887 : : }
12888 : : }
12889 : :
12890 : : /* Convert (ne (and (not X) 1) 0) to (eq (and X 1) 0). */
12891 : 1467630 : if (const_op == 0 && equality_comparison_p
12892 : 1344322 : && XEXP (op0, 1) == const1_rtx
12893 : 530194 : && GET_CODE (XEXP (op0, 0)) == NOT)
12894 : : {
12895 : 5005 : op0 = simplify_and_const_int (NULL_RTX, mode,
12896 : : XEXP (XEXP (op0, 0), 0), 1);
12897 : 5005 : code = (code == NE ? EQ : NE);
12898 : 5005 : continue;
12899 : : }
12900 : :
12901 : : /* Convert (ne (and (lshiftrt (not X)) 1) 0) to
12902 : : (eq (and (lshiftrt X) 1) 0).
12903 : : Also handle the case where (not X) is expressed using xor. */
12904 : 1462625 : if (const_op == 0 && equality_comparison_p
12905 : 1339317 : && XEXP (op0, 1) == const1_rtx
12906 : 525189 : && GET_CODE (XEXP (op0, 0)) == LSHIFTRT)
12907 : : {
12908 : 371442 : rtx shift_op = XEXP (XEXP (op0, 0), 0);
12909 : 371442 : rtx shift_count = XEXP (XEXP (op0, 0), 1);
12910 : :
12911 : 374150 : if (GET_CODE (shift_op) == NOT
12912 : 371442 : || (GET_CODE (shift_op) == XOR
12913 : 4453 : && CONST_INT_P (XEXP (shift_op, 1))
12914 : 2708 : && CONST_INT_P (shift_count)
12915 : 2708 : && HWI_COMPUTABLE_MODE_P (mode)
12916 : 2708 : && (UINTVAL (XEXP (shift_op, 1))
12917 : : == HOST_WIDE_INT_1U
12918 : 2708 : << INTVAL (shift_count))))
12919 : : {
12920 : 2708 : op0
12921 : 2708 : = gen_rtx_LSHIFTRT (mode, XEXP (shift_op, 0), shift_count);
12922 : 2708 : op0 = simplify_and_const_int (NULL_RTX, mode, op0, 1);
12923 : 2708 : code = (code == NE ? EQ : NE);
12924 : 2708 : continue;
12925 : : }
12926 : : }
12927 : : break;
12928 : :
12929 : 45423 : case ASHIFT:
12930 : : /* If we have (compare (ashift FOO N) (const_int C)) and
12931 : : the high order N bits of FOO (N+1 if an inequality comparison)
12932 : : are known to be zero, we can do this by comparing FOO with C
12933 : : shifted right N bits so long as the low-order N bits of C are
12934 : : zero. */
12935 : 45423 : if (CONST_INT_P (XEXP (op0, 1))
12936 : 41990 : && INTVAL (XEXP (op0, 1)) >= 0
12937 : 41990 : && ((INTVAL (XEXP (op0, 1)) + ! equality_comparison_p)
12938 : : < HOST_BITS_PER_WIDE_INT)
12939 : 41988 : && (((unsigned HOST_WIDE_INT) const_op
12940 : 41988 : & ((HOST_WIDE_INT_1U << INTVAL (XEXP (op0, 1)))
12941 : : - 1)) == 0)
12942 : 31451 : && mode_width <= HOST_BITS_PER_WIDE_INT
12943 : 76844 : && (nonzero_bits (XEXP (op0, 0), mode)
12944 : 31421 : & ~(mask >> (INTVAL (XEXP (op0, 1))
12945 : 31421 : + ! equality_comparison_p))) == 0)
12946 : : {
12947 : : /* We must perform a logical shift, not an arithmetic one,
12948 : : as we want the top N bits of C to be zero. */
12949 : 271 : unsigned HOST_WIDE_INT temp = const_op & GET_MODE_MASK (mode);
12950 : :
12951 : 271 : temp >>= INTVAL (XEXP (op0, 1));
12952 : 271 : op1 = gen_int_mode (temp, mode);
12953 : 271 : op0 = XEXP (op0, 0);
12954 : 271 : continue;
12955 : 271 : }
12956 : :
12957 : : /* If we are doing a sign bit comparison, it means we are testing
12958 : : a particular bit. Convert it to the appropriate AND. */
12959 : 45152 : if (sign_bit_comparison_p && CONST_INT_P (XEXP (op0, 1))
12960 : 1311 : && mode_width <= HOST_BITS_PER_WIDE_INT)
12961 : : {
12962 : 2622 : op0 = simplify_and_const_int (NULL_RTX, mode, XEXP (op0, 0),
12963 : : (HOST_WIDE_INT_1U
12964 : : << (mode_width - 1
12965 : 1311 : - INTVAL (XEXP (op0, 1)))));
12966 : 1311 : code = (code == LT ? NE : EQ);
12967 : 1311 : continue;
12968 : : }
12969 : :
12970 : : /* If this an equality comparison with zero and we are shifting
12971 : : the low bit to the sign bit, we can convert this to an AND of the
12972 : : low-order bit. */
12973 : 43841 : if (const_op == 0 && equality_comparison_p
12974 : 9849 : && CONST_INT_P (XEXP (op0, 1))
12975 : 7540 : && UINTVAL (XEXP (op0, 1)) == mode_width - 1)
12976 : : {
12977 : 88 : op0 = simplify_and_const_int (NULL_RTX, mode, XEXP (op0, 0), 1);
12978 : 88 : continue;
12979 : : }
12980 : : break;
12981 : :
12982 : 36297 : case ASHIFTRT:
12983 : : /* If this is an equality comparison with zero, we can do this
12984 : : as a logical shift, which might be much simpler. */
12985 : 36297 : if (equality_comparison_p && const_op == 0
12986 : 20324 : && CONST_INT_P (XEXP (op0, 1)))
12987 : : {
12988 : 39808 : op0 = simplify_shift_const (NULL_RTX, LSHIFTRT, mode,
12989 : : XEXP (op0, 0),
12990 : 19904 : INTVAL (XEXP (op0, 1)));
12991 : 19904 : continue;
12992 : : }
12993 : :
12994 : : /* If OP0 is a sign extension and CODE is not an unsigned comparison,
12995 : : do the comparison in a narrower mode. */
12996 : 21156 : if (! unsigned_comparison_p
12997 : 13467 : && CONST_INT_P (XEXP (op0, 1))
12998 : 13019 : && GET_CODE (XEXP (op0, 0)) == ASHIFT
12999 : 5481 : && XEXP (op0, 1) == XEXP (XEXP (op0, 0), 1)
13000 : 5251 : && (int_mode_for_size (mode_width - INTVAL (XEXP (op0, 1)), 1)
13001 : 16393 : .exists (&tmode))
13002 : 16393 : && (((unsigned HOST_WIDE_INT) const_op
13003 : 4763 : + (GET_MODE_MASK (tmode) >> 1) + 1)
13004 : 4763 : <= GET_MODE_MASK (tmode)))
13005 : : {
13006 : 4763 : op0 = gen_lowpart (tmode, XEXP (XEXP (op0, 0), 0));
13007 : 4763 : continue;
13008 : : }
13009 : :
13010 : : /* Likewise if OP0 is a PLUS of a sign extension with a
13011 : : constant, which is usually represented with the PLUS
13012 : : between the shifts. */
13013 : 11630 : if (! unsigned_comparison_p
13014 : 8704 : && CONST_INT_P (XEXP (op0, 1))
13015 : 8256 : && GET_CODE (XEXP (op0, 0)) == PLUS
13016 : 54 : && CONST_INT_P (XEXP (XEXP (op0, 0), 1))
13017 : 22 : && GET_CODE (XEXP (XEXP (op0, 0), 0)) == ASHIFT
13018 : 2 : && XEXP (op0, 1) == XEXP (XEXP (XEXP (op0, 0), 0), 1)
13019 : 0 : && (int_mode_for_size (mode_width - INTVAL (XEXP (op0, 1)), 1)
13020 : 11630 : .exists (&tmode))
13021 : 11630 : && (((unsigned HOST_WIDE_INT) const_op
13022 : 0 : + (GET_MODE_MASK (tmode) >> 1) + 1)
13023 : 0 : <= GET_MODE_MASK (tmode)))
13024 : : {
13025 : 0 : rtx inner = XEXP (XEXP (XEXP (op0, 0), 0), 0);
13026 : 0 : rtx add_const = XEXP (XEXP (op0, 0), 1);
13027 : 0 : rtx new_const = simplify_gen_binary (ASHIFTRT, mode,
13028 : : add_const, XEXP (op0, 1));
13029 : :
13030 : 0 : op0 = simplify_gen_binary (PLUS, tmode,
13031 : 0 : gen_lowpart (tmode, inner),
13032 : : new_const);
13033 : 0 : continue;
13034 : 0 : }
13035 : :
13036 : : /* FALLTHROUGH */
13037 : 113867 : case LSHIFTRT:
13038 : : /* If we have (compare (xshiftrt FOO N) (const_int C)) and
13039 : : the low order N bits of FOO are known to be zero, we can do this
13040 : : by comparing FOO with C shifted left N bits so long as no
13041 : : overflow occurs. Even if the low order N bits of FOO aren't known
13042 : : to be zero, if the comparison is >= or < we can use the same
13043 : : optimization and for > or <= by setting all the low
13044 : : order N bits in the comparison constant. */
13045 : 113867 : if (CONST_INT_P (XEXP (op0, 1))
13046 : 109624 : && INTVAL (XEXP (op0, 1)) > 0
13047 : 109624 : && INTVAL (XEXP (op0, 1)) < HOST_BITS_PER_WIDE_INT
13048 : 109264 : && mode_width <= HOST_BITS_PER_WIDE_INT
13049 : 113867 : && (((unsigned HOST_WIDE_INT) const_op
13050 : 217006 : + (GET_CODE (op0) != LSHIFTRT
13051 : 108503 : ? ((GET_MODE_MASK (mode) >> INTVAL (XEXP (op0, 1)) >> 1)
13052 : : + 1)
13053 : : : 0))
13054 : 108503 : <= GET_MODE_MASK (mode) >> INTVAL (XEXP (op0, 1))))
13055 : : {
13056 : 108385 : unsigned HOST_WIDE_INT low_bits
13057 : 108385 : = (nonzero_bits (XEXP (op0, 0), mode)
13058 : 108385 : & ((HOST_WIDE_INT_1U
13059 : 108385 : << INTVAL (XEXP (op0, 1))) - 1));
13060 : 108385 : if (low_bits == 0 || !equality_comparison_p)
13061 : : {
13062 : : /* If the shift was logical, then we must make the condition
13063 : : unsigned. */
13064 : 21288 : if (GET_CODE (op0) == LSHIFTRT)
13065 : 16710 : code = unsigned_condition (code);
13066 : :
13067 : 21288 : const_op = (unsigned HOST_WIDE_INT) const_op
13068 : 21288 : << INTVAL (XEXP (op0, 1));
13069 : 21288 : if (low_bits != 0
13070 : 4128 : && (code == GT || code == GTU
13071 : 1731 : || code == LE || code == LEU))
13072 : 4048 : const_op
13073 : 4048 : |= ((HOST_WIDE_INT_1 << INTVAL (XEXP (op0, 1))) - 1);
13074 : 21288 : op1 = GEN_INT (const_op);
13075 : 21288 : op0 = XEXP (op0, 0);
13076 : 21288 : continue;
13077 : : }
13078 : : }
13079 : :
13080 : : /* If we are using this shift to extract just the sign bit, we
13081 : : can replace this with an LT or GE comparison. */
13082 : 92579 : if (const_op == 0
13083 : 82518 : && (equality_comparison_p || sign_bit_comparison_p)
13084 : 82490 : && CONST_INT_P (XEXP (op0, 1))
13085 : 78507 : && UINTVAL (XEXP (op0, 1)) == mode_width - 1)
13086 : : {
13087 : 44925 : op0 = XEXP (op0, 0);
13088 : 44925 : code = (code == NE || code == GT ? LT : GE);
13089 : 44925 : continue;
13090 : : }
13091 : : break;
13092 : :
13093 : : default:
13094 : : break;
13095 : : }
13096 : :
13097 : : break;
13098 : : }
13099 : :
13100 : : /* Now make any compound operations involved in this comparison. Then,
13101 : : check for an outmost SUBREG on OP0 that is not doing anything or is
13102 : : paradoxical. The latter transformation must only be performed when
13103 : : it is known that the "extra" bits will be the same in op0 and op1 or
13104 : : that they don't matter. There are three cases to consider:
13105 : :
13106 : : 1. SUBREG_REG (op0) is a register. In this case the bits are don't
13107 : : care bits and we can assume they have any convenient value. So
13108 : : making the transformation is safe.
13109 : :
13110 : : 2. SUBREG_REG (op0) is a memory and LOAD_EXTEND_OP is UNKNOWN.
13111 : : In this case the upper bits of op0 are undefined. We should not make
13112 : : the simplification in that case as we do not know the contents of
13113 : : those bits.
13114 : :
13115 : : 3. SUBREG_REG (op0) is a memory and LOAD_EXTEND_OP is not UNKNOWN.
13116 : : In that case we know those bits are zeros or ones. We must also be
13117 : : sure that they are the same as the upper bits of op1.
13118 : :
13119 : : We can never remove a SUBREG for a non-equality comparison because
13120 : : the sign bit is in a different place in the underlying object. */
13121 : :
13122 : 20589616 : rtx_code op0_mco_code = SET;
13123 : 20589616 : if (op1 == const0_rtx)
13124 : 9828570 : op0_mco_code = code == NE || code == EQ ? EQ : COMPARE;
13125 : :
13126 : 20589616 : op0 = make_compound_operation (op0, op0_mco_code);
13127 : 20589616 : op1 = make_compound_operation (op1, SET);
13128 : :
13129 : 449084 : if (GET_CODE (op0) == SUBREG && subreg_lowpart_p (op0)
13130 : 419310 : && is_int_mode (GET_MODE (op0), &mode)
13131 : 398585 : && is_int_mode (GET_MODE (SUBREG_REG (op0)), &inner_mode)
13132 : 20985616 : && (code == NE || code == EQ))
13133 : : {
13134 : 233133 : if (paradoxical_subreg_p (op0))
13135 : : {
13136 : : /* For paradoxical subregs, allow case 1 as above. Case 3 isn't
13137 : : implemented. */
13138 : 0 : if (REG_P (SUBREG_REG (op0)))
13139 : : {
13140 : 0 : op0 = SUBREG_REG (op0);
13141 : 0 : op1 = gen_lowpart (inner_mode, op1);
13142 : : }
13143 : : }
13144 : 233133 : else if (GET_MODE_PRECISION (inner_mode) <= HOST_BITS_PER_WIDE_INT
13145 : 233133 : && (nonzero_bits (SUBREG_REG (op0), inner_mode)
13146 : 226312 : & ~GET_MODE_MASK (mode)) == 0)
13147 : : {
13148 : 28870 : tem = gen_lowpart (inner_mode, op1);
13149 : :
13150 : 28870 : if ((nonzero_bits (tem, inner_mode) & ~GET_MODE_MASK (mode)) == 0)
13151 : 21037 : op0 = SUBREG_REG (op0), op1 = tem;
13152 : : }
13153 : : }
13154 : :
13155 : : /* We now do the opposite procedure: Some machines don't have compare
13156 : : insns in all modes. If OP0's mode is an integer mode smaller than a
13157 : : word and we can't do a compare in that mode, see if there is a larger
13158 : : mode for which we can do the compare. There are a number of cases in
13159 : : which we can use the wider mode. */
13160 : :
13161 : 20589616 : if (is_int_mode (GET_MODE (op0), &mode)
13162 : 21555653 : && GET_MODE_SIZE (mode) < UNITS_PER_WORD
13163 : 7920674 : && ! have_insn_for (COMPARE, mode))
13164 : 0 : FOR_EACH_WIDER_MODE (tmode_iter, mode)
13165 : : {
13166 : 0 : tmode = tmode_iter.require ();
13167 : 0 : if (!HWI_COMPUTABLE_MODE_P (tmode))
13168 : : break;
13169 : 0 : if (have_insn_for (COMPARE, tmode))
13170 : : {
13171 : 0 : int zero_extended;
13172 : :
13173 : : /* If this is a test for negative, we can make an explicit
13174 : : test of the sign bit. Test this first so we can use
13175 : : a paradoxical subreg to extend OP0. */
13176 : :
13177 : 0 : if (op1 == const0_rtx && (code == LT || code == GE)
13178 : 0 : && HWI_COMPUTABLE_MODE_P (mode))
13179 : : {
13180 : 0 : unsigned HOST_WIDE_INT sign
13181 : 0 : = HOST_WIDE_INT_1U << (GET_MODE_BITSIZE (mode) - 1);
13182 : 0 : op0 = simplify_gen_binary (AND, tmode,
13183 : 0 : gen_lowpart (tmode, op0),
13184 : 0 : gen_int_mode (sign, tmode));
13185 : 0 : code = (code == LT) ? NE : EQ;
13186 : : break;
13187 : : }
13188 : :
13189 : : /* If the only nonzero bits in OP0 and OP1 are those in the
13190 : : narrower mode and this is an equality or unsigned comparison,
13191 : : we can use the wider mode. Similarly for sign-extended
13192 : : values, in which case it is true for all comparisons. */
13193 : 0 : zero_extended = ((code == EQ || code == NE
13194 : 0 : || code == GEU || code == GTU
13195 : 0 : || code == LEU || code == LTU)
13196 : 0 : && (nonzero_bits (op0, tmode)
13197 : 0 : & ~GET_MODE_MASK (mode)) == 0
13198 : 0 : && ((CONST_INT_P (op1)
13199 : 0 : || (nonzero_bits (op1, tmode)
13200 : 0 : & ~GET_MODE_MASK (mode)) == 0)));
13201 : :
13202 : 0 : if (zero_extended
13203 : 0 : || ((num_sign_bit_copies (op0, tmode)
13204 : 0 : > (unsigned int) (GET_MODE_PRECISION (tmode)
13205 : 0 : - GET_MODE_PRECISION (mode)))
13206 : 0 : && (num_sign_bit_copies (op1, tmode)
13207 : 0 : > (unsigned int) (GET_MODE_PRECISION (tmode)
13208 : 0 : - GET_MODE_PRECISION (mode)))))
13209 : : {
13210 : : /* If OP0 is an AND and we don't have an AND in MODE either,
13211 : : make a new AND in the proper mode. */
13212 : 0 : if (GET_CODE (op0) == AND
13213 : 0 : && !have_insn_for (AND, mode))
13214 : 0 : op0 = simplify_gen_binary (AND, tmode,
13215 : 0 : gen_lowpart (tmode,
13216 : : XEXP (op0, 0)),
13217 : 0 : gen_lowpart (tmode,
13218 : : XEXP (op0, 1)));
13219 : : else
13220 : : {
13221 : 0 : if (zero_extended)
13222 : : {
13223 : 0 : op0 = simplify_gen_unary (ZERO_EXTEND, tmode,
13224 : : op0, mode);
13225 : 0 : op1 = simplify_gen_unary (ZERO_EXTEND, tmode,
13226 : : op1, mode);
13227 : : }
13228 : : else
13229 : : {
13230 : 0 : op0 = simplify_gen_unary (SIGN_EXTEND, tmode,
13231 : : op0, mode);
13232 : 0 : op1 = simplify_gen_unary (SIGN_EXTEND, tmode,
13233 : : op1, mode);
13234 : : }
13235 : : break;
13236 : : }
13237 : : }
13238 : : }
13239 : : }
13240 : :
13241 : : /* We may have changed the comparison operands. Re-canonicalize. */
13242 : 20589616 : if (swap_commutative_operands_p (op0, op1))
13243 : : {
13244 : 72378 : std::swap (op0, op1);
13245 : 72378 : code = swap_condition (code);
13246 : : }
13247 : :
13248 : : /* If this machine only supports a subset of valid comparisons, see if we
13249 : : can convert an unsupported one into a supported one. */
13250 : 20589616 : target_canonicalize_comparison (&code, &op0, &op1, 0);
13251 : :
13252 : 20589616 : *pop0 = op0;
13253 : 20589616 : *pop1 = op1;
13254 : :
13255 : 20589616 : return code;
13256 : : }
13257 : :
13258 : : /* Utility function for record_value_for_reg. Count number of
13259 : : rtxs in X. */
13260 : : static int
13261 : 1611 : count_rtxs (rtx x)
13262 : : {
13263 : 1611 : enum rtx_code code = GET_CODE (x);
13264 : 1611 : const char *fmt;
13265 : 1611 : int i, j, ret = 1;
13266 : :
13267 : 1611 : if (GET_RTX_CLASS (code) == RTX_BIN_ARITH
13268 : 1611 : || GET_RTX_CLASS (code) == RTX_COMM_ARITH)
13269 : : {
13270 : 59 : rtx x0 = XEXP (x, 0);
13271 : 59 : rtx x1 = XEXP (x, 1);
13272 : :
13273 : 59 : if (x0 == x1)
13274 : 0 : return 1 + 2 * count_rtxs (x0);
13275 : :
13276 : 59 : if ((GET_RTX_CLASS (GET_CODE (x1)) == RTX_BIN_ARITH
13277 : 59 : || GET_RTX_CLASS (GET_CODE (x1)) == RTX_COMM_ARITH)
13278 : 0 : && (x0 == XEXP (x1, 0) || x0 == XEXP (x1, 1)))
13279 : 0 : return 2 + 2 * count_rtxs (x0)
13280 : 0 : + count_rtxs (x == XEXP (x1, 0)
13281 : 0 : ? XEXP (x1, 1) : XEXP (x1, 0));
13282 : :
13283 : 59 : if ((GET_RTX_CLASS (GET_CODE (x0)) == RTX_BIN_ARITH
13284 : 59 : || GET_RTX_CLASS (GET_CODE (x0)) == RTX_COMM_ARITH)
13285 : 0 : && (x1 == XEXP (x0, 0) || x1 == XEXP (x0, 1)))
13286 : 0 : return 2 + 2 * count_rtxs (x1)
13287 : 0 : + count_rtxs (x == XEXP (x0, 0)
13288 : 0 : ? XEXP (x0, 1) : XEXP (x0, 0));
13289 : : }
13290 : :
13291 : 1611 : fmt = GET_RTX_FORMAT (code);
13292 : 3864 : for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
13293 : 2253 : if (fmt[i] == 'e')
13294 : 894 : ret += count_rtxs (XEXP (x, i));
13295 : 1359 : else if (fmt[i] == 'E')
13296 : 250 : for (j = 0; j < XVECLEN (x, i); j++)
13297 : 196 : ret += count_rtxs (XVECEXP (x, i, j));
13298 : :
13299 : : return ret;
13300 : : }
13301 : :
13302 : : /* Utility function for following routine. Called when X is part of a value
13303 : : being stored into last_set_value. Sets last_set_table_tick
13304 : : for each register mentioned. Similar to mention_regs in cse.cc */
13305 : :
13306 : : static void
13307 : 231230972 : update_table_tick (rtx x)
13308 : : {
13309 : 231871549 : enum rtx_code code = GET_CODE (x);
13310 : 231871549 : const char *fmt = GET_RTX_FORMAT (code);
13311 : 231871549 : int i, j;
13312 : :
13313 : 231871549 : if (code == REG)
13314 : : {
13315 : 77689518 : unsigned int regno = REGNO (x);
13316 : 77689518 : unsigned int endregno = END_REGNO (x);
13317 : 77689518 : unsigned int r;
13318 : :
13319 : 155476204 : for (r = regno; r < endregno; r++)
13320 : : {
13321 : 77786686 : reg_stat_type *rsp = ®_stat[r];
13322 : 77786686 : rsp->last_set_table_tick = label_tick;
13323 : : }
13324 : :
13325 : : return;
13326 : : }
13327 : :
13328 : 397118432 : for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
13329 : 243694624 : if (fmt[i] == 'e')
13330 : : {
13331 : : /* Check for identical subexpressions. If x contains
13332 : : identical subexpression we only have to traverse one of
13333 : : them. */
13334 : 143361226 : if (i == 0 && ARITHMETIC_P (x))
13335 : : {
13336 : : /* Note that at this point x1 has already been
13337 : : processed. */
13338 : 56140799 : rtx x0 = XEXP (x, 0);
13339 : 56140799 : rtx x1 = XEXP (x, 1);
13340 : :
13341 : : /* If x0 and x1 are identical then there is no need to
13342 : : process x0. */
13343 : 56140799 : if (x0 == x1)
13344 : : break;
13345 : :
13346 : : /* If x0 is identical to a subexpression of x1 then while
13347 : : processing x1, x0 has already been processed. Thus we
13348 : : are done with x. */
13349 : 56023279 : if (ARITHMETIC_P (x1)
13350 : 442506 : && (x0 == XEXP (x1, 0) || x0 == XEXP (x1, 1)))
13351 : : break;
13352 : :
13353 : : /* If x1 is identical to a subexpression of x0 then we
13354 : : still have to process the rest of x0. */
13355 : 56023153 : if (ARITHMETIC_P (x0)
13356 : 15892205 : && (x1 == XEXP (x0, 0) || x1 == XEXP (x0, 1)))
13357 : : {
13358 : 640577 : update_table_tick (XEXP (x0, x1 == XEXP (x0, 0) ? 1 : 0));
13359 : 640577 : break;
13360 : : }
13361 : : }
13362 : :
13363 : 142603003 : update_table_tick (XEXP (x, i));
13364 : : }
13365 : 100333398 : else if (fmt[i] == 'E')
13366 : 8835151 : for (j = 0; j < XVECLEN (x, i); j++)
13367 : 6421280 : update_table_tick (XVECEXP (x, i, j));
13368 : : }
13369 : :
13370 : : /* Record that REG is set to VALUE in insn INSN. If VALUE is zero, we
13371 : : are saying that the register is clobbered and we no longer know its
13372 : : value. If INSN is zero, don't update reg_stat[].last_set; this is
13373 : : only permitted with VALUE also zero and is used to invalidate the
13374 : : register. */
13375 : :
13376 : : static void
13377 : 108233776 : record_value_for_reg (rtx reg, rtx_insn *insn, rtx value)
13378 : : {
13379 : 108233776 : unsigned int regno = REGNO (reg);
13380 : 108233776 : unsigned int endregno = END_REGNO (reg);
13381 : 108233776 : unsigned int i;
13382 : 108233776 : reg_stat_type *rsp;
13383 : :
13384 : : /* If VALUE contains REG and we have a previous value for REG, substitute
13385 : : the previous value. */
13386 : 108233776 : if (value && insn && reg_overlap_mentioned_p (reg, value))
13387 : : {
13388 : 6036553 : rtx tem;
13389 : :
13390 : : /* Set things up so get_last_value is allowed to see anything set up to
13391 : : our insn. */
13392 : 6036553 : subst_low_luid = DF_INSN_LUID (insn);
13393 : 6036553 : tem = get_last_value (reg);
13394 : :
13395 : : /* If TEM is simply a binary operation with two CLOBBERs as operands,
13396 : : it isn't going to be useful and will take a lot of time to process,
13397 : : so just use the CLOBBER. */
13398 : :
13399 : 6036553 : if (tem)
13400 : : {
13401 : 2470669 : if (ARITHMETIC_P (tem)
13402 : 2254934 : && GET_CODE (XEXP (tem, 0)) == CLOBBER
13403 : 1057025 : && GET_CODE (XEXP (tem, 1)) == CLOBBER)
13404 : : tem = XEXP (tem, 0);
13405 : 2469451 : else if (count_occurrences (value, reg, 1) >= 2)
13406 : : {
13407 : : /* If there are two or more occurrences of REG in VALUE,
13408 : : prevent the value from growing too much. */
13409 : 521 : if (count_rtxs (tem) > param_max_last_value_rtl)
13410 : 0 : tem = gen_rtx_CLOBBER (GET_MODE (tem), const0_rtx);
13411 : : }
13412 : :
13413 : 2470669 : value = replace_rtx (copy_rtx (value), reg, tem);
13414 : : }
13415 : : }
13416 : :
13417 : : /* For each register modified, show we don't know its value, that
13418 : : we don't know about its bitwise content, that its value has been
13419 : : updated, and that we don't know the location of the death of the
13420 : : register. */
13421 : 216824743 : for (i = regno; i < endregno; i++)
13422 : : {
13423 : 108590967 : rsp = ®_stat[i];
13424 : :
13425 : 108590967 : if (insn)
13426 : 99051526 : rsp->last_set = insn;
13427 : :
13428 : 108590967 : rsp->last_set_value = 0;
13429 : 108590967 : rsp->last_set_mode = VOIDmode;
13430 : 108590967 : rsp->last_set_nonzero_bits = 0;
13431 : 108590967 : rsp->last_set_sign_bit_copies = 0;
13432 : 108590967 : rsp->last_death = 0;
13433 : 108590967 : rsp->truncated_to_mode = VOIDmode;
13434 : : }
13435 : :
13436 : : /* Mark registers that are being referenced in this value. */
13437 : 108233776 : if (value)
13438 : 82206689 : update_table_tick (value);
13439 : :
13440 : : /* Now update the status of each register being set.
13441 : : If someone is using this register in this block, set this register
13442 : : to invalid since we will get confused between the two lives in this
13443 : : basic block. This makes using this register always invalid. In cse, we
13444 : : scan the table to invalidate all entries using this register, but this
13445 : : is too much work for us. */
13446 : :
13447 : 216824743 : for (i = regno; i < endregno; i++)
13448 : : {
13449 : 108590967 : rsp = ®_stat[i];
13450 : 108590967 : rsp->last_set_label = label_tick;
13451 : 108590967 : if (!insn
13452 : 99051526 : || (value && rsp->last_set_table_tick >= label_tick_ebb_start))
13453 : 19630334 : rsp->last_set_invalid = true;
13454 : : else
13455 : 88960633 : rsp->last_set_invalid = false;
13456 : : }
13457 : :
13458 : : /* The value being assigned might refer to X (like in "x++;"). In that
13459 : : case, we must replace it with (clobber (const_int 0)) to prevent
13460 : : infinite loops. */
13461 : 108233776 : rsp = ®_stat[regno];
13462 : 108233776 : if (value && !get_last_value_validate (&value, insn, label_tick, false))
13463 : : {
13464 : 10699879 : value = copy_rtx (value);
13465 : 10699879 : if (!get_last_value_validate (&value, insn, label_tick, true))
13466 : 0 : value = 0;
13467 : : }
13468 : :
13469 : : /* For the main register being modified, update the value, the mode, the
13470 : : nonzero bits, and the number of sign bit copies. */
13471 : :
13472 : 108233776 : rsp->last_set_value = value;
13473 : :
13474 : 108233776 : if (value)
13475 : : {
13476 : 82206689 : machine_mode mode = GET_MODE (reg);
13477 : 82206689 : subst_low_luid = DF_INSN_LUID (insn);
13478 : 82206689 : rsp->last_set_mode = mode;
13479 : 82206689 : if (GET_MODE_CLASS (mode) == MODE_INT
13480 : 82206689 : && HWI_COMPUTABLE_MODE_P (mode))
13481 : 62153858 : mode = nonzero_bits_mode;
13482 : 82206689 : rsp->last_set_nonzero_bits = nonzero_bits (value, mode);
13483 : 82206689 : rsp->last_set_sign_bit_copies
13484 : 82206689 : = num_sign_bit_copies (value, GET_MODE (reg));
13485 : : }
13486 : 108233776 : }
13487 : :
13488 : : /* Called via note_stores from record_dead_and_set_regs to handle one
13489 : : SET or CLOBBER in an insn. DATA is the instruction in which the
13490 : : set is occurring. */
13491 : :
13492 : : static void
13493 : 128849638 : record_dead_and_set_regs_1 (rtx dest, const_rtx setter, void *data)
13494 : : {
13495 : 128849638 : rtx_insn *record_dead_insn = (rtx_insn *) data;
13496 : :
13497 : 128849638 : if (GET_CODE (dest) == SUBREG)
13498 : 5 : dest = SUBREG_REG (dest);
13499 : :
13500 : 128849638 : if (!record_dead_insn)
13501 : : {
13502 : 4861834 : if (REG_P (dest))
13503 : 4861834 : record_value_for_reg (dest, NULL, NULL_RTX);
13504 : 4861834 : return;
13505 : : }
13506 : :
13507 : 123987804 : if (REG_P (dest))
13508 : : {
13509 : : /* If we are setting the whole register, we know its value. */
13510 : 98869813 : if (GET_CODE (setter) == SET && dest == SET_DEST (setter))
13511 : 82057023 : record_value_for_reg (dest, record_dead_insn, SET_SRC (setter));
13512 : : /* We can handle a SUBREG if it's the low part, but we must be
13513 : : careful with paradoxical SUBREGs on RISC architectures because
13514 : : we cannot strip e.g. an extension around a load and record the
13515 : : naked load since the RTL middle-end considers that the upper bits
13516 : : are defined according to LOAD_EXTEND_OP. */
13517 : 16812790 : else if (GET_CODE (setter) == SET
13518 : 591735 : && GET_CODE (SET_DEST (setter)) == SUBREG
13519 : 578523 : && SUBREG_REG (SET_DEST (setter)) == dest
13520 : 944256 : && known_le (GET_MODE_PRECISION (GET_MODE (dest)),
13521 : : BITS_PER_WORD)
13522 : 16912024 : && subreg_lowpart_p (SET_DEST (setter)))
13523 : : {
13524 : 99234 : if (WORD_REGISTER_OPERATIONS
13525 : : && word_register_operation_p (SET_SRC (setter))
13526 : : && paradoxical_subreg_p (SET_DEST (setter)))
13527 : : record_value_for_reg (dest, record_dead_insn, SET_SRC (setter));
13528 : 99234 : else if (!partial_subreg_p (SET_DEST (setter)))
13529 : 87376 : record_value_for_reg (dest, record_dead_insn,
13530 : 87376 : gen_lowpart (GET_MODE (dest),
13531 : 87376 : SET_SRC (setter)));
13532 : : else
13533 : : {
13534 : 11858 : record_value_for_reg (dest, record_dead_insn,
13535 : 11858 : gen_lowpart (GET_MODE (dest),
13536 : 11858 : SET_SRC (setter)));
13537 : :
13538 : 11858 : unsigned HOST_WIDE_INT mask;
13539 : 11858 : reg_stat_type *rsp = ®_stat[REGNO (dest)];
13540 : 11858 : mask = GET_MODE_MASK (GET_MODE (SET_DEST (setter)));
13541 : 11858 : rsp->last_set_nonzero_bits |= ~mask;
13542 : 11858 : rsp->last_set_sign_bit_copies = 1;
13543 : : }
13544 : : }
13545 : : /* Otherwise show that we don't know the value. */
13546 : : else
13547 : 16713556 : record_value_for_reg (dest, record_dead_insn, NULL_RTX);
13548 : : }
13549 : 25117991 : else if (MEM_P (dest)
13550 : : /* Ignore pushes, they clobber nothing. */
13551 : 25117991 : && ! push_operand (dest, GET_MODE (dest)))
13552 : 12692071 : mem_last_set = DF_INSN_LUID (record_dead_insn);
13553 : : }
13554 : :
13555 : : /* Update the records of when each REG was most recently set or killed
13556 : : for the things done by INSN. This is the last thing done in processing
13557 : : INSN in the combiner loop.
13558 : :
13559 : : We update reg_stat[], in particular fields last_set, last_set_value,
13560 : : last_set_mode, last_set_nonzero_bits, last_set_sign_bit_copies,
13561 : : last_death, and also the similar information mem_last_set (which insn
13562 : : most recently modified memory) and last_call_luid (which insn was the
13563 : : most recent subroutine call). */
13564 : :
13565 : : static void
13566 : 159514911 : record_dead_and_set_regs (rtx_insn *insn)
13567 : : {
13568 : 159514911 : rtx link;
13569 : 159514911 : unsigned int i;
13570 : :
13571 : 288821161 : for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
13572 : : {
13573 : 129306250 : if (REG_NOTE_KIND (link) == REG_DEAD
13574 : 74143317 : && REG_P (XEXP (link, 0)))
13575 : : {
13576 : 74143317 : unsigned int regno = REGNO (XEXP (link, 0));
13577 : 74143317 : unsigned int endregno = END_REGNO (XEXP (link, 0));
13578 : :
13579 : 148470107 : for (i = regno; i < endregno; i++)
13580 : : {
13581 : 74326790 : reg_stat_type *rsp;
13582 : :
13583 : 74326790 : rsp = ®_stat[i];
13584 : 74326790 : rsp->last_death = insn;
13585 : : }
13586 : : }
13587 : 55162933 : else if (REG_NOTE_KIND (link) == REG_INC)
13588 : 0 : record_value_for_reg (XEXP (link, 0), insn, NULL_RTX);
13589 : : }
13590 : :
13591 : 159514911 : if (CALL_P (insn))
13592 : : {
13593 : 8982725 : HARD_REG_SET callee_clobbers
13594 : 8982725 : = insn_callee_abi (insn).full_and_partial_reg_clobbers ();
13595 : 8982725 : hard_reg_set_iterator hrsi;
13596 : 741356162 : EXECUTE_IF_SET_IN_HARD_REG_SET (callee_clobbers, 0, i, hrsi)
13597 : : {
13598 : 732373437 : reg_stat_type *rsp;
13599 : :
13600 : : /* ??? We could try to preserve some information from the last
13601 : : set of register I if the call doesn't actually clobber
13602 : : (reg:last_set_mode I), which might be true for ABIs with
13603 : : partial clobbers. However, it would be difficult to
13604 : : update last_set_nonzero_bits and last_sign_bit_copies
13605 : : to account for the part of I that actually was clobbered.
13606 : : It wouldn't help much anyway, since we rarely see this
13607 : : situation before RA. */
13608 : 732373437 : rsp = ®_stat[i];
13609 : 732373437 : rsp->last_set_invalid = true;
13610 : 732373437 : rsp->last_set = insn;
13611 : 732373437 : rsp->last_set_value = 0;
13612 : 732373437 : rsp->last_set_mode = VOIDmode;
13613 : 732373437 : rsp->last_set_nonzero_bits = 0;
13614 : 732373437 : rsp->last_set_sign_bit_copies = 0;
13615 : 732373437 : rsp->last_death = 0;
13616 : 732373437 : rsp->truncated_to_mode = VOIDmode;
13617 : : }
13618 : :
13619 : 8982725 : last_call_luid = mem_last_set = DF_INSN_LUID (insn);
13620 : :
13621 : : /* We can't combine into a call pattern. Remember, though, that
13622 : : the return value register is set at this LUID. We could
13623 : : still replace a register with the return value from the
13624 : : wrong subroutine call! */
13625 : 8982725 : note_stores (insn, record_dead_and_set_regs_1, NULL_RTX);
13626 : : }
13627 : : else
13628 : 150532186 : note_stores (insn, record_dead_and_set_regs_1, insn);
13629 : 159514911 : }
13630 : :
13631 : : /* If a SUBREG has the promoted bit set, it is in fact a property of the
13632 : : register present in the SUBREG, so for each such SUBREG go back and
13633 : : adjust nonzero and sign bit information of the registers that are
13634 : : known to have some zero/sign bits set.
13635 : :
13636 : : This is needed because when combine blows the SUBREGs away, the
13637 : : information on zero/sign bits is lost and further combines can be
13638 : : missed because of that. */
13639 : :
13640 : : static void
13641 : 5709 : record_promoted_value (rtx_insn *insn, rtx subreg)
13642 : : {
13643 : 5709 : struct insn_link *links;
13644 : 5709 : rtx set;
13645 : 5709 : unsigned int regno = REGNO (SUBREG_REG (subreg));
13646 : 5709 : machine_mode mode = GET_MODE (subreg);
13647 : :
13648 : 5709 : if (!HWI_COMPUTABLE_MODE_P (mode))
13649 : : return;
13650 : :
13651 : 6224 : for (links = LOG_LINKS (insn); links;)
13652 : : {
13653 : 5583 : reg_stat_type *rsp;
13654 : :
13655 : 5583 : insn = links->insn;
13656 : 5583 : set = single_set (insn);
13657 : :
13658 : 5583 : if (! set || !REG_P (SET_DEST (set))
13659 : 5583 : || REGNO (SET_DEST (set)) != regno
13660 : 10779 : || GET_MODE (SET_DEST (set)) != GET_MODE (SUBREG_REG (subreg)))
13661 : : {
13662 : 387 : links = links->next;
13663 : 387 : continue;
13664 : : }
13665 : :
13666 : 5196 : rsp = ®_stat[regno];
13667 : 5196 : if (rsp->last_set == insn)
13668 : : {
13669 : 5196 : if (SUBREG_PROMOTED_UNSIGNED_P (subreg))
13670 : 5196 : rsp->last_set_nonzero_bits &= GET_MODE_MASK (mode);
13671 : : }
13672 : :
13673 : 5196 : if (REG_P (SET_SRC (set)))
13674 : : {
13675 : 128 : regno = REGNO (SET_SRC (set));
13676 : 128 : links = LOG_LINKS (insn);
13677 : : }
13678 : : else
13679 : : break;
13680 : : }
13681 : : }
13682 : :
13683 : : /* Check if X, a register, is known to contain a value already
13684 : : truncated to MODE. In this case we can use a subreg to refer to
13685 : : the truncated value even though in the generic case we would need
13686 : : an explicit truncation. */
13687 : :
13688 : : static bool
13689 : 0 : reg_truncated_to_mode (machine_mode mode, const_rtx x)
13690 : : {
13691 : 0 : reg_stat_type *rsp = ®_stat[REGNO (x)];
13692 : 0 : machine_mode truncated = rsp->truncated_to_mode;
13693 : :
13694 : 0 : if (truncated == 0
13695 : 0 : || rsp->truncation_label < label_tick_ebb_start)
13696 : : return false;
13697 : 0 : if (!partial_subreg_p (mode, truncated))
13698 : : return true;
13699 : 0 : if (TRULY_NOOP_TRUNCATION_MODES_P (mode, truncated))
13700 : : return true;
13701 : : return false;
13702 : : }
13703 : :
13704 : : /* If X is a hard reg or a subreg record the mode that the register is
13705 : : accessed in. For non-TARGET_TRULY_NOOP_TRUNCATION targets we might be
13706 : : able to turn a truncate into a subreg using this information. Return true
13707 : : if traversing X is complete. */
13708 : :
13709 : : static bool
13710 : 186387644 : record_truncated_value (rtx x)
13711 : : {
13712 : 186387644 : machine_mode truncated_mode;
13713 : 186387644 : reg_stat_type *rsp;
13714 : :
13715 : 186387644 : if (GET_CODE (x) == SUBREG && REG_P (SUBREG_REG (x)))
13716 : : {
13717 : 1824913 : machine_mode original_mode = GET_MODE (SUBREG_REG (x));
13718 : 1824913 : truncated_mode = GET_MODE (x);
13719 : :
13720 : 1824913 : if (!partial_subreg_p (truncated_mode, original_mode))
13721 : : return true;
13722 : :
13723 : 1199896 : truncated_mode = GET_MODE (x);
13724 : 1199896 : if (TRULY_NOOP_TRUNCATION_MODES_P (truncated_mode, original_mode))
13725 : : return true;
13726 : :
13727 : 0 : x = SUBREG_REG (x);
13728 : 0 : }
13729 : : /* ??? For hard-regs we now record everything. We might be able to
13730 : : optimize this using last_set_mode. */
13731 : 184562731 : else if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
13732 : 20025456 : truncated_mode = GET_MODE (x);
13733 : : else
13734 : : return false;
13735 : :
13736 : 20025456 : rsp = ®_stat[REGNO (x)];
13737 : 20025456 : if (rsp->truncated_to_mode == 0
13738 : 9344580 : || rsp->truncation_label < label_tick_ebb_start
13739 : 28186447 : || partial_subreg_p (truncated_mode, rsp->truncated_to_mode))
13740 : : {
13741 : 11865056 : rsp->truncated_to_mode = truncated_mode;
13742 : 11865056 : rsp->truncation_label = label_tick;
13743 : : }
13744 : :
13745 : : return true;
13746 : : }
13747 : :
13748 : : /* Callback for note_uses. Find hardregs and subregs of pseudos and
13749 : : the modes they are used in. This can help truning TRUNCATEs into
13750 : : SUBREGs. */
13751 : :
13752 : : static void
13753 : 71688001 : record_truncated_values (rtx *loc, void *data ATTRIBUTE_UNUSED)
13754 : : {
13755 : 71688001 : subrtx_var_iterator::array_type array;
13756 : 258075645 : FOR_EACH_SUBRTX_VAR (iter, array, *loc, NONCONST)
13757 : 186387644 : if (record_truncated_value (*iter))
13758 : 21850369 : iter.skip_subrtxes ();
13759 : 71688001 : }
13760 : :
13761 : : /* Scan X for promoted SUBREGs. For each one found,
13762 : : note what it implies to the registers used in it. */
13763 : :
13764 : : static void
13765 : 340209355 : check_promoted_subreg (rtx_insn *insn, rtx x)
13766 : : {
13767 : 340209355 : if (GET_CODE (x) == SUBREG
13768 : 2198047 : && SUBREG_PROMOTED_VAR_P (x)
13769 : 340215064 : && REG_P (SUBREG_REG (x)))
13770 : 5709 : record_promoted_value (insn, x);
13771 : : else
13772 : : {
13773 : 340203646 : const char *format = GET_RTX_FORMAT (GET_CODE (x));
13774 : 340203646 : int i, j;
13775 : :
13776 : 818611861 : for (i = 0; i < GET_RTX_LENGTH (GET_CODE (x)); i++)
13777 : 478408215 : switch (format[i])
13778 : : {
13779 : 254274112 : case 'e':
13780 : 254274112 : check_promoted_subreg (insn, XEXP (x, i));
13781 : 254274112 : break;
13782 : 11115126 : case 'V':
13783 : 11115126 : case 'E':
13784 : 11115126 : if (XVEC (x, i) != 0)
13785 : 34326074 : for (j = 0; j < XVECLEN (x, i); j++)
13786 : 23210948 : check_promoted_subreg (insn, XVECEXP (x, i, j));
13787 : : break;
13788 : : }
13789 : : }
13790 : 340209355 : }
13791 : :
13792 : : /* Verify that all the registers and memory references mentioned in *LOC are
13793 : : still valid. *LOC was part of a value set in INSN when label_tick was
13794 : : equal to TICK. Return false if some are not. If REPLACE is true, replace
13795 : : the invalid references with (clobber (const_int 0)) and return true. This
13796 : : replacement is useful because we often can get useful information about
13797 : : the form of a value (e.g., if it was produced by a shift that always
13798 : : produces -1 or 0) even though we don't know exactly what registers it
13799 : : was produced from. */
13800 : :
13801 : : static bool
13802 : 468488089 : get_last_value_validate (rtx *loc, rtx_insn *insn, int tick, bool replace)
13803 : : {
13804 : 468488089 : rtx x = *loc;
13805 : 468488089 : const char *fmt = GET_RTX_FORMAT (GET_CODE (x));
13806 : 468488089 : int len = GET_RTX_LENGTH (GET_CODE (x));
13807 : 468488089 : int i, j;
13808 : :
13809 : 468488089 : if (REG_P (x))
13810 : : {
13811 : 149371663 : unsigned int regno = REGNO (x);
13812 : 149371663 : unsigned int endregno = END_REGNO (x);
13813 : 149371663 : unsigned int j;
13814 : :
13815 : 275707713 : for (j = regno; j < endregno; j++)
13816 : : {
13817 : 149397576 : reg_stat_type *rsp = ®_stat[j];
13818 : 149397576 : if (rsp->last_set_invalid
13819 : : /* If this is a pseudo-register that was only set once and not
13820 : : live at the beginning of the function, it is always valid. */
13821 : 246808950 : || (! (regno >= FIRST_PSEUDO_REGISTER
13822 : 111976916 : && regno < reg_n_sets_max
13823 : 111925057 : && REG_N_SETS (regno) == 1
13824 : 194822748 : && (!REGNO_REG_SET_P
13825 : : (DF_LR_IN (ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb),
13826 : : regno)))
13827 : 29165658 : && rsp->last_set_label > tick))
13828 : : {
13829 : 23061526 : if (replace)
13830 : 11918473 : *loc = gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);
13831 : 23061526 : return replace;
13832 : : }
13833 : : }
13834 : :
13835 : : return true;
13836 : : }
13837 : : /* If this is a memory reference, make sure that there were no stores after
13838 : : it that might have clobbered the value. We don't have alias info, so we
13839 : : assume any store invalidates it. Moreover, we only have local UIDs, so
13840 : : we also assume that there were stores in the intervening basic blocks. */
13841 : 31605485 : else if (MEM_P (x) && !MEM_READONLY_P (x)
13842 : 348721328 : && (tick != label_tick || DF_INSN_LUID (insn) <= mem_last_set))
13843 : : {
13844 : 6679279 : if (replace)
13845 : 3342205 : *loc = gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);
13846 : 6679279 : return replace;
13847 : : }
13848 : :
13849 : 779892165 : for (i = 0; i < len; i++)
13850 : : {
13851 : 478717707 : if (fmt[i] == 'e')
13852 : : {
13853 : : /* Check for identical subexpressions. If x contains
13854 : : identical subexpression we only have to traverse one of
13855 : : them. */
13856 : 297678305 : if (i == 1 && ARITHMETIC_P (x))
13857 : : {
13858 : : /* Note that at this point x0 has already been checked
13859 : : and found valid. */
13860 : 110901771 : rtx x0 = XEXP (x, 0);
13861 : 110901771 : rtx x1 = XEXP (x, 1);
13862 : :
13863 : : /* If x0 and x1 are identical then x is also valid. */
13864 : 110901771 : if (x0 == x1)
13865 : : return true;
13866 : :
13867 : : /* If x1 is identical to a subexpression of x0 then
13868 : : while checking x0, x1 has already been checked. Thus
13869 : : it is valid and so as x. */
13870 : 110538935 : if (ARITHMETIC_P (x0)
13871 : 32364743 : && (x1 == XEXP (x0, 0) || x1 == XEXP (x0, 1)))
13872 : : return true;
13873 : :
13874 : : /* If x0 is identical to a subexpression of x1 then x is
13875 : : valid iff the rest of x1 is valid. */
13876 : 108607514 : if (ARITHMETIC_P (x1)
13877 : 1299872 : && (x0 == XEXP (x1, 0) || x0 == XEXP (x1, 1)))
13878 : 574 : return
13879 : 611 : get_last_value_validate (&XEXP (x1,
13880 : : x0 == XEXP (x1, 0) ? 1 : 0),
13881 : 574 : insn, tick, replace);
13882 : : }
13883 : :
13884 : 295383474 : if (!get_last_value_validate (&XEXP (x, i), insn, tick, replace))
13885 : : return false;
13886 : : }
13887 : 181039402 : else if (fmt[i] == 'E')
13888 : 27024087 : for (j = 0; j < XVECLEN (x, i); j++)
13889 : 21385523 : if (!get_last_value_validate (&XVECEXP (x, i, j),
13890 : : insn, tick, replace))
13891 : : return false;
13892 : : }
13893 : :
13894 : : /* If we haven't found a reason for it to be invalid, it is valid. */
13895 : : return true;
13896 : : }
13897 : :
13898 : : /* Get the last value assigned to X, if known. Some registers
13899 : : in the value may be replaced with (clobber (const_int 0)) if their value
13900 : : is known longer known reliably. */
13901 : :
13902 : : static rtx
13903 : 211510924 : get_last_value (const_rtx x)
13904 : : {
13905 : 211510924 : unsigned int regno;
13906 : 211510924 : rtx value;
13907 : 211510924 : reg_stat_type *rsp;
13908 : :
13909 : : /* If this is a non-paradoxical SUBREG, get the value of its operand and
13910 : : then convert it to the desired mode. If this is a paradoxical SUBREG,
13911 : : we cannot predict what values the "extra" bits might have. */
13912 : 211510924 : if (GET_CODE (x) == SUBREG
13913 : 12162160 : && subreg_lowpart_p (x)
13914 : 11687172 : && !paradoxical_subreg_p (x)
13915 : 218667737 : && (value = get_last_value (SUBREG_REG (x))) != 0)
13916 : 3639258 : return gen_lowpart (GET_MODE (x), value);
13917 : :
13918 : 207871666 : if (!REG_P (x))
13919 : : return 0;
13920 : :
13921 : 180843804 : regno = REGNO (x);
13922 : 180843804 : rsp = ®_stat[regno];
13923 : 180843804 : value = rsp->last_set_value;
13924 : :
13925 : : /* If we don't have a value, or if it isn't for this basic block and
13926 : : it's either a hard register, set more than once, or it's a live
13927 : : at the beginning of the function, return 0.
13928 : :
13929 : : Because if it's not live at the beginning of the function then the reg
13930 : : is always set before being used (is never used without being set).
13931 : : And, if it's set only once, and it's always set before use, then all
13932 : : uses must have the same last value, even if it's not from this basic
13933 : : block. */
13934 : :
13935 : 180843804 : if (value == 0
13936 : 180843804 : || (rsp->last_set_label < label_tick_ebb_start
13937 : 71527280 : && (regno < FIRST_PSEUDO_REGISTER
13938 : 70670456 : || regno >= reg_n_sets_max
13939 : 70670456 : || REG_N_SETS (regno) != 1
13940 : 14833334 : || REGNO_REG_SET_P
13941 : : (DF_LR_IN (ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb), regno))))
13942 : 105629861 : return 0;
13943 : :
13944 : : /* If the value was set in a later insn than the ones we are processing,
13945 : : we can't use it even if the register was only set once. */
13946 : 75213943 : if (rsp->last_set_label == label_tick
13947 : 75213943 : && DF_INSN_LUID (rsp->last_set) >= subst_low_luid)
13948 : : return 0;
13949 : :
13950 : : /* If fewer bits were set than what we are asked for now, we cannot use
13951 : : the value. */
13952 : 55033124 : if (maybe_lt (GET_MODE_PRECISION (rsp->last_set_mode),
13953 : 55033124 : GET_MODE_PRECISION (GET_MODE (x))))
13954 : : return 0;
13955 : :
13956 : : /* If the value has all its registers valid, return it. */
13957 : 55031702 : if (get_last_value_validate (&value, rsp->last_set,
13958 : : rsp->last_set_label, false))
13959 : 51251454 : return value;
13960 : :
13961 : : /* Otherwise, make a copy and replace any invalid register with
13962 : : (clobber (const_int 0)). If that fails for some reason, return 0. */
13963 : :
13964 : 3780248 : value = copy_rtx (value);
13965 : 3780248 : if (get_last_value_validate (&value, rsp->last_set,
13966 : : rsp->last_set_label, true))
13967 : 3780248 : return value;
13968 : :
13969 : : return 0;
13970 : : }
13971 : :
13972 : : /* Define three variables used for communication between the following
13973 : : routines. */
13974 : :
13975 : : static unsigned int reg_dead_regno, reg_dead_endregno;
13976 : : static int reg_dead_flag;
13977 : : rtx reg_dead_reg;
13978 : :
13979 : : /* Function called via note_stores from reg_dead_at_p.
13980 : :
13981 : : If DEST is within [reg_dead_regno, reg_dead_endregno), set
13982 : : reg_dead_flag to 1 if X is a CLOBBER and to -1 it is a SET. */
13983 : :
13984 : : static void
13985 : 606527 : reg_dead_at_p_1 (rtx dest, const_rtx x, void *data ATTRIBUTE_UNUSED)
13986 : : {
13987 : 606527 : unsigned int regno, endregno;
13988 : :
13989 : 606527 : if (!REG_P (dest))
13990 : : return;
13991 : :
13992 : 563844 : regno = REGNO (dest);
13993 : 563844 : endregno = END_REGNO (dest);
13994 : 563844 : if (reg_dead_endregno > regno && reg_dead_regno < endregno)
13995 : 257752 : reg_dead_flag = (GET_CODE (x) == CLOBBER) ? 1 : -1;
13996 : : }
13997 : :
13998 : : /* Return true if REG is known to be dead at INSN.
13999 : :
14000 : : We scan backwards from INSN. If we hit a REG_DEAD note or a CLOBBER
14001 : : referencing REG, it is dead. If we hit a SET referencing REG, it is
14002 : : live. Otherwise, see if it is live or dead at the start of the basic
14003 : : block we are in. Hard regs marked as being live in NEWPAT_USED_REGS
14004 : : must be assumed to be always live. */
14005 : :
14006 : : static bool
14007 : 1506605 : reg_dead_at_p (rtx reg, rtx_insn *insn)
14008 : : {
14009 : 1506605 : basic_block block;
14010 : 1506605 : unsigned int i;
14011 : :
14012 : : /* Set variables for reg_dead_at_p_1. */
14013 : 1506605 : reg_dead_regno = REGNO (reg);
14014 : 1506605 : reg_dead_endregno = END_REGNO (reg);
14015 : 1506605 : reg_dead_reg = reg;
14016 : :
14017 : 1506605 : reg_dead_flag = 0;
14018 : :
14019 : : /* Check that reg isn't mentioned in NEWPAT_USED_REGS. For fixed registers
14020 : : we allow the machine description to decide whether use-and-clobber
14021 : : patterns are OK. */
14022 : 1506605 : if (reg_dead_regno < FIRST_PSEUDO_REGISTER)
14023 : : {
14024 : 3013210 : for (i = reg_dead_regno; i < reg_dead_endregno; i++)
14025 : 1506605 : if (!fixed_regs[i] && TEST_HARD_REG_BIT (newpat_used_regs, i))
14026 : : return false;
14027 : : }
14028 : :
14029 : : /* Scan backwards until we find a REG_DEAD note, SET, CLOBBER, or
14030 : : beginning of basic block. */
14031 : 1506605 : block = BLOCK_FOR_INSN (insn);
14032 : 836020 : for (;;)
14033 : : {
14034 : 2342625 : if (INSN_P (insn))
14035 : : {
14036 : 2177135 : if (find_regno_note (insn, REG_UNUSED, reg_dead_regno))
14037 : : return true;
14038 : :
14039 : 861619 : note_stores (insn, reg_dead_at_p_1, NULL);
14040 : 861619 : if (reg_dead_flag)
14041 : 128876 : return reg_dead_flag == 1 ? 1 : 0;
14042 : :
14043 : 732743 : if (find_regno_note (insn, REG_DEAD, reg_dead_regno))
14044 : : return true;
14045 : : }
14046 : :
14047 : 872427 : if (insn == BB_HEAD (block))
14048 : : break;
14049 : :
14050 : 836020 : insn = PREV_INSN (insn);
14051 : : }
14052 : :
14053 : : /* Look at live-in sets for the basic block that we were in. */
14054 : 72814 : for (i = reg_dead_regno; i < reg_dead_endregno; i++)
14055 : 36407 : if (REGNO_REG_SET_P (df_get_live_in (block), i))
14056 : : return false;
14057 : :
14058 : : return true;
14059 : : }
14060 : :
14061 : : /* Note hard registers in X that are used. */
14062 : :
14063 : : static void
14064 : 265069593 : mark_used_regs_combine (rtx x)
14065 : : {
14066 : 306752167 : RTX_CODE code = GET_CODE (x);
14067 : 306752167 : unsigned int regno;
14068 : 306752167 : int i;
14069 : :
14070 : 306752167 : switch (code)
14071 : : {
14072 : : case LABEL_REF:
14073 : : case SYMBOL_REF:
14074 : : case CONST:
14075 : : CASE_CONST_ANY:
14076 : : case PC:
14077 : : case ADDR_VEC:
14078 : : case ADDR_DIFF_VEC:
14079 : : case ASM_INPUT:
14080 : : return;
14081 : :
14082 : 6975786 : case CLOBBER:
14083 : : /* If we are clobbering a MEM, mark any hard registers inside the
14084 : : address as used. */
14085 : 6975786 : if (MEM_P (XEXP (x, 0)))
14086 : 5535 : mark_used_regs_combine (XEXP (XEXP (x, 0), 0));
14087 : : return;
14088 : :
14089 : 70268302 : case REG:
14090 : 70268302 : regno = REGNO (x);
14091 : : /* A hard reg in a wide mode may really be multiple registers.
14092 : : If so, mark all of them just like the first. */
14093 : 70268302 : if (regno < FIRST_PSEUDO_REGISTER)
14094 : : {
14095 : : /* None of this applies to the stack, frame or arg pointers. */
14096 : 8708929 : if (regno == STACK_POINTER_REGNUM
14097 : 8708929 : || (!HARD_FRAME_POINTER_IS_FRAME_POINTER
14098 : : && regno == HARD_FRAME_POINTER_REGNUM)
14099 : 7798691 : || (FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
14100 : 1069469 : && regno == ARG_POINTER_REGNUM && fixed_regs[regno])
14101 : 6729222 : || regno == FRAME_POINTER_REGNUM)
14102 : : return;
14103 : :
14104 : 1553479 : add_to_hard_reg_set (&newpat_used_regs, GET_MODE (x), regno);
14105 : : }
14106 : : return;
14107 : :
14108 : 41677039 : case SET:
14109 : 41677039 : {
14110 : : /* If setting a MEM, or a SUBREG of a MEM, then note any hard regs in
14111 : : the address. */
14112 : 41677039 : rtx testreg = SET_DEST (x);
14113 : :
14114 : 41677039 : while (GET_CODE (testreg) == SUBREG
14115 : 41693007 : || GET_CODE (testreg) == ZERO_EXTRACT
14116 : 83680746 : || GET_CODE (testreg) == STRICT_LOW_PART)
14117 : 318980 : testreg = XEXP (testreg, 0);
14118 : :
14119 : 41677039 : if (MEM_P (testreg))
14120 : 4630505 : mark_used_regs_combine (XEXP (testreg, 0));
14121 : :
14122 : 41677039 : mark_used_regs_combine (SET_SRC (x));
14123 : : }
14124 : 41677039 : return;
14125 : :
14126 : 121989175 : default:
14127 : 121989175 : break;
14128 : : }
14129 : :
14130 : : /* Recursively scan the operands of this expression. */
14131 : :
14132 : 121989175 : {
14133 : 121989175 : const char *fmt = GET_RTX_FORMAT (code);
14134 : :
14135 : 353080114 : for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
14136 : : {
14137 : 231090939 : if (fmt[i] == 'e')
14138 : 187862321 : mark_used_regs_combine (XEXP (x, i));
14139 : 43228618 : else if (fmt[i] == 'E')
14140 : : {
14141 : : int j;
14142 : :
14143 : 60342410 : for (j = 0; j < XVECLEN (x, i); j++)
14144 : 41915055 : mark_used_regs_combine (XVECEXP (x, i, j));
14145 : : }
14146 : : }
14147 : : }
14148 : : }
14149 : :
14150 : : /* Remove register number REGNO from the dead registers list of INSN.
14151 : :
14152 : : Return the note used to record the death, if there was one. */
14153 : :
14154 : : rtx
14155 : 2918811 : remove_death (unsigned int regno, rtx_insn *insn)
14156 : : {
14157 : 2918811 : rtx note = find_regno_note (insn, REG_DEAD, regno);
14158 : :
14159 : 2918811 : if (note)
14160 : 431391 : remove_note (insn, note);
14161 : :
14162 : 2918811 : return note;
14163 : : }
14164 : :
14165 : : /* For each register (hardware or pseudo) used within expression X, if its
14166 : : death is in an instruction with luid between FROM_LUID (inclusive) and
14167 : : TO_INSN (exclusive), put a REG_DEAD note for that register in the
14168 : : list headed by PNOTES.
14169 : :
14170 : : That said, don't move registers killed by maybe_kill_insn.
14171 : :
14172 : : This is done when X is being merged by combination into TO_INSN. These
14173 : : notes will then be distributed as needed. */
14174 : :
14175 : : static void
14176 : 22281836 : move_deaths (rtx x, rtx maybe_kill_insn, int from_luid, rtx_insn *to_insn,
14177 : : rtx *pnotes)
14178 : : {
14179 : 22730920 : const char *fmt;
14180 : 22730920 : int len, i;
14181 : 22730920 : enum rtx_code code = GET_CODE (x);
14182 : :
14183 : 22730920 : if (code == REG)
14184 : : {
14185 : 5673012 : unsigned int regno = REGNO (x);
14186 : 5673012 : rtx_insn *where_dead = reg_stat[regno].last_death;
14187 : :
14188 : : /* If we do not know where the register died, it may still die between
14189 : : FROM_LUID and TO_INSN. If so, find it. This is PR83304. */
14190 : 5673012 : if (!where_dead || DF_INSN_LUID (where_dead) >= DF_INSN_LUID (to_insn))
14191 : : {
14192 : 3014127 : rtx_insn *insn = prev_real_nondebug_insn (to_insn);
14193 : 3014127 : while (insn
14194 : 4536633 : && BLOCK_FOR_INSN (insn) == BLOCK_FOR_INSN (to_insn)
14195 : 8424920 : && DF_INSN_LUID (insn) >= from_luid)
14196 : : {
14197 : 2084268 : if (dead_or_set_regno_p (insn, regno))
14198 : : {
14199 : 531999 : if (find_regno_note (insn, REG_DEAD, regno))
14200 : 5673012 : where_dead = insn;
14201 : : break;
14202 : : }
14203 : :
14204 : 1552269 : insn = prev_real_nondebug_insn (insn);
14205 : : }
14206 : : }
14207 : :
14208 : : /* Don't move the register if it gets killed in between from and to. */
14209 : 93270 : if (maybe_kill_insn && reg_set_p (x, maybe_kill_insn)
14210 : 5715123 : && ! reg_referenced_p (x, maybe_kill_insn))
14211 : : return;
14212 : :
14213 : 5630901 : if (where_dead
14214 : 2986774 : && BLOCK_FOR_INSN (where_dead) == BLOCK_FOR_INSN (to_insn)
14215 : 2821234 : && DF_INSN_LUID (where_dead) >= from_luid
14216 : 8451957 : && DF_INSN_LUID (where_dead) < DF_INSN_LUID (to_insn))
14217 : : {
14218 : 2556990 : rtx note = remove_death (regno, where_dead);
14219 : :
14220 : : /* It is possible for the call above to return 0. This can occur
14221 : : when last_death points to I2 or I1 that we combined with.
14222 : : In that case make a new note.
14223 : :
14224 : : We must also check for the case where X is a hard register
14225 : : and NOTE is a death note for a range of hard registers
14226 : : including X. In that case, we must put REG_DEAD notes for
14227 : : the remaining registers in place of NOTE. */
14228 : :
14229 : 2556990 : if (note != 0 && regno < FIRST_PSEUDO_REGISTER
14230 : 2556990 : && partial_subreg_p (GET_MODE (x), GET_MODE (XEXP (note, 0))))
14231 : : {
14232 : 0 : unsigned int deadregno = REGNO (XEXP (note, 0));
14233 : 0 : unsigned int deadend = END_REGNO (XEXP (note, 0));
14234 : 0 : unsigned int ourend = END_REGNO (x);
14235 : 0 : unsigned int i;
14236 : :
14237 : 0 : for (i = deadregno; i < deadend; i++)
14238 : 0 : if (i < regno || i >= ourend)
14239 : 0 : add_reg_note (where_dead, REG_DEAD, regno_reg_rtx[i]);
14240 : : }
14241 : :
14242 : : /* If we didn't find any note, or if we found a REG_DEAD note that
14243 : : covers only part of the given reg, and we have a multi-reg hard
14244 : : register, then to be safe we must check for REG_DEAD notes
14245 : : for each register other than the first. They could have
14246 : : their own REG_DEAD notes lying around. */
14247 : 2556990 : else if ((note == 0
14248 : : || (note != 0
14249 : 69607 : && partial_subreg_p (GET_MODE (XEXP (note, 0)),
14250 : 69607 : GET_MODE (x))))
14251 : 2487383 : && regno < FIRST_PSEUDO_REGISTER
14252 : 2837090 : && REG_NREGS (x) > 1)
14253 : : {
14254 : 0 : unsigned int ourend = END_REGNO (x);
14255 : 0 : unsigned int i, offset;
14256 : 0 : rtx oldnotes = 0;
14257 : :
14258 : 0 : if (note)
14259 : 0 : offset = hard_regno_nregs (regno, GET_MODE (XEXP (note, 0)));
14260 : : else
14261 : : offset = 1;
14262 : :
14263 : 0 : for (i = regno + offset; i < ourend; i++)
14264 : 0 : move_deaths (regno_reg_rtx[i],
14265 : : maybe_kill_insn, from_luid, to_insn, &oldnotes);
14266 : : }
14267 : :
14268 : 2556990 : if (note != 0 && GET_MODE (XEXP (note, 0)) == GET_MODE (x))
14269 : : {
14270 : 69590 : XEXP (note, 1) = *pnotes;
14271 : 69590 : *pnotes = note;
14272 : : }
14273 : : else
14274 : 2487400 : *pnotes = alloc_reg_note (REG_DEAD, x, *pnotes);
14275 : : }
14276 : :
14277 : 5630901 : return;
14278 : : }
14279 : :
14280 : 17057908 : else if (GET_CODE (x) == SET)
14281 : : {
14282 : 3823727 : rtx dest = SET_DEST (x);
14283 : :
14284 : 3823727 : move_deaths (SET_SRC (x), maybe_kill_insn, from_luid, to_insn, pnotes);
14285 : :
14286 : : /* In the case of a ZERO_EXTRACT, a STRICT_LOW_PART, or a SUBREG
14287 : : that accesses one word of a multi-word item, some
14288 : : piece of everything register in the expression is used by
14289 : : this insn, so remove any old death. */
14290 : : /* ??? So why do we test for equality of the sizes? */
14291 : :
14292 : 3823727 : if (GET_CODE (dest) == ZERO_EXTRACT
14293 : 3823308 : || GET_CODE (dest) == STRICT_LOW_PART
14294 : 7645429 : || (GET_CODE (dest) == SUBREG
14295 : 81685 : && !read_modify_subreg_p (dest)))
14296 : : {
14297 : 60813 : move_deaths (dest, maybe_kill_insn, from_luid, to_insn, pnotes);
14298 : 60813 : return;
14299 : : }
14300 : :
14301 : : /* If this is some other SUBREG, we know it replaces the entire
14302 : : value, so use that as the destination. */
14303 : 3762914 : if (GET_CODE (dest) == SUBREG)
14304 : 22897 : dest = SUBREG_REG (dest);
14305 : :
14306 : : /* If this is a MEM, adjust deaths of anything used in the address.
14307 : : For a REG (the only other possibility), the entire value is
14308 : : being replaced so the old value is not used in this insn. */
14309 : :
14310 : 3762914 : if (MEM_P (dest))
14311 : 388271 : move_deaths (XEXP (dest, 0), maybe_kill_insn, from_luid,
14312 : : to_insn, pnotes);
14313 : : return;
14314 : : }
14315 : :
14316 : 13234181 : else if (GET_CODE (x) == CLOBBER)
14317 : : return;
14318 : :
14319 : 12698562 : len = GET_RTX_LENGTH (code);
14320 : 12698562 : fmt = GET_RTX_FORMAT (code);
14321 : :
14322 : 33015577 : for (i = 0; i < len; i++)
14323 : : {
14324 : 20317015 : if (fmt[i] == 'E')
14325 : : {
14326 : 877938 : int j;
14327 : 3169644 : for (j = XVECLEN (x, i) - 1; j >= 0; j--)
14328 : 2291706 : move_deaths (XVECEXP (x, i, j), maybe_kill_insn, from_luid,
14329 : : to_insn, pnotes);
14330 : : }
14331 : 19439077 : else if (fmt[i] == 'e')
14332 : 12387227 : move_deaths (XEXP (x, i), maybe_kill_insn, from_luid, to_insn, pnotes);
14333 : : }
14334 : : }
14335 : :
14336 : : /* Return true if X is the target of a bit-field assignment in BODY, the
14337 : : pattern of an insn. X must be a REG. */
14338 : :
14339 : : static bool
14340 : 4420329 : reg_bitfield_target_p (rtx x, rtx body)
14341 : : {
14342 : 4420329 : int i;
14343 : :
14344 : 4420329 : if (GET_CODE (body) == SET)
14345 : : {
14346 : 3240047 : rtx dest = SET_DEST (body);
14347 : 3240047 : rtx target;
14348 : 3240047 : unsigned int regno, tregno, endregno, endtregno;
14349 : :
14350 : 3240047 : if (GET_CODE (dest) == ZERO_EXTRACT)
14351 : 370 : target = XEXP (dest, 0);
14352 : 3239677 : else if (GET_CODE (dest) == STRICT_LOW_PART)
14353 : 1920 : target = SUBREG_REG (XEXP (dest, 0));
14354 : : else
14355 : : return false;
14356 : :
14357 : 2290 : if (GET_CODE (target) == SUBREG)
14358 : 239 : target = SUBREG_REG (target);
14359 : :
14360 : 2290 : if (!REG_P (target))
14361 : : return false;
14362 : :
14363 : 2281 : tregno = REGNO (target), regno = REGNO (x);
14364 : 2281 : if (tregno >= FIRST_PSEUDO_REGISTER || regno >= FIRST_PSEUDO_REGISTER)
14365 : 2271 : return target == x;
14366 : :
14367 : 10 : endtregno = end_hard_regno (GET_MODE (target), tregno);
14368 : 10 : endregno = end_hard_regno (GET_MODE (x), regno);
14369 : :
14370 : 10 : return endregno > tregno && regno < endtregno;
14371 : : }
14372 : :
14373 : 1180282 : else if (GET_CODE (body) == PARALLEL)
14374 : 1724915 : for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
14375 : 1158991 : if (reg_bitfield_target_p (x, XVECEXP (body, 0, i)))
14376 : : return true;
14377 : :
14378 : : return false;
14379 : : }
14380 : :
14381 : : /* Given a chain of REG_NOTES originally from FROM_INSN, try to place them
14382 : : as appropriate. I3 and I2 are the insns resulting from the combination
14383 : : insns including FROM (I2 may be zero).
14384 : :
14385 : : ELIM_I2 and ELIM_I1 are either zero or registers that we know will
14386 : : not need REG_DEAD notes because they are being substituted for. This
14387 : : saves searching in the most common cases.
14388 : :
14389 : : Each note in the list is either ignored or placed on some insns, depending
14390 : : on the type of note. */
14391 : :
14392 : : static void
14393 : 9283069 : distribute_notes (rtx notes, rtx_insn *from_insn, rtx_insn *i3, rtx_insn *i2,
14394 : : rtx elim_i2, rtx elim_i1, rtx elim_i0)
14395 : : {
14396 : 9283069 : rtx note, next_note;
14397 : 9283069 : rtx tem_note;
14398 : 9283069 : rtx_insn *tem_insn;
14399 : :
14400 : 21419765 : for (note = notes; note; note = next_note)
14401 : : {
14402 : 12136696 : rtx_insn *place = 0, *place2 = 0;
14403 : :
14404 : 12136696 : next_note = XEXP (note, 1);
14405 : 12136696 : switch (REG_NOTE_KIND (note))
14406 : : {
14407 : : case REG_BR_PROB:
14408 : : case REG_BR_PRED:
14409 : : /* Doesn't matter much where we put this, as long as it's somewhere.
14410 : : It is preferable to keep these notes on branches, which is most
14411 : : likely to be i3. */
14412 : : place = i3;
14413 : : break;
14414 : :
14415 : 0 : case REG_NON_LOCAL_GOTO:
14416 : 0 : if (JUMP_P (i3))
14417 : : place = i3;
14418 : : else
14419 : : {
14420 : 0 : gcc_assert (i2 && JUMP_P (i2));
14421 : : place = i2;
14422 : : }
14423 : : break;
14424 : :
14425 : 25313 : case REG_EH_REGION:
14426 : 25313 : {
14427 : : /* The landing pad handling needs to be kept in sync with the
14428 : : prerequisite checking in try_combine. */
14429 : 25313 : int lp_nr = INTVAL (XEXP (note, 0));
14430 : : /* A REG_EH_REGION note transfering control can only ever come
14431 : : from i3. */
14432 : 25313 : if (lp_nr > 0)
14433 : 15172 : gcc_assert (from_insn == i3);
14434 : : /* We are making sure there is a single effective REG_EH_REGION
14435 : : note and it's valid to put it on i3. */
14436 : 25313 : if (!insn_could_throw_p (from_insn)
14437 : 25313 : && !(lp_nr == INT_MIN && can_nonlocal_goto (from_insn)))
14438 : : /* Throw away stray notes on insns that can never throw or
14439 : : make a nonlocal goto. */
14440 : : ;
14441 : : else
14442 : : {
14443 : 25183 : if (CALL_P (i3))
14444 : : place = i3;
14445 : : else
14446 : : {
14447 : 2198 : gcc_assert (cfun->can_throw_non_call_exceptions);
14448 : : /* If i3 can still trap preserve the note, otherwise we've
14449 : : combined things such that we can now prove that the
14450 : : instructions can't trap. Drop the note in this case. */
14451 : 2198 : if (may_trap_p (i3))
14452 : : place = i3;
14453 : : }
14454 : : }
14455 : : break;
14456 : : }
14457 : :
14458 : 122583 : case REG_ARGS_SIZE:
14459 : : /* ??? How to distribute between i3-i1. Assume i3 contains the
14460 : : entire adjustment. Assert i3 contains at least some adjust. */
14461 : 122583 : if (!noop_move_p (i3))
14462 : : {
14463 : 122582 : poly_int64 old_size, args_size = get_args_size (note);
14464 : : /* fixup_args_size_notes looks at REG_NORETURN note,
14465 : : so ensure the note is placed there first. */
14466 : 122582 : if (CALL_P (i3))
14467 : : {
14468 : : rtx *np;
14469 : 1710 : for (np = &next_note; *np; np = &XEXP (*np, 1))
14470 : 7 : if (REG_NOTE_KIND (*np) == REG_NORETURN)
14471 : : {
14472 : 5 : rtx n = *np;
14473 : 5 : *np = XEXP (n, 1);
14474 : 5 : XEXP (n, 1) = REG_NOTES (i3);
14475 : 5 : REG_NOTES (i3) = n;
14476 : 5 : break;
14477 : : }
14478 : : }
14479 : 122582 : old_size = fixup_args_size_notes (PREV_INSN (i3), i3, args_size);
14480 : : /* emit_call_1 adds for !ACCUMULATE_OUTGOING_ARGS
14481 : : REG_ARGS_SIZE note to all noreturn calls, allow that here. */
14482 : 122582 : gcc_assert (maybe_ne (old_size, args_size)
14483 : : || (CALL_P (i3)
14484 : : && !ACCUMULATE_OUTGOING_ARGS
14485 : : && find_reg_note (i3, REG_NORETURN, NULL_RTX)));
14486 : : }
14487 : : break;
14488 : :
14489 : 76695 : case REG_NORETURN:
14490 : 76695 : case REG_SETJMP:
14491 : 76695 : case REG_TM:
14492 : 76695 : case REG_CALL_DECL:
14493 : 76695 : case REG_UNTYPED_CALL:
14494 : 76695 : case REG_CALL_NOCF_CHECK:
14495 : : /* These notes must remain with the call. It should not be
14496 : : possible for both I2 and I3 to be a call. */
14497 : 76695 : if (CALL_P (i3))
14498 : : place = i3;
14499 : : else
14500 : : {
14501 : 0 : gcc_assert (i2 && CALL_P (i2));
14502 : : place = i2;
14503 : : }
14504 : : break;
14505 : :
14506 : 1763896 : case REG_UNUSED:
14507 : : /* Any clobbers for i3 may still exist, and so we must process
14508 : : REG_UNUSED notes from that insn.
14509 : :
14510 : : Any clobbers from i2 or i1 can only exist if they were added by
14511 : : recog_for_combine. In that case, recog_for_combine created the
14512 : : necessary REG_UNUSED notes. Trying to keep any original
14513 : : REG_UNUSED notes from these insns can cause incorrect output
14514 : : if it is for the same register as the original i3 dest.
14515 : : In that case, we will notice that the register is set in i3,
14516 : : and then add a REG_UNUSED note for the destination of i3, which
14517 : : is wrong. However, it is possible to have REG_UNUSED notes from
14518 : : i2 or i1 for register which were both used and clobbered, so
14519 : : we keep notes from i2 or i1 if they will turn into REG_DEAD
14520 : : notes. */
14521 : :
14522 : : /* If this register is set or clobbered between FROM_INSN and I3,
14523 : : we should not create a note for it. */
14524 : 1763896 : if (reg_set_between_p (XEXP (note, 0), from_insn, i3))
14525 : : break;
14526 : :
14527 : : /* If this register is set or clobbered in I3, put the note there
14528 : : unless there is one already. */
14529 : 1681577 : if (reg_set_p (XEXP (note, 0), PATTERN (i3)))
14530 : : {
14531 : 992499 : if (from_insn != i3)
14532 : : break;
14533 : :
14534 : 550531 : if (! (REG_P (XEXP (note, 0))
14535 : 550531 : ? find_regno_note (i3, REG_UNUSED, REGNO (XEXP (note, 0)))
14536 : 0 : : find_reg_note (i3, REG_UNUSED, XEXP (note, 0))))
14537 : : place = i3;
14538 : : }
14539 : : /* Otherwise, if this register is used by I3, then this register
14540 : : now dies here, so we must put a REG_DEAD note here unless there
14541 : : is one already. */
14542 : 689078 : else if (reg_referenced_p (XEXP (note, 0), PATTERN (i3)))
14543 : : {
14544 : 6894 : if (! (REG_P (XEXP (note, 0))
14545 : 6894 : ? find_regno_note (i3, REG_DEAD, REGNO (XEXP (note, 0)))
14546 : 0 : : find_reg_note (i3, REG_DEAD, XEXP (note, 0))))
14547 : : {
14548 : 6666 : PUT_REG_NOTE_KIND (note, REG_DEAD);
14549 : 6666 : place = i3;
14550 : : }
14551 : : }
14552 : :
14553 : : /* A SET or CLOBBER of the REG_UNUSED reg has been removed,
14554 : : but we can't tell which at this point. We must reset any
14555 : : expectations we had about the value that was previously
14556 : : stored in the reg. ??? Ideally, we'd adjust REG_N_SETS
14557 : : and, if appropriate, restore its previous value, but we
14558 : : don't have enough information for that at this point. */
14559 : : else
14560 : : {
14561 : 682184 : record_value_for_reg (XEXP (note, 0), NULL, NULL_RTX);
14562 : :
14563 : : /* Otherwise, if this register is now referenced in i2
14564 : : then the register used to be modified in one of the
14565 : : original insns. If it was i3 (say, in an unused
14566 : : parallel), it's now completely gone, so the note can
14567 : : be discarded. But if it was modified in i2, i1 or i0
14568 : : and we still reference it in i2, then we're
14569 : : referencing the previous value, and since the
14570 : : register was modified and REG_UNUSED, we know that
14571 : : the previous value is now dead. So, if we only
14572 : : reference the register in i2, we change the note to
14573 : : REG_DEAD, to reflect the previous value. However, if
14574 : : we're also setting or clobbering the register as
14575 : : scratch, we know (because the register was not
14576 : : referenced in i3) that it's unused, just as it was
14577 : : unused before, and we place the note in i2. */
14578 : 17994 : if (from_insn != i3 && i2 && INSN_P (i2)
14579 : 700178 : && reg_referenced_p (XEXP (note, 0), PATTERN (i2)))
14580 : : {
14581 : 7 : if (!reg_set_p (XEXP (note, 0), PATTERN (i2)))
14582 : 7 : PUT_REG_NOTE_KIND (note, REG_DEAD);
14583 : 7 : if (! (REG_P (XEXP (note, 0))
14584 : 7 : ? find_regno_note (i2, REG_NOTE_KIND (note),
14585 : 7 : REGNO (XEXP (note, 0)))
14586 : 0 : : find_reg_note (i2, REG_NOTE_KIND (note),
14587 : : XEXP (note, 0))))
14588 : : place = i2;
14589 : : }
14590 : : }
14591 : :
14592 : : break;
14593 : :
14594 : 365986 : case REG_EQUAL:
14595 : 365986 : case REG_EQUIV:
14596 : 365986 : case REG_NOALIAS:
14597 : : /* These notes say something about results of an insn. We can
14598 : : only support them if they used to be on I3 in which case they
14599 : : remain on I3. Otherwise they are ignored.
14600 : :
14601 : : If the note refers to an expression that is not a constant, we
14602 : : must also ignore the note since we cannot tell whether the
14603 : : equivalence is still true. It might be possible to do
14604 : : slightly better than this (we only have a problem if I2DEST
14605 : : or I1DEST is present in the expression), but it doesn't
14606 : : seem worth the trouble. */
14607 : :
14608 : 365986 : if (from_insn == i3
14609 : 176384 : && (XEXP (note, 0) == 0 || CONSTANT_P (XEXP (note, 0))))
14610 : : place = i3;
14611 : : break;
14612 : :
14613 : 0 : case REG_INC:
14614 : : /* These notes say something about how a register is used. They must
14615 : : be present on any use of the register in I2 or I3. */
14616 : 0 : if (reg_mentioned_p (XEXP (note, 0), PATTERN (i3)))
14617 : 0 : place = i3;
14618 : :
14619 : 0 : if (i2 && reg_mentioned_p (XEXP (note, 0), PATTERN (i2)))
14620 : : {
14621 : 0 : if (place)
14622 : : place2 = i2;
14623 : : else
14624 : : place = i2;
14625 : : }
14626 : : break;
14627 : :
14628 : 6367 : case REG_LABEL_TARGET:
14629 : 6367 : case REG_LABEL_OPERAND:
14630 : : /* This can show up in several ways -- either directly in the
14631 : : pattern, or hidden off in the constant pool with (or without?)
14632 : : a REG_EQUAL note. */
14633 : : /* ??? Ignore the without-reg_equal-note problem for now. */
14634 : 6367 : if (reg_mentioned_p (XEXP (note, 0), PATTERN (i3))
14635 : 6367 : || ((tem_note = find_reg_note (i3, REG_EQUAL, NULL_RTX))
14636 : 0 : && GET_CODE (XEXP (tem_note, 0)) == LABEL_REF
14637 : 0 : && label_ref_label (XEXP (tem_note, 0)) == XEXP (note, 0)))
14638 : : place = i3;
14639 : :
14640 : 6367 : if (i2
14641 : 6367 : && (reg_mentioned_p (XEXP (note, 0), PATTERN (i2))
14642 : 0 : || ((tem_note = find_reg_note (i2, REG_EQUAL, NULL_RTX))
14643 : 0 : && GET_CODE (XEXP (tem_note, 0)) == LABEL_REF
14644 : 0 : && label_ref_label (XEXP (tem_note, 0)) == XEXP (note, 0))))
14645 : : {
14646 : 0 : if (place)
14647 : : place2 = i2;
14648 : : else
14649 : : place = i2;
14650 : : }
14651 : :
14652 : : /* For REG_LABEL_TARGET on a JUMP_P, we prefer to put the note
14653 : : as a JUMP_LABEL or decrement LABEL_NUSES if it's already
14654 : : there. */
14655 : 6367 : if (place && JUMP_P (place)
14656 : 5284 : && REG_NOTE_KIND (note) == REG_LABEL_TARGET
14657 : 0 : && (JUMP_LABEL (place) == NULL
14658 : 0 : || JUMP_LABEL (place) == XEXP (note, 0)))
14659 : : {
14660 : 0 : rtx label = JUMP_LABEL (place);
14661 : :
14662 : 0 : if (!label)
14663 : 0 : JUMP_LABEL (place) = XEXP (note, 0);
14664 : 0 : else if (LABEL_P (label))
14665 : 0 : LABEL_NUSES (label)--;
14666 : : }
14667 : :
14668 : 6367 : if (place2 && JUMP_P (place2)
14669 : 0 : && REG_NOTE_KIND (note) == REG_LABEL_TARGET
14670 : 0 : && (JUMP_LABEL (place2) == NULL
14671 : 0 : || JUMP_LABEL (place2) == XEXP (note, 0)))
14672 : : {
14673 : 0 : rtx label = JUMP_LABEL (place2);
14674 : :
14675 : 0 : if (!label)
14676 : 0 : JUMP_LABEL (place2) = XEXP (note, 0);
14677 : 0 : else if (LABEL_P (label))
14678 : 0 : LABEL_NUSES (label)--;
14679 : : place2 = 0;
14680 : : }
14681 : : break;
14682 : :
14683 : : case REG_NONNEG:
14684 : : /* This note says something about the value of a register prior
14685 : : to the execution of an insn. It is too much trouble to see
14686 : : if the note is still correct in all situations. It is better
14687 : : to simply delete it. */
14688 : : break;
14689 : :
14690 : 9736302 : case REG_DEAD:
14691 : : /* If we replaced the right hand side of FROM_INSN with a
14692 : : REG_EQUAL note, the original use of the dying register
14693 : : will not have been combined into I3 and I2. In such cases,
14694 : : FROM_INSN is guaranteed to be the first of the combined
14695 : : instructions, so we simply need to search back before
14696 : : FROM_INSN for the previous use or set of this register,
14697 : : then alter the notes there appropriately.
14698 : :
14699 : : If the register is used as an input in I3, it dies there.
14700 : : Similarly for I2, if it is nonzero and adjacent to I3.
14701 : :
14702 : : If the register is not used as an input in either I3 or I2
14703 : : and it is not one of the registers we were supposed to eliminate,
14704 : : there are two possibilities. We might have a non-adjacent I2
14705 : : or we might have somehow eliminated an additional register
14706 : : from a computation. For example, we might have had A & B where
14707 : : we discover that B will always be zero. In this case we will
14708 : : eliminate the reference to A.
14709 : :
14710 : : In both cases, we must search to see if we can find a previous
14711 : : use of A and put the death note there. */
14712 : :
14713 : 9736302 : if (from_insn
14714 : 6796117 : && from_insn == i2mod
14715 : 9737953 : && !reg_overlap_mentioned_p (XEXP (note, 0), i2mod_new_rhs))
14716 : : tem_insn = from_insn;
14717 : : else
14718 : : {
14719 : 9734947 : if (from_insn
14720 : 6794762 : && CALL_P (from_insn)
14721 : 9952389 : && find_reg_fusage (from_insn, USE, XEXP (note, 0)))
14722 : : place = from_insn;
14723 : 9596737 : else if (i2 && reg_set_p (XEXP (note, 0), PATTERN (i2)))
14724 : : {
14725 : : /* If the new I2 sets the same register that is marked
14726 : : dead in the note, we do not in general know where to
14727 : : put the note. One important case we _can_ handle is
14728 : : when the note comes from I3. */
14729 : 37867 : if (from_insn == i3)
14730 : : place = i3;
14731 : : else
14732 : : break;
14733 : : }
14734 : 9558870 : else if (reg_referenced_p (XEXP (note, 0), PATTERN (i3)))
14735 : : place = i3;
14736 : 110347 : else if (i2 != 0 && next_nonnote_nondebug_insn (i2) == i3
14737 : 3796875 : && reg_referenced_p (XEXP (note, 0), PATTERN (i2)))
14738 : : place = i2;
14739 : 3641460 : else if ((rtx_equal_p (XEXP (note, 0), elim_i2)
14740 : 3532781 : && !(i2mod
14741 : 26210 : && reg_overlap_mentioned_p (XEXP (note, 0),
14742 : : i2mod_old_rhs)))
14743 : 134899 : || rtx_equal_p (XEXP (note, 0), elim_i1)
14744 : 3693110 : || rtx_equal_p (XEXP (note, 0), elim_i0))
14745 : : break;
14746 : 224285 : tem_insn = i3;
14747 : : }
14748 : :
14749 : 224285 : if (place == 0)
14750 : : {
14751 : 47829 : basic_block bb = this_basic_block;
14752 : :
14753 : 2391930 : for (tem_insn = PREV_INSN (tem_insn); place == 0; tem_insn = PREV_INSN (tem_insn))
14754 : : {
14755 : 2391930 : if (!NONDEBUG_INSN_P (tem_insn))
14756 : : {
14757 : 1777416 : if (tem_insn == BB_HEAD (bb))
14758 : : break;
14759 : 1742503 : continue;
14760 : : }
14761 : :
14762 : : /* If the register is being set at TEM_INSN, see if that is all
14763 : : TEM_INSN is doing. If so, delete TEM_INSN. Otherwise, make this
14764 : : into a REG_UNUSED note instead. Don't delete sets to
14765 : : global register vars. */
14766 : 614514 : if ((REGNO (XEXP (note, 0)) >= FIRST_PSEUDO_REGISTER
14767 : 1313 : || !global_regs[REGNO (XEXP (note, 0))])
14768 : 615827 : && reg_set_p (XEXP (note, 0), PATTERN (tem_insn)))
14769 : : {
14770 : 11525 : rtx set = single_set (tem_insn);
14771 : 11525 : rtx inner_dest = 0;
14772 : :
14773 : 11525 : if (set != 0)
14774 : 10760 : for (inner_dest = SET_DEST (set);
14775 : 10769 : (GET_CODE (inner_dest) == STRICT_LOW_PART
14776 : 10769 : || GET_CODE (inner_dest) == SUBREG
14777 : 10769 : || GET_CODE (inner_dest) == ZERO_EXTRACT);
14778 : 9 : inner_dest = XEXP (inner_dest, 0))
14779 : : ;
14780 : :
14781 : : /* Verify that it was the set, and not a clobber that
14782 : : modified the register.
14783 : :
14784 : : If we cannot delete the setter due to side
14785 : : effects, mark the user with an UNUSED note instead
14786 : : of deleting it. */
14787 : :
14788 : 10760 : if (set != 0 && ! side_effects_p (SET_SRC (set))
14789 : 10534 : && rtx_equal_p (XEXP (note, 0), inner_dest))
14790 : : {
14791 : : /* Move the notes and links of TEM_INSN elsewhere.
14792 : : This might delete other dead insns recursively.
14793 : : First set the pattern to something that won't use
14794 : : any register. */
14795 : 10419 : rtx old_notes = REG_NOTES (tem_insn);
14796 : :
14797 : 10419 : PATTERN (tem_insn) = pc_rtx;
14798 : 10419 : REG_NOTES (tem_insn) = NULL;
14799 : :
14800 : 10419 : distribute_notes (old_notes, tem_insn, tem_insn, NULL,
14801 : : NULL_RTX, NULL_RTX, NULL_RTX);
14802 : 10419 : distribute_links (LOG_LINKS (tem_insn));
14803 : :
14804 : 10419 : unsigned int regno = REGNO (XEXP (note, 0));
14805 : 10419 : reg_stat_type *rsp = ®_stat[regno];
14806 : 10419 : if (rsp->last_set == tem_insn)
14807 : 9297 : record_value_for_reg (XEXP (note, 0), NULL, NULL_RTX);
14808 : :
14809 : 10419 : SET_INSN_DELETED (tem_insn);
14810 : 10419 : if (tem_insn == i2)
14811 : 601598 : i2 = NULL;
14812 : : }
14813 : : else
14814 : : {
14815 : 1106 : PUT_REG_NOTE_KIND (note, REG_UNUSED);
14816 : :
14817 : : /* If there isn't already a REG_UNUSED note, put one
14818 : : here. Do not place a REG_DEAD note, even if
14819 : : the register is also used here; that would not
14820 : : match the algorithm used in lifetime analysis
14821 : : and can cause the consistency check in the
14822 : : scheduler to fail. */
14823 : 1106 : if (! find_regno_note (tem_insn, REG_UNUSED,
14824 : 1106 : REGNO (XEXP (note, 0))))
14825 : 630 : place = tem_insn;
14826 : : break;
14827 : : }
14828 : : }
14829 : 602989 : else if (reg_referenced_p (XEXP (note, 0), PATTERN (tem_insn))
14830 : 602989 : || (CALL_P (tem_insn)
14831 : 16391 : && find_reg_fusage (tem_insn, USE, XEXP (note, 0))))
14832 : : {
14833 : 11810 : place = tem_insn;
14834 : :
14835 : : /* If we are doing a 3->2 combination, and we have a
14836 : : register which formerly died in i3 and was not used
14837 : : by i2, which now no longer dies in i3 and is used in
14838 : : i2 but does not die in i2, and place is between i2
14839 : : and i3, then we may need to move a link from place to
14840 : : i2. */
14841 : 3752 : if (i2 && DF_INSN_LUID (place) > DF_INSN_LUID (i2)
14842 : 0 : && from_insn
14843 : 0 : && DF_INSN_LUID (from_insn) > DF_INSN_LUID (i2)
14844 : 11810 : && reg_referenced_p (XEXP (note, 0), PATTERN (i2)))
14845 : : {
14846 : 0 : struct insn_link *links = LOG_LINKS (place);
14847 : 0 : LOG_LINKS (place) = NULL;
14848 : 0 : distribute_links (links);
14849 : : }
14850 : : break;
14851 : : }
14852 : :
14853 : 601598 : if (tem_insn == BB_HEAD (bb))
14854 : : break;
14855 : : }
14856 : :
14857 : : }
14858 : :
14859 : : /* If the register is set or already dead at PLACE, we needn't do
14860 : : anything with this note if it is still a REG_DEAD note.
14861 : : We check here if it is set at all, not if is it totally replaced,
14862 : : which is what `dead_or_set_p' checks, so also check for it being
14863 : : set partially. */
14864 : :
14865 : 6138072 : if (place && REG_NOTE_KIND (note) == REG_DEAD)
14866 : : {
14867 : 6102053 : unsigned int regno = REGNO (XEXP (note, 0));
14868 : 6102053 : reg_stat_type *rsp = ®_stat[regno];
14869 : :
14870 : 6102053 : if (dead_or_set_p (place, XEXP (note, 0))
14871 : 6102053 : || reg_bitfield_target_p (XEXP (note, 0), PATTERN (place)))
14872 : : {
14873 : : /* Unless the register previously died in PLACE, clear
14874 : : last_death. [I no longer understand why this is
14875 : : being done.] */
14876 : 2840741 : if (rsp->last_death != place)
14877 : 526105 : rsp->last_death = 0;
14878 : : place = 0;
14879 : : }
14880 : : else
14881 : 3261312 : rsp->last_death = place;
14882 : :
14883 : : /* If this is a death note for a hard reg that is occupying
14884 : : multiple registers, ensure that we are still using all
14885 : : parts of the object. If we find a piece of the object
14886 : : that is unused, we must arrange for an appropriate REG_DEAD
14887 : : note to be added for it. However, we can't just emit a USE
14888 : : and tag the note to it, since the register might actually
14889 : : be dead; so we recourse, and the recursive call then finds
14890 : : the previous insn that used this register. */
14891 : :
14892 : 3261312 : if (place && REG_NREGS (XEXP (note, 0)) > 1)
14893 : : {
14894 : 769 : unsigned int endregno = END_REGNO (XEXP (note, 0));
14895 : 769 : bool all_used = true;
14896 : 769 : unsigned int i;
14897 : :
14898 : 2307 : for (i = regno; i < endregno; i++)
14899 : 1538 : if ((! refers_to_regno_p (i, PATTERN (place))
14900 : 1538 : && ! find_regno_fusage (place, USE, i))
14901 : 3076 : || dead_or_set_regno_p (place, i))
14902 : : {
14903 : : all_used = false;
14904 : : break;
14905 : : }
14906 : :
14907 : 769 : if (! all_used)
14908 : : {
14909 : : /* Put only REG_DEAD notes for pieces that are
14910 : : not already dead or set. */
14911 : :
14912 : 0 : for (i = regno; i < endregno;
14913 : 0 : i += hard_regno_nregs (i, reg_raw_mode[i]))
14914 : : {
14915 : 0 : rtx piece = regno_reg_rtx[i];
14916 : 0 : basic_block bb = this_basic_block;
14917 : :
14918 : 0 : if (! dead_or_set_p (place, piece)
14919 : 0 : && ! reg_bitfield_target_p (piece,
14920 : 0 : PATTERN (place)))
14921 : : {
14922 : 0 : rtx new_note = alloc_reg_note (REG_DEAD, piece,
14923 : : NULL_RTX);
14924 : :
14925 : 0 : distribute_notes (new_note, place, place,
14926 : : NULL, NULL_RTX, NULL_RTX,
14927 : : NULL_RTX);
14928 : : }
14929 : 0 : else if (! refers_to_regno_p (i, PATTERN (place))
14930 : 0 : && ! find_regno_fusage (place, USE, i))
14931 : 0 : for (tem_insn = PREV_INSN (place); ;
14932 : 0 : tem_insn = PREV_INSN (tem_insn))
14933 : : {
14934 : 0 : if (!NONDEBUG_INSN_P (tem_insn))
14935 : : {
14936 : 0 : if (tem_insn == BB_HEAD (bb))
14937 : : break;
14938 : 0 : continue;
14939 : : }
14940 : 0 : if (dead_or_set_p (tem_insn, piece)
14941 : 0 : || reg_bitfield_target_p (piece,
14942 : 0 : PATTERN (tem_insn)))
14943 : : {
14944 : 0 : add_reg_note (tem_insn, REG_UNUSED, piece);
14945 : 0 : break;
14946 : : }
14947 : : }
14948 : : }
14949 : :
14950 : : place = 0;
14951 : : }
14952 : : }
14953 : : }
14954 : : break;
14955 : :
14956 : 0 : default:
14957 : : /* Any other notes should not be present at this point in the
14958 : : compilation. */
14959 : 0 : gcc_unreachable ();
14960 : : }
14961 : :
14962 : 3978647 : if (place)
14963 : : {
14964 : 3955615 : XEXP (note, 1) = REG_NOTES (place);
14965 : 3955615 : REG_NOTES (place) = note;
14966 : :
14967 : : /* Set added_notes_insn to the earliest insn we added a note to. */
14968 : 3955615 : if (added_notes_insn == 0
14969 : 3955615 : || DF_INSN_LUID (added_notes_insn) > DF_INSN_LUID (place))
14970 : 2622838 : added_notes_insn = place;
14971 : : }
14972 : :
14973 : 12136696 : if (place2)
14974 : : {
14975 : 0 : add_shallow_copy_of_reg_note (place2, note);
14976 : :
14977 : : /* Set added_notes_insn to the earliest insn we added a note to. */
14978 : 0 : if (added_notes_insn == 0
14979 : 0 : || DF_INSN_LUID (added_notes_insn) > DF_INSN_LUID (place2))
14980 : 0 : added_notes_insn = place2;
14981 : : }
14982 : : }
14983 : 9283069 : }
14984 : :
14985 : : /* Similarly to above, distribute the LOG_LINKS that used to be present on
14986 : : I3, I2, and I1 to new locations. This is also called to add a link
14987 : : pointing at I3 when I3's destination is changed. */
14988 : :
14989 : : static void
14990 : 14872592 : distribute_links (struct insn_link *links)
14991 : : {
14992 : 14872592 : struct insn_link *link, *next_link;
14993 : :
14994 : 22009541 : for (link = links; link; link = next_link)
14995 : : {
14996 : 7136949 : rtx_insn *place = 0;
14997 : 7136949 : rtx_insn *insn;
14998 : 7136949 : rtx set, reg;
14999 : :
15000 : 7136949 : next_link = link->next;
15001 : :
15002 : : /* If the insn that this link points to is a NOTE, ignore it. */
15003 : 7136949 : if (NOTE_P (link->insn))
15004 : 3751515 : continue;
15005 : :
15006 : 3385434 : set = 0;
15007 : 3385434 : rtx pat = PATTERN (link->insn);
15008 : 3385434 : if (GET_CODE (pat) == SET)
15009 : : set = pat;
15010 : 577890 : else if (GET_CODE (pat) == PARALLEL)
15011 : : {
15012 : : int i;
15013 : 692081 : for (i = 0; i < XVECLEN (pat, 0); i++)
15014 : : {
15015 : 687260 : set = XVECEXP (pat, 0, i);
15016 : 687260 : if (GET_CODE (set) != SET)
15017 : 4822 : continue;
15018 : :
15019 : 682438 : reg = SET_DEST (set);
15020 : 682438 : while (GET_CODE (reg) == ZERO_EXTRACT
15021 : 690500 : || GET_CODE (reg) == STRICT_LOW_PART
15022 : 1380746 : || GET_CODE (reg) == SUBREG)
15023 : 8069 : reg = XEXP (reg, 0);
15024 : :
15025 : 682438 : if (!REG_P (reg))
15026 : 43447 : continue;
15027 : :
15028 : 638991 : if (REGNO (reg) == link->regno)
15029 : : break;
15030 : : }
15031 : 577159 : if (i == XVECLEN (pat, 0))
15032 : 4821 : continue;
15033 : : }
15034 : : else
15035 : 731 : continue;
15036 : :
15037 : 3379882 : reg = SET_DEST (set);
15038 : :
15039 : 3379882 : while (GET_CODE (reg) == ZERO_EXTRACT
15040 : 3409479 : || GET_CODE (reg) == STRICT_LOW_PART
15041 : 6818901 : || GET_CODE (reg) == SUBREG)
15042 : 30151 : reg = XEXP (reg, 0);
15043 : :
15044 : 3379882 : if (reg == pc_rtx)
15045 : 293 : continue;
15046 : :
15047 : : /* A LOG_LINK is defined as being placed on the first insn that uses
15048 : : a register and points to the insn that sets the register. Start
15049 : : searching at the next insn after the target of the link and stop
15050 : : when we reach a set of the register or the end of the basic block.
15051 : :
15052 : : Note that this correctly handles the link that used to point from
15053 : : I3 to I2. Also note that not much searching is typically done here
15054 : : since most links don't point very far away. */
15055 : :
15056 : 14529566 : for (insn = NEXT_INSN (link->insn);
15057 : 14529566 : (insn && (this_basic_block->next_bb == EXIT_BLOCK_PTR_FOR_FN (cfun)
15058 : 9719825 : || BB_HEAD (this_basic_block->next_bb) != insn));
15059 : 11149977 : insn = NEXT_INSN (insn))
15060 : 14510171 : if (DEBUG_INSN_P (insn))
15061 : 2854835 : continue;
15062 : 11655336 : else if (INSN_P (insn) && reg_overlap_mentioned_p (reg, PATTERN (insn)))
15063 : : {
15064 : 3218706 : if (reg_referenced_p (reg, PATTERN (insn)))
15065 : : place = insn;
15066 : : break;
15067 : : }
15068 : 8436630 : else if (CALL_P (insn)
15069 : 8436630 : && find_reg_fusage (insn, USE, reg))
15070 : : {
15071 : : place = insn;
15072 : : break;
15073 : : }
15074 : 8295248 : else if (INSN_P (insn) && reg_set_p (reg, insn))
15075 : : break;
15076 : :
15077 : : /* If we found a place to put the link, place it there unless there
15078 : : is already a link to the same insn as LINK at that point. */
15079 : :
15080 : 160883 : if (place)
15081 : : {
15082 : 3360088 : struct insn_link *link2;
15083 : :
15084 : 4352937 : FOR_EACH_LOG_LINK (link2, place)
15085 : 1009599 : if (link2->insn == link->insn && link2->regno == link->regno)
15086 : : break;
15087 : :
15088 : 3360088 : if (link2 == NULL)
15089 : : {
15090 : 3343338 : link->next = LOG_LINKS (place);
15091 : 3343338 : LOG_LINKS (place) = link;
15092 : :
15093 : : /* Set added_links_insn to the earliest insn we added a
15094 : : link to. */
15095 : 3343338 : if (added_links_insn == 0
15096 : 3343338 : || DF_INSN_LUID (added_links_insn) > DF_INSN_LUID (place))
15097 : 2624394 : added_links_insn = place;
15098 : : }
15099 : : }
15100 : : }
15101 : 14872592 : }
15102 : :
15103 : : /* Check for any register or memory mentioned in EQUIV that is not
15104 : : mentioned in EXPR. This is used to restrict EQUIV to "specializations"
15105 : : of EXPR where some registers may have been replaced by constants. */
15106 : :
15107 : : static bool
15108 : 2460729 : unmentioned_reg_p (rtx equiv, rtx expr)
15109 : : {
15110 : 2460729 : subrtx_iterator::array_type array;
15111 : 6500841 : FOR_EACH_SUBRTX (iter, array, equiv, NONCONST)
15112 : : {
15113 : 5312730 : const_rtx x = *iter;
15114 : 3636770 : if ((REG_P (x) || MEM_P (x))
15115 : 5680077 : && !reg_mentioned_p (x, expr))
15116 : 1272618 : return true;
15117 : : }
15118 : 1188111 : return false;
15119 : 2460729 : }
15120 : :
15121 : : /* Make pseudo-to-pseudo copies after every hard-reg-to-pseudo-copy, because
15122 : : the reg-to-reg copy can usefully combine with later instructions, but we
15123 : : do not want to combine the hard reg into later instructions, for that
15124 : : restricts register allocation. */
15125 : : static void
15126 : 1008570 : make_more_copies (void)
15127 : : {
15128 : 1008570 : basic_block bb;
15129 : :
15130 : 10771905 : FOR_EACH_BB_FN (bb, cfun)
15131 : : {
15132 : 9763335 : rtx_insn *insn;
15133 : :
15134 : 123363656 : FOR_BB_INSNS (bb, insn)
15135 : : {
15136 : 113600321 : if (!NONDEBUG_INSN_P (insn))
15137 : 57589506 : continue;
15138 : :
15139 : 56010815 : rtx set = single_set (insn);
15140 : 56010815 : if (!set)
15141 : 3867783 : continue;
15142 : :
15143 : 52143032 : rtx dest = SET_DEST (set);
15144 : 52143032 : if (!(REG_P (dest) && !HARD_REGISTER_P (dest)))
15145 : 29720980 : continue;
15146 : :
15147 : 22422052 : rtx src = SET_SRC (set);
15148 : 22422052 : if (!(REG_P (src) && HARD_REGISTER_P (src)))
15149 : 19492232 : continue;
15150 : 2929820 : if (TEST_HARD_REG_BIT (fixed_reg_set, REGNO (src)))
15151 : 9120 : continue;
15152 : :
15153 : 2920700 : rtx new_reg = gen_reg_rtx (GET_MODE (dest));
15154 : :
15155 : : /* The "original" pseudo copies have important attributes
15156 : : attached, like pointerness. We want that for these copies
15157 : : too, for use by insn recognition and later passes. */
15158 : 2920700 : set_reg_attrs_from_value (new_reg, dest);
15159 : :
15160 : 2920700 : rtx_insn *new_insn = gen_move_insn (new_reg, src);
15161 : 2920700 : SET_SRC (set) = new_reg;
15162 : 2920700 : emit_insn_before (new_insn, insn);
15163 : 2920700 : df_insn_rescan (insn);
15164 : : }
15165 : : }
15166 : 1008570 : }
15167 : :
15168 : : /* Try combining insns through substitution. */
15169 : : static void
15170 : 1008570 : rest_of_handle_combine (void)
15171 : : {
15172 : 1008570 : make_more_copies ();
15173 : :
15174 : 1008570 : df_set_flags (DF_LR_RUN_DCE + DF_DEFER_INSN_RESCAN);
15175 : 1008570 : df_note_add_problem ();
15176 : 1008570 : df_analyze ();
15177 : :
15178 : 1008570 : regstat_init_n_sets_and_refs ();
15179 : 1008570 : reg_n_sets_max = max_reg_num ();
15180 : :
15181 : 1008570 : bool rebuild_jump_labels_after_combine
15182 : 1008570 : = combine_instructions (get_insns (), max_reg_num ());
15183 : :
15184 : : /* Combining insns may have turned an indirect jump into a
15185 : : direct jump. Rebuild the JUMP_LABEL fields of jumping
15186 : : instructions. */
15187 : 1008570 : if (rebuild_jump_labels_after_combine)
15188 : : {
15189 : 2355 : if (dom_info_available_p (CDI_DOMINATORS))
15190 : 0 : free_dominance_info (CDI_DOMINATORS);
15191 : 2355 : timevar_push (TV_JUMP);
15192 : 2355 : rebuild_jump_labels (get_insns ());
15193 : 2355 : cleanup_cfg (0);
15194 : 2355 : timevar_pop (TV_JUMP);
15195 : : }
15196 : :
15197 : 1008570 : regstat_free_n_sets_and_refs ();
15198 : 1008570 : }
15199 : :
15200 : : namespace {
15201 : :
15202 : : const pass_data pass_data_combine =
15203 : : {
15204 : : RTL_PASS, /* type */
15205 : : "combine", /* name */
15206 : : OPTGROUP_NONE, /* optinfo_flags */
15207 : : TV_COMBINE, /* tv_id */
15208 : : PROP_cfglayout, /* properties_required */
15209 : : 0, /* properties_provided */
15210 : : 0, /* properties_destroyed */
15211 : : 0, /* todo_flags_start */
15212 : : TODO_df_finish, /* todo_flags_finish */
15213 : : };
15214 : :
15215 : : class pass_combine : public rtl_opt_pass
15216 : : {
15217 : : public:
15218 : 283157 : pass_combine (gcc::context *ctxt)
15219 : 566314 : : rtl_opt_pass (pass_data_combine, ctxt)
15220 : : {}
15221 : :
15222 : : /* opt_pass methods: */
15223 : 1435849 : bool gate (function *) final override { return (optimize > 0); }
15224 : 1008570 : unsigned int execute (function *) final override
15225 : : {
15226 : 1008570 : rest_of_handle_combine ();
15227 : 1008570 : return 0;
15228 : : }
15229 : :
15230 : : }; // class pass_combine
15231 : :
15232 : : } // anon namespace
15233 : :
15234 : : rtl_opt_pass *
15235 : 283157 : make_pass_combine (gcc::context *ctxt)
15236 : : {
15237 : 283157 : return new pass_combine (ctxt);
15238 : : }
|