Line data Source code
1 : /* Expands front end tree to back end RTL for GCC.
2 : Copyright (C) 1987-2026 Free Software Foundation, Inc.
3 :
4 : This file is part of GCC.
5 :
6 : GCC is free software; you can redistribute it and/or modify it under
7 : the terms of the GNU General Public License as published by the Free
8 : Software Foundation; either version 3, or (at your option) any later
9 : version.
10 :
11 : GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 : WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 : FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 : for more details.
15 :
16 : You should have received a copy of the GNU General Public License
17 : along with GCC; see the file COPYING3. If not see
18 : <http://www.gnu.org/licenses/>. */
19 :
20 : /* This file handles the generation of rtl code from tree structure
21 : at the level of the function as a whole.
22 : It creates the rtl expressions for parameters and auto variables
23 : and has full responsibility for allocating stack slots.
24 :
25 : `expand_function_start' is called at the beginning of a function,
26 : before the function body is parsed, and `expand_function_end' is
27 : called after parsing the body.
28 :
29 : Call `assign_stack_local' to allocate a stack slot for a local variable.
30 : This is usually done during the RTL generation for the function body,
31 : but it can also be done in the reload pass when a pseudo-register does
32 : not get a hard register. */
33 :
34 : #include "config.h"
35 : #include "system.h"
36 : #include "coretypes.h"
37 : #include "backend.h"
38 : #include "target.h"
39 : #include "rtl.h"
40 : #include "tree.h"
41 : #include "gimple-expr.h"
42 : #include "cfghooks.h"
43 : #include "df.h"
44 : #include "memmodel.h"
45 : #include "tm_p.h"
46 : #include "stringpool.h"
47 : #include "expmed.h"
48 : #include "optabs.h"
49 : #include "opts.h"
50 : #include "regs.h"
51 : #include "emit-rtl.h"
52 : #include "recog.h"
53 : #include "rtl-error.h"
54 : #include "hard-reg-set.h"
55 : #include "alias.h"
56 : #include "fold-const.h"
57 : #include "stor-layout.h"
58 : #include "varasm.h"
59 : #include "except.h"
60 : #include "dojump.h"
61 : #include "explow.h"
62 : #include "calls.h"
63 : #include "expr.h"
64 : #include "optabs-tree.h"
65 : #include "output.h"
66 : #include "langhooks.h"
67 : #include "common/common-target.h"
68 : #include "gimplify.h"
69 : #include "tree-pass.h"
70 : #include "cfgrtl.h"
71 : #include "cfganal.h"
72 : #include "cfgbuild.h"
73 : #include "cfgcleanup.h"
74 : #include "cfgexpand.h"
75 : #include "shrink-wrap.h"
76 : #include "toplev.h"
77 : #include "rtl-iter.h"
78 : #include "tree-dfa.h"
79 : #include "tree-ssa.h"
80 : #include "stringpool.h"
81 : #include "attribs.h"
82 : #include "gimple.h"
83 : #include "options.h"
84 : #include "function-abi.h"
85 : #include "value-range.h"
86 : #include "gimple-range.h"
87 : #include "insn-attr.h"
88 : #include "hierarchical_discriminator.h"
89 :
90 : /* So we can assign to cfun in this file. */
91 : #undef cfun
92 :
93 : #ifndef STACK_ALIGNMENT_NEEDED
94 : #define STACK_ALIGNMENT_NEEDED 1
95 : #endif
96 :
97 : #define STACK_BYTES (STACK_BOUNDARY / BITS_PER_UNIT)
98 :
99 : /* Round a value to the lowest integer less than it that is a multiple of
100 : the required alignment. Avoid using division in case the value is
101 : negative. Assume the alignment is a power of two. */
102 : #define FLOOR_ROUND(VALUE,ALIGN) ((VALUE) & ~((ALIGN) - 1))
103 :
104 : /* Similar, but round to the next highest integer that meets the
105 : alignment. */
106 : #define CEIL_ROUND(VALUE,ALIGN) (((VALUE) + (ALIGN) - 1) & ~((ALIGN)- 1))
107 :
108 : /* Nonzero once virtual register instantiation has been done.
109 : assign_stack_local uses frame_pointer_rtx when this is nonzero.
110 : calls.cc:emit_library_call_value_1 uses it to set up
111 : post-instantiation libcalls. */
112 : int virtuals_instantiated;
113 :
114 : /* Assign unique numbers to labels generated for profiling, debugging, etc. */
115 : static GTY(()) int funcdef_no;
116 :
117 : /* These variables hold pointers to functions to create and destroy
118 : target specific, per-function data structures. */
119 : struct machine_function * (*init_machine_status) (void);
120 :
121 : /* The currently compiled function. */
122 : struct function *cfun = 0;
123 :
124 : /* These hashes record the prologue and epilogue insns. */
125 :
126 : struct insn_cache_hasher : ggc_cache_ptr_hash<rtx_def>
127 : {
128 1151091860 : static hashval_t hash (rtx x) { return htab_hash_pointer (x); }
129 : static bool equal (rtx a, rtx b) { return a == b; }
130 : };
131 :
132 : static GTY((cache))
133 : hash_table<insn_cache_hasher> *prologue_insn_hash;
134 : static GTY((cache))
135 : hash_table<insn_cache_hasher> *epilogue_insn_hash;
136 :
137 :
138 : hash_table<used_type_hasher> *types_used_by_vars_hash = NULL;
139 : vec<tree, va_gc> *types_used_by_cur_var_decl;
140 :
141 : /* Forward declarations. */
142 :
143 : static class temp_slot *find_temp_slot_from_address (rtx);
144 : static void pad_to_arg_alignment (struct args_size *, int, struct args_size *);
145 : static void pad_below (struct args_size *, machine_mode, tree);
146 : static void reorder_blocks_1 (rtx_insn *, tree, vec<tree> *);
147 : static int all_blocks (tree, tree *);
148 : static tree *get_block_vector (tree, int *);
149 : extern tree debug_find_var_in_block_tree (tree, tree);
150 : /* We always define `record_insns' even if it's not used so that we
151 : can always export `prologue_epilogue_contains'. */
152 : static void record_insns (rtx_insn *, rtx, hash_table<insn_cache_hasher> **)
153 : ATTRIBUTE_UNUSED;
154 : static bool contains (const rtx_insn *, hash_table<insn_cache_hasher> *);
155 : static void prepare_function_start (void);
156 : static void do_clobber_return_reg (rtx, void *);
157 : static void do_use_return_reg (rtx, void *);
158 :
159 :
160 : /* Stack of nested functions. */
161 : /* Keep track of the cfun stack. */
162 :
163 : static vec<function *> function_context_stack;
164 :
165 : /* Save the current context for compilation of a nested function.
166 : This is called from language-specific code. */
167 :
168 : void
169 90181984 : push_function_context (void)
170 : {
171 90181984 : if (cfun == 0)
172 20 : allocate_struct_function (NULL, false);
173 :
174 90181984 : function_context_stack.safe_push (cfun);
175 90181984 : set_cfun (NULL);
176 90181984 : }
177 :
178 : /* Restore the last saved context, at the end of a nested function.
179 : This function is called from language-specific code. */
180 :
181 : void
182 90181963 : pop_function_context (void)
183 : {
184 90181963 : struct function *p = function_context_stack.pop ();
185 90181963 : set_cfun (p);
186 90181963 : current_function_decl = p->decl;
187 :
188 : /* Reset variables that have known state during rtx generation. */
189 90181963 : virtuals_instantiated = 0;
190 90181963 : generating_concat_p = 1;
191 90181963 : }
192 :
193 : /* Clear out all parts of the state in F that can safely be discarded
194 : after the function has been parsed, but not compiled, to let
195 : garbage collection reclaim the memory. */
196 :
197 : void
198 1696665 : free_after_parsing (struct function *f)
199 : {
200 1696665 : f->language = 0;
201 1696665 : }
202 :
203 : /* Clear out all parts of the state in F that can safely be discarded
204 : after the function has been compiled, to let garbage collection
205 : reclaim the memory. */
206 :
207 : void
208 1702107 : free_after_compilation (struct function *f)
209 : {
210 1702107 : prologue_insn_hash = NULL;
211 1702107 : epilogue_insn_hash = NULL;
212 :
213 1702107 : free (crtl->emit.regno_pointer_align);
214 :
215 1702107 : memset (crtl, 0, sizeof (struct rtl_data));
216 1702107 : f->eh = NULL;
217 1702107 : f->machine = NULL;
218 1702107 : f->cfg = NULL;
219 1702107 : f->curr_properties &= ~PROP_cfg;
220 1702246 : delete f->cond_uids;
221 1702107 : free_copyid_allocator (f);
222 :
223 1702107 : regno_reg_rtx = NULL;
224 1702107 : }
225 :
226 : /* Return size needed for stack frame based on slots so far allocated.
227 : This size counts from zero. It is not rounded to PREFERRED_STACK_BOUNDARY;
228 : the caller may have to do that. */
229 :
230 : poly_int64
231 149089512 : get_frame_size (void)
232 : {
233 149089512 : if (FRAME_GROWS_DOWNWARD)
234 149089512 : return -frame_offset;
235 : else
236 : return frame_offset;
237 : }
238 :
239 : /* Issue an error message and return TRUE if frame OFFSET overflows in
240 : the signed target pointer arithmetics for function FUNC. Otherwise
241 : return FALSE. */
242 :
243 : bool
244 3951191 : frame_offset_overflow (poly_int64 offset, tree func)
245 : {
246 3951191 : poly_uint64 size = FRAME_GROWS_DOWNWARD ? -offset : offset;
247 3951191 : unsigned HOST_WIDE_INT limit
248 3951191 : = ((HOST_WIDE_INT_1U << (GET_MODE_BITSIZE (Pmode) - 1))
249 : /* Leave room for the fixed part of the frame. */
250 3951191 : - 64 * UNITS_PER_WORD);
251 :
252 7902382 : if (!coeffs_in_range_p (size, 0U, limit))
253 : {
254 0 : unsigned HOST_WIDE_INT hwisize;
255 0 : if (size.is_constant (&hwisize))
256 0 : error_at (DECL_SOURCE_LOCATION (func),
257 : "total size of local objects %wu exceeds maximum %wu",
258 : hwisize, limit);
259 : else
260 : error_at (DECL_SOURCE_LOCATION (func),
261 : "total size of local objects exceeds maximum %wu",
262 : limit);
263 0 : return true;
264 : }
265 :
266 : return false;
267 : }
268 :
269 : /* Return the minimum spill slot alignment for a register of mode MODE. */
270 :
271 : unsigned int
272 1415746 : spill_slot_alignment (machine_mode mode ATTRIBUTE_UNUSED)
273 : {
274 1415746 : return STACK_SLOT_ALIGNMENT (NULL_TREE, mode, GET_MODE_ALIGNMENT (mode));
275 : }
276 :
277 : /* Return stack slot alignment in bits for TYPE and MODE. */
278 :
279 : static unsigned int
280 154718 : get_stack_local_alignment (tree type, machine_mode mode)
281 : {
282 154718 : unsigned int alignment;
283 :
284 154718 : if (mode == BLKmode)
285 22179 : alignment = BIGGEST_ALIGNMENT;
286 : else
287 132539 : alignment = GET_MODE_ALIGNMENT (mode);
288 :
289 : /* Allow the frond-end to (possibly) increase the alignment of this
290 : stack slot. */
291 154718 : if (! type)
292 56656 : type = lang_hooks.types.type_for_mode (mode, 0);
293 :
294 154718 : return STACK_SLOT_ALIGNMENT (type, mode, alignment);
295 : }
296 :
297 : /* Determine whether it is possible to fit a stack slot of size SIZE and
298 : alignment ALIGNMENT into an area in the stack frame that starts at
299 : frame offset START and has a length of LENGTH. If so, store the frame
300 : offset to be used for the stack slot in *POFFSET and return true;
301 : return false otherwise. This function will extend the frame size when
302 : given a start/length pair that lies at the end of the frame. */
303 :
304 : static bool
305 2359872 : try_fit_stack_local (poly_int64 start, poly_int64 length,
306 : poly_int64 size, unsigned int alignment,
307 : poly_int64 *poffset)
308 : {
309 2359872 : poly_int64 this_frame_offset;
310 2359872 : int frame_off, frame_alignment, frame_phase;
311 :
312 : /* Calculate how many bytes the start of local variables is off from
313 : stack alignment. */
314 2359872 : frame_alignment = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
315 2359872 : frame_off = targetm.starting_frame_offset () % frame_alignment;
316 2359872 : frame_phase = frame_off ? frame_alignment - frame_off : 0;
317 :
318 : /* Round the frame offset to the specified alignment. */
319 :
320 2359872 : if (FRAME_GROWS_DOWNWARD)
321 2359872 : this_frame_offset
322 2359872 : = (aligned_lower_bound (start + length - size - frame_phase, alignment)
323 2359872 : + frame_phase);
324 : else
325 : this_frame_offset
326 : = aligned_upper_bound (start - frame_phase, alignment) + frame_phase;
327 :
328 : /* See if it fits. If this space is at the edge of the frame,
329 : consider extending the frame to make it fit. Our caller relies on
330 : this when allocating a new slot. */
331 2359872 : if (maybe_lt (this_frame_offset, start))
332 : {
333 486331 : if (known_eq (frame_offset, start))
334 362643 : frame_offset = this_frame_offset;
335 : else
336 : return false;
337 : }
338 1873541 : else if (maybe_gt (this_frame_offset + size, start + length))
339 : {
340 0 : if (known_eq (frame_offset, start + length))
341 0 : frame_offset = this_frame_offset + size;
342 : else
343 : return false;
344 : }
345 :
346 2236184 : *poffset = this_frame_offset;
347 2236184 : return true;
348 : }
349 :
350 : /* Create a new frame_space structure describing free space in the stack
351 : frame beginning at START and ending at END, and chain it into the
352 : function's frame_space_list. */
353 :
354 : static void
355 390435 : add_frame_space (poly_int64 start, poly_int64 end)
356 : {
357 390435 : class frame_space *space = ggc_alloc<frame_space> ();
358 390435 : space->next = crtl->frame_space_list;
359 390435 : crtl->frame_space_list = space;
360 390435 : space->start = start;
361 390435 : space->length = end - start;
362 390435 : }
363 :
364 : /* Allocate a stack slot of SIZE bytes and return a MEM rtx for it
365 : with machine mode MODE.
366 :
367 : ALIGN controls the amount of alignment for the address of the slot:
368 : 0 means according to MODE,
369 : -1 means use BIGGEST_ALIGNMENT and round size to multiple of that,
370 : -2 means use BITS_PER_UNIT,
371 : positive specifies alignment boundary in bits.
372 :
373 : KIND has ASLK_REDUCE_ALIGN bit set if it is OK to reduce
374 : alignment and ASLK_RECORD_PAD bit set if we should remember
375 : extra space we allocated for alignment purposes. When we are
376 : called from assign_stack_temp_for_type, it is not set so we don't
377 : track the same stack slot in two independent lists.
378 :
379 : We do not round to stack_boundary here. */
380 :
381 : rtx
382 2236184 : assign_stack_local_1 (machine_mode mode, poly_int64 size,
383 : int align, int kind)
384 : {
385 2236184 : rtx x, addr;
386 2236184 : poly_int64 bigend_correction = 0;
387 2236184 : poly_int64 slot_offset = 0, old_frame_offset;
388 2236184 : unsigned int alignment, alignment_in_bits;
389 :
390 2236184 : if (align == 0)
391 : {
392 7018 : alignment = get_stack_local_alignment (NULL, mode);
393 7018 : alignment /= BITS_PER_UNIT;
394 : }
395 2229166 : else if (align == -1)
396 : {
397 947 : alignment = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
398 947 : size = aligned_upper_bound (size, alignment);
399 : }
400 2228219 : else if (align == -2)
401 : alignment = 1; /* BITS_PER_UNIT / BITS_PER_UNIT */
402 : else
403 2228219 : alignment = align / BITS_PER_UNIT;
404 :
405 2236184 : alignment_in_bits = alignment * BITS_PER_UNIT;
406 :
407 : /* Ignore alignment if it exceeds MAX_SUPPORTED_STACK_ALIGNMENT. */
408 2236184 : if (alignment_in_bits > MAX_SUPPORTED_STACK_ALIGNMENT)
409 : {
410 0 : alignment_in_bits = MAX_SUPPORTED_STACK_ALIGNMENT;
411 0 : alignment = MAX_SUPPORTED_STACK_ALIGNMENT / BITS_PER_UNIT;
412 : }
413 :
414 2236184 : if (SUPPORTS_STACK_ALIGNMENT)
415 : {
416 2236184 : if (crtl->stack_alignment_estimated < alignment_in_bits)
417 : {
418 4379 : if (!crtl->stack_realign_processed)
419 4356 : crtl->stack_alignment_estimated = alignment_in_bits;
420 : else
421 : {
422 : /* If stack is realigned and stack alignment value
423 : hasn't been finalized, it is OK not to increase
424 : stack_alignment_estimated. The bigger alignment
425 : requirement is recorded in stack_alignment_needed
426 : below. */
427 23 : gcc_assert (!crtl->stack_realign_finalized);
428 23 : if (!crtl->stack_realign_needed)
429 : {
430 : /* It is OK to reduce the alignment as long as the
431 : requested size is 0 or the estimated stack
432 : alignment >= mode alignment. */
433 23 : gcc_assert ((kind & ASLK_REDUCE_ALIGN)
434 : || known_eq (size, 0)
435 : || (crtl->stack_alignment_estimated
436 : >= GET_MODE_ALIGNMENT (mode)));
437 23 : alignment_in_bits = crtl->stack_alignment_estimated;
438 23 : alignment = alignment_in_bits / BITS_PER_UNIT;
439 : }
440 : }
441 : }
442 : }
443 :
444 2236184 : if (crtl->stack_alignment_needed < alignment_in_bits)
445 18361 : crtl->stack_alignment_needed = alignment_in_bits;
446 2236184 : if (crtl->max_used_stack_slot_alignment < alignment_in_bits)
447 291283 : crtl->max_used_stack_slot_alignment = alignment_in_bits;
448 :
449 2236184 : if (mode != BLKmode || maybe_ne (size, 0))
450 : {
451 1443795 : if (kind & ASLK_RECORD_PAD)
452 : {
453 : class frame_space **psp;
454 :
455 1483543 : for (psp = &crtl->frame_space_list; *psp; psp = &(*psp)->next)
456 : {
457 185470 : class frame_space *space = *psp;
458 185470 : if (!try_fit_stack_local (space->start, space->length, size,
459 : alignment, &slot_offset))
460 123688 : continue;
461 61782 : *psp = space->next;
462 61782 : if (known_gt (slot_offset, space->start))
463 25791 : add_frame_space (space->start, slot_offset);
464 61782 : if (known_lt (slot_offset + size, space->start + space->length))
465 10780 : add_frame_space (slot_offset + size,
466 10780 : space->start + space->length);
467 61782 : goto found_space;
468 : }
469 : }
470 : }
471 : else if (!STACK_ALIGNMENT_NEEDED)
472 : {
473 : slot_offset = frame_offset;
474 : goto found_space;
475 : }
476 :
477 2174402 : old_frame_offset = frame_offset;
478 :
479 2174402 : if (FRAME_GROWS_DOWNWARD)
480 : {
481 2174402 : frame_offset -= size;
482 2174402 : try_fit_stack_local (frame_offset, size, size, alignment, &slot_offset);
483 :
484 2174402 : if (kind & ASLK_RECORD_PAD)
485 : {
486 2090462 : if (known_gt (slot_offset, frame_offset))
487 0 : add_frame_space (frame_offset, slot_offset);
488 2090462 : if (known_lt (slot_offset + size, old_frame_offset))
489 353864 : add_frame_space (slot_offset + size, old_frame_offset);
490 : }
491 : }
492 : else
493 : {
494 : frame_offset += size;
495 : try_fit_stack_local (old_frame_offset, size, size, alignment, &slot_offset);
496 :
497 : if (kind & ASLK_RECORD_PAD)
498 : {
499 : if (known_gt (slot_offset, old_frame_offset))
500 : add_frame_space (old_frame_offset, slot_offset);
501 : if (known_lt (slot_offset + size, frame_offset))
502 : add_frame_space (slot_offset + size, frame_offset);
503 : }
504 : }
505 :
506 2236184 : found_space:
507 : /* On a big-endian machine, if we are allocating more space than we will use,
508 : use the least significant bytes of those that are allocated. */
509 2236184 : if (mode != BLKmode)
510 : {
511 : /* The slot size can sometimes be smaller than the mode size;
512 : e.g. the rs6000 port allocates slots with a vector mode
513 : that have the size of only one element. However, the slot
514 : size must always be ordered wrt to the mode size, in the
515 : same way as for a subreg. */
516 673779 : gcc_checking_assert (ordered_p (GET_MODE_SIZE (mode), size));
517 : if (BYTES_BIG_ENDIAN && maybe_lt (GET_MODE_SIZE (mode), size))
518 : bigend_correction = size - GET_MODE_SIZE (mode);
519 : }
520 :
521 : /* If we have already instantiated virtual registers, return the actual
522 : address relative to the frame pointer. */
523 2236184 : if (virtuals_instantiated)
524 1838811 : addr = plus_constant (Pmode, frame_pointer_rtx,
525 : trunc_int_for_mode
526 1548678 : (slot_offset + bigend_correction
527 1838811 : + targetm.starting_frame_offset (), Pmode));
528 : else
529 720084 : addr = plus_constant (Pmode, virtual_stack_vars_rtx,
530 : trunc_int_for_mode
531 : (slot_offset + bigend_correction,
532 687506 : Pmode));
533 :
534 2236184 : x = gen_rtx_MEM (mode, addr);
535 2236184 : set_mem_align (x, alignment_in_bits);
536 2236184 : MEM_NOTRAP_P (x) = 1;
537 :
538 2236184 : vec_safe_push (stack_slot_list, x);
539 :
540 2236184 : if (frame_offset_overflow (frame_offset, current_function_decl))
541 0 : frame_offset = 0;
542 :
543 2236184 : return x;
544 : }
545 :
546 : /* Wrap up assign_stack_local_1 with last parameter as false. */
547 :
548 : rtx
549 2152244 : assign_stack_local (machine_mode mode, poly_int64 size, int align)
550 : {
551 2152244 : return assign_stack_local_1 (mode, size, align, ASLK_RECORD_PAD);
552 : }
553 :
554 : /* In order to evaluate some expressions, such as function calls returning
555 : structures in memory, we need to temporarily allocate stack locations.
556 : We record each allocated temporary in the following structure.
557 :
558 : Associated with each temporary slot is a nesting level. When we pop up
559 : one level, all temporaries associated with the previous level are freed.
560 : Normally, all temporaries are freed after the execution of the statement
561 : in which they were created. However, if we are inside a ({...}) grouping,
562 : the result may be in a temporary and hence must be preserved. If the
563 : result could be in a temporary, we preserve it if we can determine which
564 : one it is in. If we cannot determine which temporary may contain the
565 : result, all temporaries are preserved. A temporary is preserved by
566 : pretending it was allocated at the previous nesting level. */
567 :
568 : class GTY(()) temp_slot {
569 : public:
570 : /* Points to next temporary slot. */
571 : class temp_slot *next;
572 : /* Points to previous temporary slot. */
573 : class temp_slot *prev;
574 : /* The rtx to used to reference the slot. */
575 : rtx slot;
576 : /* The size, in units, of the slot. */
577 : poly_int64 size;
578 : /* The type of the object in the slot, or zero if it doesn't correspond
579 : to a type. We use this to determine whether a slot can be reused.
580 : It can be reused if objects of the type of the new slot will always
581 : conflict with objects of the type of the old slot. */
582 : tree type;
583 : /* The alignment (in bits) of the slot. */
584 : unsigned int align;
585 : /* True if this temporary is currently in use. */
586 : bool in_use;
587 : /* Nesting level at which this slot is being used. */
588 : int level;
589 : /* The offset of the slot from the frame_pointer, including extra space
590 : for alignment. This info is for combine_temp_slots. */
591 : poly_int64 base_offset;
592 : /* The size of the slot, including extra space for alignment. This
593 : info is for combine_temp_slots. */
594 : poly_int64 full_size;
595 : };
596 :
597 : /* Entry for the below hash table. */
598 : struct GTY((for_user)) temp_slot_address_entry {
599 : hashval_t hash;
600 : rtx address;
601 : class temp_slot *temp_slot;
602 : };
603 :
604 : struct temp_address_hasher : ggc_ptr_hash<temp_slot_address_entry>
605 : {
606 : static hashval_t hash (temp_slot_address_entry *);
607 : static bool equal (temp_slot_address_entry *, temp_slot_address_entry *);
608 : };
609 :
610 : /* A table of addresses that represent a stack slot. The table is a mapping
611 : from address RTXen to a temp slot. */
612 : static GTY(()) hash_table<temp_address_hasher> *temp_slot_address_table;
613 : static size_t n_temp_slots_in_use;
614 :
615 : /* Removes temporary slot TEMP from LIST. */
616 :
617 : static void
618 212845 : cut_slot_from_list (class temp_slot *temp, class temp_slot **list)
619 : {
620 0 : if (temp->next)
621 27996 : temp->next->prev = temp->prev;
622 212845 : if (temp->prev)
623 7959 : temp->prev->next = temp->next;
624 : else
625 204886 : *list = temp->next;
626 :
627 212845 : temp->prev = temp->next = NULL;
628 697 : }
629 :
630 : /* Inserts temporary slot TEMP to LIST. */
631 :
632 : static void
633 296709 : insert_slot_to_list (class temp_slot *temp, class temp_slot **list)
634 : {
635 296709 : temp->next = *list;
636 0 : if (*list)
637 80560 : (*list)->prev = temp;
638 296709 : temp->prev = NULL;
639 296709 : *list = temp;
640 0 : }
641 :
642 : /* Returns the list of used temp slots at LEVEL. */
643 :
644 : static class temp_slot **
645 65935108 : temp_slots_at_level (int level)
646 : {
647 130455042 : if (level >= (int) vec_safe_length (used_temp_slots))
648 1896681 : vec_safe_grow_cleared (used_temp_slots, level + 1, true);
649 :
650 65935108 : return &(*used_temp_slots)[level];
651 : }
652 :
653 : /* Returns the maximal temporary slot level. */
654 :
655 : static int
656 1327277 : max_slot_level (void)
657 : {
658 0 : if (!used_temp_slots)
659 : return -1;
660 :
661 1272250 : return used_temp_slots->length () - 1;
662 : }
663 :
664 : /* Moves temporary slot TEMP to LEVEL. */
665 :
666 : static void
667 1232 : move_slot_to_level (class temp_slot *temp, int level)
668 : {
669 1232 : cut_slot_from_list (temp, temp_slots_at_level (temp->level));
670 1232 : insert_slot_to_list (temp, temp_slots_at_level (level));
671 1232 : temp->level = level;
672 1232 : }
673 :
674 : /* Make temporary slot TEMP available. */
675 :
676 : static void
677 147156 : make_slot_available (class temp_slot *temp)
678 : {
679 147156 : cut_slot_from_list (temp, temp_slots_at_level (temp->level));
680 147156 : insert_slot_to_list (temp, &avail_temp_slots);
681 147156 : temp->in_use = false;
682 147156 : temp->level = -1;
683 147156 : n_temp_slots_in_use--;
684 147156 : }
685 :
686 : /* Compute the hash value for an address -> temp slot mapping.
687 : The value is cached on the mapping entry. */
688 : static hashval_t
689 9020849 : temp_slot_address_compute_hash (struct temp_slot_address_entry *t)
690 : {
691 9020849 : int do_not_record = 0;
692 9020849 : return hash_rtx (t->address, GET_MODE (t->address),
693 9020849 : &do_not_record, NULL, false);
694 : }
695 :
696 : /* Return the hash value for an address -> temp slot mapping. */
697 : hashval_t
698 30610 : temp_address_hasher::hash (temp_slot_address_entry *t)
699 : {
700 30610 : return t->hash;
701 : }
702 :
703 : /* Compare two address -> temp slot mapping entries. */
704 : bool
705 31392 : temp_address_hasher::equal (temp_slot_address_entry *t1,
706 : temp_slot_address_entry *t2)
707 : {
708 31392 : return exp_equiv_p (t1->address, t2->address, 0, true);
709 : }
710 :
711 : /* Add ADDRESS as an alias of TEMP_SLOT to the addess -> temp slot mapping. */
712 : static void
713 147738 : insert_temp_slot_address (rtx address, class temp_slot *temp_slot)
714 : {
715 147738 : struct temp_slot_address_entry *t = ggc_alloc<temp_slot_address_entry> ();
716 147738 : t->address = copy_rtx (address);
717 147738 : t->temp_slot = temp_slot;
718 147738 : t->hash = temp_slot_address_compute_hash (t);
719 147738 : *temp_slot_address_table->find_slot_with_hash (t, t->hash, INSERT) = t;
720 147738 : }
721 :
722 : /* Remove an address -> temp slot mapping entry if the temp slot is
723 : not in use anymore. Callback for remove_unused_temp_slot_addresses. */
724 : int
725 1055 : remove_unused_temp_slot_addresses_1 (temp_slot_address_entry **slot, void *)
726 : {
727 1055 : const struct temp_slot_address_entry *t = *slot;
728 1055 : if (! t->temp_slot->in_use)
729 604 : temp_slot_address_table->clear_slot (slot);
730 1055 : return 1;
731 : }
732 :
733 : /* Remove all mappings of addresses to unused temp slots. */
734 : static void
735 139735 : remove_unused_temp_slot_addresses (void)
736 : {
737 : /* Use quicker clearing if there aren't any active temp slots. */
738 139735 : if (n_temp_slots_in_use)
739 434 : temp_slot_address_table->traverse
740 1489 : <void *, remove_unused_temp_slot_addresses_1> (NULL);
741 : else
742 139301 : temp_slot_address_table->empty ();
743 139735 : }
744 :
745 : /* Find the temp slot corresponding to the object at address X. */
746 :
747 : static class temp_slot *
748 8873111 : find_temp_slot_from_address (rtx x)
749 : {
750 8873111 : class temp_slot *p;
751 8873111 : struct temp_slot_address_entry tmp, *t;
752 :
753 : /* First try the easy way:
754 : See if X exists in the address -> temp slot mapping. */
755 8873111 : tmp.address = x;
756 8873111 : tmp.temp_slot = NULL;
757 8873111 : tmp.hash = temp_slot_address_compute_hash (&tmp);
758 8873111 : t = temp_slot_address_table->find_with_hash (&tmp, tmp.hash);
759 8873111 : if (t)
760 1264 : return t->temp_slot;
761 :
762 : /* If we have a sum involving a register, see if it points to a temp
763 : slot. */
764 1566657 : if (GET_CODE (x) == PLUS && REG_P (XEXP (x, 0))
765 10085342 : && (p = find_temp_slot_from_address (XEXP (x, 0))) != 0)
766 : return p;
767 1566657 : else if (GET_CODE (x) == PLUS && REG_P (XEXP (x, 1))
768 9096962 : && (p = find_temp_slot_from_address (XEXP (x, 1))) != 0)
769 : return p;
770 :
771 : /* Last resort: Address is a virtual stack var address. */
772 8871847 : poly_int64 offset;
773 8871847 : if (strip_offset (x, &offset) == virtual_stack_vars_rtx)
774 : {
775 1327277 : int i;
776 5791309 : for (i = max_slot_level (); i >= 0; i--)
777 3193310 : for (p = *temp_slots_at_level (i); p; p = p->next)
778 3056 : if (known_in_range_p (offset, p->base_offset, p->full_size))
779 : return p;
780 : }
781 :
782 : return NULL;
783 : }
784 :
785 : /* Allocate a temporary stack slot and record it for possible later
786 : reuse.
787 :
788 : MODE is the machine mode to be given to the returned rtx.
789 :
790 : SIZE is the size in units of the space required. We do no rounding here
791 : since assign_stack_local will do any required rounding.
792 :
793 : TYPE is the type that will be used for the stack slot. */
794 :
795 : rtx
796 147700 : assign_stack_temp_for_type (machine_mode mode, poly_int64 size, tree type)
797 : {
798 147700 : unsigned int align;
799 147700 : class temp_slot *p, *best_p = 0, *selected = NULL, **pp;
800 147700 : rtx slot;
801 :
802 147700 : gcc_assert (known_size_p (size));
803 :
804 147700 : align = get_stack_local_alignment (type, mode);
805 :
806 : /* Try to find an available, already-allocated temporary of the proper
807 : mode which meets the size and alignment requirements. Choose the
808 : smallest one with the closest alignment.
809 :
810 : If assign_stack_temp is called outside of the tree->rtl expansion,
811 : we cannot reuse the stack slots (that may still refer to
812 : VIRTUAL_STACK_VARS_REGNUM). */
813 147700 : if (!virtuals_instantiated)
814 : {
815 11106335 : for (p = avail_temp_slots; p; p = p->next)
816 : {
817 11009270 : if (p->align >= align
818 10449508 : && known_ge (p->size, size)
819 10432573 : && GET_MODE (p->slot) == mode
820 10295494 : && objects_must_conflict_p (p->type, type)
821 11073258 : && (best_p == 0
822 228 : || (known_eq (best_p->size, p->size)
823 126 : ? best_p->align > p->align
824 102 : : known_ge (best_p->size, p->size))))
825 : {
826 63842 : if (p->align == align && known_eq (p->size, size))
827 : {
828 50635 : selected = p;
829 50635 : cut_slot_from_list (selected, &avail_temp_slots);
830 50635 : best_p = 0;
831 50635 : break;
832 : }
833 : best_p = p;
834 : }
835 : }
836 : }
837 :
838 : /* Make our best, if any, the one to use. */
839 147700 : if (best_p)
840 : {
841 13125 : selected = best_p;
842 13125 : cut_slot_from_list (selected, &avail_temp_slots);
843 :
844 : /* If there are enough aligned bytes left over, make them into a new
845 : temp_slot so that the extra bytes don't get wasted. Do this only
846 : for BLKmode slots, so that we can be sure of the alignment. */
847 13125 : if (GET_MODE (best_p->slot) == BLKmode)
848 : {
849 10804 : int alignment = best_p->align / BITS_PER_UNIT;
850 10804 : poly_int64 rounded_size = aligned_upper_bound (size, alignment);
851 :
852 10804 : if (known_ge (best_p->size - rounded_size, alignment))
853 : {
854 621 : p = ggc_alloc<temp_slot> ();
855 621 : p->in_use = false;
856 621 : p->size = best_p->size - rounded_size;
857 621 : p->base_offset = best_p->base_offset + rounded_size;
858 621 : p->full_size = best_p->full_size - rounded_size;
859 621 : p->slot = adjust_address_nv (best_p->slot, BLKmode, rounded_size);
860 621 : p->align = best_p->align;
861 621 : p->type = best_p->type;
862 621 : insert_slot_to_list (p, &avail_temp_slots);
863 :
864 621 : vec_safe_push (stack_slot_list, p->slot);
865 :
866 621 : best_p->size = rounded_size;
867 621 : best_p->full_size = rounded_size;
868 : }
869 : }
870 : }
871 :
872 : /* If we still didn't find one, make a new temporary. */
873 145379 : if (selected == 0)
874 : {
875 83940 : poly_int64 frame_offset_old = frame_offset;
876 :
877 83940 : p = ggc_alloc<temp_slot> ();
878 :
879 : /* We are passing an explicit alignment request to assign_stack_local.
880 : One side effect of that is assign_stack_local will not round SIZE
881 : to ensure the frame offset remains suitably aligned.
882 :
883 : So for requests which depended on the rounding of SIZE, we go ahead
884 : and round it now. We also make sure ALIGNMENT is at least
885 : BIGGEST_ALIGNMENT. */
886 93785 : gcc_assert (mode != BLKmode || align == BIGGEST_ALIGNMENT);
887 83940 : p->slot = assign_stack_local_1 (mode,
888 : (mode == BLKmode
889 9938 : ? aligned_upper_bound (size,
890 : (int) align
891 : / BITS_PER_UNIT)
892 : : size),
893 : align, 0);
894 :
895 83940 : p->align = align;
896 :
897 : /* The following slot size computation is necessary because we don't
898 : know the actual size of the temporary slot until assign_stack_local
899 : has performed all the frame alignment and size rounding for the
900 : requested temporary. Note that extra space added for alignment
901 : can be either above or below this stack slot depending on which
902 : way the frame grows. We include the extra space if and only if it
903 : is above this slot. */
904 83940 : if (FRAME_GROWS_DOWNWARD)
905 83940 : p->size = frame_offset_old - frame_offset;
906 : else
907 : p->size = size;
908 :
909 : /* Now define the fields used by combine_temp_slots. */
910 83940 : if (FRAME_GROWS_DOWNWARD)
911 : {
912 83940 : p->base_offset = frame_offset;
913 83940 : p->full_size = frame_offset_old - frame_offset;
914 : }
915 : else
916 : {
917 : p->base_offset = frame_offset_old;
918 : p->full_size = frame_offset - frame_offset_old;
919 : }
920 :
921 83940 : selected = p;
922 : }
923 :
924 147700 : p = selected;
925 147700 : p->in_use = true;
926 147700 : p->type = type;
927 147700 : p->level = temp_slot_level;
928 147700 : n_temp_slots_in_use++;
929 :
930 147700 : pp = temp_slots_at_level (p->level);
931 147700 : insert_slot_to_list (p, pp);
932 147700 : insert_temp_slot_address (XEXP (p->slot, 0), p);
933 :
934 : /* Create a new MEM rtx to avoid clobbering MEM flags of old slots. */
935 147700 : slot = gen_rtx_MEM (mode, XEXP (p->slot, 0));
936 147700 : vec_safe_push (stack_slot_list, slot);
937 :
938 : /* If we know the alias set for the memory that will be used, use
939 : it. If there's no TYPE, then we don't know anything about the
940 : alias set for the memory. */
941 147700 : set_mem_alias_set (slot, type ? get_alias_set (type) : 0);
942 147700 : set_mem_align (slot, align);
943 :
944 : /* If a type is specified, set the relevant flags. */
945 147700 : if (type != 0)
946 98062 : MEM_VOLATILE_P (slot) = TYPE_VOLATILE (type);
947 147700 : MEM_NOTRAP_P (slot) = 1;
948 :
949 147700 : return slot;
950 : }
951 :
952 : /* Allocate a temporary stack slot and record it for possible later
953 : reuse. First two arguments are same as in preceding function. */
954 :
955 : rtx
956 49638 : assign_stack_temp (machine_mode mode, poly_int64 size)
957 : {
958 49638 : return assign_stack_temp_for_type (mode, size, NULL_TREE);
959 : }
960 :
961 : /* Assign a temporary.
962 : If TYPE_OR_DECL is a decl, then we are doing it on behalf of the decl
963 : and so that should be used in error messages. In either case, we
964 : allocate of the given type.
965 : MEMORY_REQUIRED is 1 if the result must be addressable stack memory;
966 : it is 0 if a register is OK.
967 : DONT_PROMOTE is 1 if we should not promote values in register
968 : to wider modes. */
969 :
970 : rtx
971 1000934 : assign_temp (tree type_or_decl, int memory_required,
972 : int dont_promote ATTRIBUTE_UNUSED)
973 : {
974 1000934 : tree type, decl;
975 1000934 : machine_mode mode;
976 : #ifdef PROMOTE_MODE
977 1000934 : int unsignedp;
978 : #endif
979 :
980 1000934 : if (DECL_P (type_or_decl))
981 0 : decl = type_or_decl, type = TREE_TYPE (decl);
982 : else
983 : decl = NULL, type = type_or_decl;
984 :
985 1000934 : mode = TYPE_MODE (type);
986 : #ifdef PROMOTE_MODE
987 1000934 : unsignedp = TYPE_UNSIGNED (type);
988 : #endif
989 :
990 : /* Allocating temporaries of TREE_ADDRESSABLE type must be done in the front
991 : end. See also create_tmp_var for the gimplification-time check. */
992 1000934 : gcc_assert (!TREE_ADDRESSABLE (type) && COMPLETE_TYPE_P (type));
993 :
994 1000934 : if (mode == BLKmode || memory_required)
995 : {
996 92481 : poly_int64 size;
997 92481 : rtx tmp;
998 :
999 : /* Unfortunately, we don't yet know how to allocate variable-sized
1000 : temporaries. However, sometimes we can find a fixed upper limit on
1001 : the size, so try that instead. */
1002 92481 : if (!poly_int_tree_p (TYPE_SIZE_UNIT (type), &size))
1003 0 : size = max_int_size_in_bytes (type);
1004 :
1005 : /* Zero sized arrays are a GNU C extension. Set size to 1 to avoid
1006 : problems with allocating the stack space. */
1007 92481 : if (known_eq (size, 0))
1008 0 : size = 1;
1009 :
1010 : /* The size of the temporary may be too large to fit into an integer. */
1011 : /* ??? Not sure this should happen except for user silliness, so limit
1012 : this to things that aren't compiler-generated temporaries. The
1013 : rest of the time we'll die in assign_stack_temp_for_type. */
1014 92481 : if (decl
1015 0 : && !known_size_p (size)
1016 92481 : && TREE_CODE (TYPE_SIZE_UNIT (type)) == INTEGER_CST)
1017 : {
1018 0 : error ("size of variable %q+D is too large", decl);
1019 0 : size = 1;
1020 : }
1021 :
1022 92481 : tmp = assign_stack_temp_for_type (mode, size, type);
1023 92481 : return tmp;
1024 : }
1025 :
1026 : #ifdef PROMOTE_MODE
1027 908453 : if (! dont_promote)
1028 0 : mode = promote_mode (type, mode, &unsignedp);
1029 : #endif
1030 :
1031 908453 : return gen_reg_rtx (mode);
1032 : }
1033 :
1034 : /* Combine temporary stack slots which are adjacent on the stack.
1035 :
1036 : This allows for better use of already allocated stack space. This is only
1037 : done for BLKmode slots because we can be sure that we won't have alignment
1038 : problems in this case. */
1039 :
1040 : static void
1041 139735 : combine_temp_slots (void)
1042 : {
1043 139735 : class temp_slot *p, *q, *next, *next_q;
1044 139735 : int num_slots;
1045 :
1046 : /* We can't combine slots, because the information about which slot
1047 : is in which alias set will be lost. */
1048 139735 : if (flag_strict_aliasing)
1049 : return;
1050 :
1051 : /* If there are a lot of temp slots, don't do anything unless
1052 : high levels of optimization. */
1053 97224 : if (! flag_expensive_optimizations)
1054 428140 : for (p = avail_temp_slots, num_slots = 0; p; p = p->next, num_slots++)
1055 418993 : if (num_slots > 100 || (num_slots > 10 && optimize == 0))
1056 : return;
1057 :
1058 271989 : for (p = avail_temp_slots; p; p = next)
1059 : {
1060 187293 : int delete_p = 0;
1061 :
1062 187293 : next = p->next;
1063 :
1064 187293 : if (GET_MODE (p->slot) != BLKmode)
1065 163643 : continue;
1066 :
1067 29650 : for (q = p->next; q; q = next_q)
1068 : {
1069 6010 : int delete_q = 0;
1070 :
1071 6010 : next_q = q->next;
1072 :
1073 6010 : if (GET_MODE (q->slot) != BLKmode)
1074 5250 : continue;
1075 :
1076 760 : if (known_eq (p->base_offset + p->full_size, q->base_offset))
1077 : {
1078 : /* Q comes after P; combine Q into P. */
1079 687 : p->size += q->size;
1080 29650 : p->full_size += q->full_size;
1081 : delete_q = 1;
1082 : }
1083 73 : else if (known_eq (q->base_offset + q->full_size, p->base_offset))
1084 : {
1085 : /* P comes after Q; combine P into Q. */
1086 10 : q->size += p->size;
1087 10 : q->full_size += p->full_size;
1088 : delete_p = 1;
1089 : break;
1090 : }
1091 687 : if (delete_q)
1092 1374 : cut_slot_from_list (q, &avail_temp_slots);
1093 : }
1094 :
1095 : /* Either delete P or advance past it. */
1096 23650 : if (delete_p)
1097 20 : cut_slot_from_list (p, &avail_temp_slots);
1098 : }
1099 : }
1100 :
1101 : /* Indicate that NEW_RTX is an alternate way of referring to the temp
1102 : slot that previously was known by OLD_RTX. */
1103 :
1104 : void
1105 16425087 : update_temp_slot_address (rtx old_rtx, rtx new_rtx)
1106 : {
1107 17292827 : class temp_slot *p;
1108 :
1109 17292827 : if (rtx_equal_p (old_rtx, new_rtx))
1110 : return;
1111 :
1112 5010210 : p = find_temp_slot_from_address (old_rtx);
1113 :
1114 : /* If we didn't find one, see if both OLD_RTX is a PLUS. If so, and
1115 : NEW_RTX is a register, see if one operand of the PLUS is a
1116 : temporary location. If so, NEW_RTX points into it. Otherwise,
1117 : if both OLD_RTX and NEW_RTX are a PLUS and if there is a register
1118 : in common between them. If so, try a recursive call on those
1119 : values. */
1120 5010210 : if (p == 0)
1121 : {
1122 5010172 : if (GET_CODE (old_rtx) != PLUS)
1123 : return;
1124 :
1125 953273 : if (REG_P (new_rtx))
1126 : {
1127 304135 : update_temp_slot_address (XEXP (old_rtx, 0), new_rtx);
1128 304135 : update_temp_slot_address (XEXP (old_rtx, 1), new_rtx);
1129 304135 : return;
1130 : }
1131 649138 : else if (GET_CODE (new_rtx) != PLUS)
1132 : return;
1133 :
1134 649138 : if (rtx_equal_p (XEXP (old_rtx, 0), XEXP (new_rtx, 0)))
1135 342444 : update_temp_slot_address (XEXP (old_rtx, 1), XEXP (new_rtx, 1));
1136 306694 : else if (rtx_equal_p (XEXP (old_rtx, 1), XEXP (new_rtx, 0)))
1137 0 : update_temp_slot_address (XEXP (old_rtx, 0), XEXP (new_rtx, 1));
1138 306694 : else if (rtx_equal_p (XEXP (old_rtx, 0), XEXP (new_rtx, 1)))
1139 32429 : update_temp_slot_address (XEXP (old_rtx, 1), XEXP (new_rtx, 0));
1140 274265 : else if (rtx_equal_p (XEXP (old_rtx, 1), XEXP (new_rtx, 1)))
1141 188732 : update_temp_slot_address (XEXP (old_rtx, 0), XEXP (new_rtx, 0));
1142 :
1143 : return;
1144 : }
1145 :
1146 : /* Otherwise add an alias for the temp's address. */
1147 38 : insert_temp_slot_address (new_rtx, p);
1148 : }
1149 :
1150 : /* If X could be a reference to a temporary slot, mark that slot as
1151 : belonging to the to one level higher than the current level. If X
1152 : matched one of our slots, just mark that one. Otherwise, we can't
1153 : easily predict which it is, so upgrade all of them.
1154 :
1155 : This is called when an ({...}) construct occurs and a statement
1156 : returns a value in memory. */
1157 :
1158 : void
1159 23944625 : preserve_temp_slots (rtx x)
1160 : {
1161 23944625 : class temp_slot *p = 0, *next;
1162 :
1163 23944625 : if (x == 0)
1164 : return;
1165 :
1166 : /* If X is a register that is being used as a pointer, see if we have
1167 : a temporary slot we know it points to. */
1168 10673031 : if (REG_P (x) && REG_POINTER (x))
1169 1753649 : p = find_temp_slot_from_address (x);
1170 :
1171 : /* If X is not in memory or is at a constant address, it cannot be in
1172 : a temporary slot. */
1173 10673031 : if (p == 0 && (!MEM_P (x) || CONSTANT_P (XEXP (x, 0))))
1174 : return;
1175 :
1176 : /* First see if we can find a match. */
1177 670642 : if (p == 0)
1178 670642 : p = find_temp_slot_from_address (XEXP (x, 0));
1179 :
1180 670642 : if (p != 0)
1181 : {
1182 1226 : if (p->level == temp_slot_level)
1183 1226 : move_slot_to_level (p, temp_slot_level - 1);
1184 1226 : return;
1185 : }
1186 :
1187 : /* Otherwise, preserve all non-kept slots at this level. */
1188 669422 : for (p = *temp_slots_at_level (temp_slot_level); p; p = next)
1189 : {
1190 6 : next = p->next;
1191 6 : move_slot_to_level (p, temp_slot_level - 1);
1192 : }
1193 : }
1194 :
1195 : /* Free all temporaries used so far. This is normally called at the
1196 : end of generating code for a statement. */
1197 :
1198 : void
1199 61776590 : free_temp_slots (void)
1200 : {
1201 61776590 : class temp_slot *p, *next;
1202 61776590 : bool some_available = false;
1203 :
1204 61923746 : for (p = *temp_slots_at_level (temp_slot_level); p; p = next)
1205 : {
1206 147156 : next = p->next;
1207 147156 : make_slot_available (p);
1208 147156 : some_available = true;
1209 : }
1210 :
1211 61776590 : if (some_available)
1212 : {
1213 139735 : remove_unused_temp_slot_addresses ();
1214 139735 : combine_temp_slots ();
1215 : }
1216 61776590 : }
1217 :
1218 : /* Push deeper into the nesting level for stack temporaries. */
1219 :
1220 : void
1221 30257098 : push_temp_slots (void)
1222 : {
1223 30257098 : temp_slot_level++;
1224 30257098 : }
1225 :
1226 : /* Pop a temporary nesting level. All slots in use in the current level
1227 : are freed. */
1228 :
1229 : void
1230 30257096 : pop_temp_slots (void)
1231 : {
1232 30257096 : free_temp_slots ();
1233 30257096 : temp_slot_level--;
1234 30257096 : }
1235 :
1236 : /* Initialize temporary slots. */
1237 :
1238 : void
1239 3183101 : init_temp_slots (void)
1240 : {
1241 : /* We have not allocated any temporaries yet. */
1242 3183101 : avail_temp_slots = 0;
1243 3183101 : vec_alloc (used_temp_slots, 0);
1244 3183101 : temp_slot_level = 0;
1245 3183101 : n_temp_slots_in_use = 0;
1246 :
1247 : /* Set up the table to map addresses to temp slots. */
1248 3183101 : if (! temp_slot_address_table)
1249 211494 : temp_slot_address_table = hash_table<temp_address_hasher>::create_ggc (32);
1250 : else
1251 2971607 : temp_slot_address_table->empty ();
1252 3183101 : }
1253 :
1254 : /* Functions and data structures to keep track of the values hard regs
1255 : had at the start of the function. */
1256 :
1257 : /* Private type used by get_hard_reg_initial_reg, get_hard_reg_initial_val,
1258 : and has_hard_reg_initial_val.. */
1259 : struct GTY(()) initial_value_pair {
1260 : rtx hard_reg;
1261 : rtx pseudo;
1262 : };
1263 : /* ??? This could be a VEC but there is currently no way to define an
1264 : opaque VEC type. This could be worked around by defining struct
1265 : initial_value_pair in function.h. */
1266 : struct GTY(()) initial_value_struct {
1267 : int num_entries;
1268 : int max_entries;
1269 : initial_value_pair * GTY ((length ("%h.num_entries"))) entries;
1270 : };
1271 :
1272 : /* If a pseudo represents an initial hard reg (or expression), return
1273 : it, else return NULL_RTX. */
1274 :
1275 : rtx
1276 0 : get_hard_reg_initial_reg (rtx reg)
1277 : {
1278 0 : struct initial_value_struct *ivs = crtl->hard_reg_initial_vals;
1279 0 : int i;
1280 :
1281 0 : if (ivs == 0)
1282 : return NULL_RTX;
1283 :
1284 0 : for (i = 0; i < ivs->num_entries; i++)
1285 0 : if (rtx_equal_p (ivs->entries[i].pseudo, reg))
1286 0 : return ivs->entries[i].hard_reg;
1287 :
1288 : return NULL_RTX;
1289 : }
1290 :
1291 : /* Make sure that there's a pseudo register of mode MODE that stores the
1292 : initial value of hard register REGNO. Return an rtx for such a pseudo. */
1293 :
1294 : rtx
1295 0 : get_hard_reg_initial_val (machine_mode mode, unsigned int regno)
1296 : {
1297 0 : struct initial_value_struct *ivs;
1298 0 : rtx rv;
1299 :
1300 0 : rv = has_hard_reg_initial_val (mode, regno);
1301 0 : if (rv)
1302 : return rv;
1303 :
1304 0 : ivs = crtl->hard_reg_initial_vals;
1305 0 : if (ivs == 0)
1306 : {
1307 0 : ivs = ggc_alloc<initial_value_struct> ();
1308 0 : ivs->num_entries = 0;
1309 0 : ivs->max_entries = 5;
1310 0 : ivs->entries = ggc_vec_alloc<initial_value_pair> (5);
1311 0 : crtl->hard_reg_initial_vals = ivs;
1312 : }
1313 :
1314 0 : if (ivs->num_entries >= ivs->max_entries)
1315 : {
1316 0 : ivs->max_entries += 5;
1317 0 : ivs->entries = GGC_RESIZEVEC (initial_value_pair, ivs->entries,
1318 : ivs->max_entries);
1319 : }
1320 :
1321 0 : ivs->entries[ivs->num_entries].hard_reg = gen_rtx_REG (mode, regno);
1322 0 : ivs->entries[ivs->num_entries].pseudo = gen_reg_rtx (mode);
1323 :
1324 0 : return ivs->entries[ivs->num_entries++].pseudo;
1325 : }
1326 :
1327 : /* See if get_hard_reg_initial_val has been used to create a pseudo
1328 : for the initial value of hard register REGNO in mode MODE. Return
1329 : the associated pseudo if so, otherwise return NULL. */
1330 :
1331 : rtx
1332 0 : has_hard_reg_initial_val (machine_mode mode, unsigned int regno)
1333 : {
1334 0 : struct initial_value_struct *ivs;
1335 0 : int i;
1336 :
1337 0 : ivs = crtl->hard_reg_initial_vals;
1338 0 : if (ivs != 0)
1339 0 : for (i = 0; i < ivs->num_entries; i++)
1340 0 : if (GET_MODE (ivs->entries[i].hard_reg) == mode
1341 0 : && REGNO (ivs->entries[i].hard_reg) == regno)
1342 0 : return ivs->entries[i].pseudo;
1343 :
1344 : return NULL_RTX;
1345 : }
1346 :
1347 : void
1348 1480895 : emit_initial_value_sets (void)
1349 : {
1350 1480895 : struct initial_value_struct *ivs = crtl->hard_reg_initial_vals;
1351 1480895 : int i;
1352 1480895 : rtx_insn *seq;
1353 :
1354 1480895 : if (ivs == 0)
1355 : return;
1356 :
1357 0 : start_sequence ();
1358 0 : for (i = 0; i < ivs->num_entries; i++)
1359 0 : emit_move_insn (ivs->entries[i].pseudo, ivs->entries[i].hard_reg);
1360 0 : seq = end_sequence ();
1361 :
1362 0 : emit_insn_at_entry (seq);
1363 : }
1364 :
1365 : /* Return the hardreg-pseudoreg initial values pair entry I and
1366 : TRUE if I is a valid entry, or FALSE if I is not a valid entry. */
1367 : bool
1368 0 : initial_value_entry (int i, rtx *hreg, rtx *preg)
1369 : {
1370 0 : struct initial_value_struct *ivs = crtl->hard_reg_initial_vals;
1371 0 : if (!ivs || i >= ivs->num_entries)
1372 : return false;
1373 :
1374 0 : *hreg = ivs->entries[i].hard_reg;
1375 0 : *preg = ivs->entries[i].pseudo;
1376 0 : return true;
1377 : }
1378 :
1379 : /* These routines are responsible for converting virtual register references
1380 : to the actual hard register references once RTL generation is complete.
1381 :
1382 : The following four variables are used for communication between the
1383 : routines. They contain the offsets of the virtual registers from their
1384 : respective hard registers. */
1385 :
1386 : static poly_int64 in_arg_offset;
1387 : static poly_int64 var_offset;
1388 : static poly_int64 dynamic_offset;
1389 : static poly_int64 out_arg_offset;
1390 : static poly_int64 cfa_offset;
1391 :
1392 : /* In most machines, the stack pointer register is equivalent to the bottom
1393 : of the stack. */
1394 :
1395 : #ifndef STACK_POINTER_OFFSET
1396 : #define STACK_POINTER_OFFSET 0
1397 : #endif
1398 :
1399 : #if defined (REG_PARM_STACK_SPACE) && !defined (INCOMING_REG_PARM_STACK_SPACE)
1400 : #define INCOMING_REG_PARM_STACK_SPACE REG_PARM_STACK_SPACE
1401 : #endif
1402 :
1403 : /* If not defined, pick an appropriate default for the offset of dynamically
1404 : allocated memory depending on the value of ACCUMULATE_OUTGOING_ARGS,
1405 : INCOMING_REG_PARM_STACK_SPACE, and OUTGOING_REG_PARM_STACK_SPACE. */
1406 :
1407 : #ifndef STACK_DYNAMIC_OFFSET
1408 :
1409 : /* The bottom of the stack points to the actual arguments. If
1410 : REG_PARM_STACK_SPACE is defined, this includes the space for the register
1411 : parameters. However, if OUTGOING_REG_PARM_STACK space is not defined,
1412 : stack space for register parameters is not pushed by the caller, but
1413 : rather part of the fixed stack areas and hence not included in
1414 : `crtl->outgoing_args_size'. Nevertheless, we must allow
1415 : for it when allocating stack dynamic objects. */
1416 :
1417 : #ifdef INCOMING_REG_PARM_STACK_SPACE
1418 : #define STACK_DYNAMIC_OFFSET(FNDECL) \
1419 : ((ACCUMULATE_OUTGOING_ARGS \
1420 : ? (crtl->outgoing_args_size \
1421 : + (OUTGOING_REG_PARM_STACK_SPACE ((!(FNDECL) ? NULL_TREE : TREE_TYPE (FNDECL))) ? 0 \
1422 : : INCOMING_REG_PARM_STACK_SPACE (FNDECL))) \
1423 : : 0) + (STACK_POINTER_OFFSET))
1424 : #else
1425 : #define STACK_DYNAMIC_OFFSET(FNDECL) \
1426 : ((ACCUMULATE_OUTGOING_ARGS ? crtl->outgoing_args_size : poly_int64 (0)) \
1427 : + (STACK_POINTER_OFFSET))
1428 : #endif
1429 : #endif
1430 :
1431 :
1432 : /* Given a piece of RTX and a pointer to a HOST_WIDE_INT, if the RTX
1433 : is a virtual register, return the equivalent hard register and set the
1434 : offset indirectly through the pointer. Otherwise, return 0. */
1435 :
1436 : static rtx
1437 364520938 : instantiate_new_reg (rtx x, poly_int64 *poffset)
1438 : {
1439 364520938 : rtx new_rtx;
1440 364520938 : poly_int64 offset;
1441 :
1442 364520938 : if (x == virtual_incoming_args_rtx)
1443 : {
1444 3749432 : if (stack_realign_drap)
1445 : {
1446 : /* Replace virtual_incoming_args_rtx with internal arg
1447 : pointer if DRAP is used to realign stack. */
1448 16465 : new_rtx = crtl->args.internal_arg_pointer;
1449 16465 : offset = 0;
1450 : }
1451 : else
1452 3732967 : new_rtx = arg_pointer_rtx, offset = in_arg_offset;
1453 : }
1454 360771506 : else if (x == virtual_stack_vars_rtx)
1455 18236384 : new_rtx = frame_pointer_rtx, offset = var_offset;
1456 342535122 : else if (x == virtual_stack_dynamic_rtx)
1457 53067 : new_rtx = stack_pointer_rtx, offset = dynamic_offset;
1458 342482055 : else if (x == virtual_outgoing_args_rtx)
1459 1566876 : new_rtx = stack_pointer_rtx, offset = out_arg_offset;
1460 340915179 : else if (x == virtual_cfa_rtx)
1461 : {
1462 : #ifdef FRAME_POINTER_CFA_OFFSET
1463 : new_rtx = frame_pointer_rtx;
1464 : #else
1465 1802 : new_rtx = arg_pointer_rtx;
1466 : #endif
1467 1802 : offset = cfa_offset;
1468 : }
1469 340913377 : else if (x == virtual_preferred_stack_boundary_rtx)
1470 : {
1471 112596 : new_rtx = GEN_INT (crtl->preferred_stack_boundary / BITS_PER_UNIT);
1472 112596 : offset = 0;
1473 : }
1474 : else
1475 : return NULL_RTX;
1476 :
1477 23720157 : *poffset = offset;
1478 23720157 : return new_rtx;
1479 : }
1480 :
1481 : /* A subroutine of instantiate_virtual_regs. Instantiate any virtual
1482 : registers present inside of *LOC. The expression is simplified,
1483 : as much as possible, but is not to be considered "valid" in any sense
1484 : implied by the target. Return true if any change is made. */
1485 :
1486 : static bool
1487 206359020 : instantiate_virtual_regs_in_rtx (rtx *loc)
1488 : {
1489 206359020 : if (!*loc)
1490 : return false;
1491 93177017 : bool changed = false;
1492 93177017 : subrtx_ptr_iterator::array_type array;
1493 348297443 : FOR_EACH_SUBRTX_PTR (iter, array, loc, NONCONST)
1494 : {
1495 255120426 : rtx *loc = *iter;
1496 255120426 : if (rtx x = *loc)
1497 : {
1498 231495764 : rtx new_rtx;
1499 231495764 : poly_int64 offset;
1500 231495764 : switch (GET_CODE (x))
1501 : {
1502 35625716 : case REG:
1503 35625716 : new_rtx = instantiate_new_reg (x, &offset);
1504 35625716 : if (new_rtx)
1505 : {
1506 1654933 : *loc = plus_constant (GET_MODE (x), new_rtx, offset);
1507 1654933 : changed = true;
1508 : }
1509 35625716 : iter.skip_subrtxes ();
1510 35625716 : break;
1511 :
1512 27936827 : case PLUS:
1513 27936827 : new_rtx = instantiate_new_reg (XEXP (x, 0), &offset);
1514 27936827 : if (new_rtx)
1515 : {
1516 18976805 : XEXP (x, 0) = new_rtx;
1517 18976805 : *loc = plus_constant (GET_MODE (x), x, offset, true);
1518 18976805 : changed = true;
1519 18976805 : iter.skip_subrtxes ();
1520 18976805 : break;
1521 : }
1522 :
1523 : /* FIXME -- from old code */
1524 : /* If we have (plus (subreg (virtual-reg)) (const_int)), we know
1525 : we can commute the PLUS and SUBREG because pointers into the
1526 : frame are well-behaved. */
1527 : break;
1528 :
1529 : default:
1530 : break;
1531 : }
1532 : }
1533 : }
1534 93177017 : return changed;
1535 93177017 : }
1536 :
1537 : /* A subroutine of instantiate_virtual_regs_in_insn. Return true if X
1538 : matches the predicate for insn CODE operand OPERAND. */
1539 :
1540 : static bool
1541 29772337 : safe_insn_predicate (int code, int operand, rtx x)
1542 : {
1543 29772337 : return code < 0 || insn_operand_matches ((enum insn_code) code, operand, x);
1544 : }
1545 :
1546 : /* A subroutine of instantiate_virtual_regs. Instantiate any virtual
1547 : registers present inside of insn. The result will be a valid insn. */
1548 :
1549 : static void
1550 93014325 : instantiate_virtual_regs_in_insn (rtx_insn *insn)
1551 : {
1552 93014325 : poly_int64 offset;
1553 93014325 : int insn_code, i;
1554 93014325 : bool any_change = false;
1555 93014325 : rtx set, new_rtx, x;
1556 93014325 : rtx_insn *seq;
1557 :
1558 : /* There are some special cases to be handled first. */
1559 93014325 : set = single_set (insn);
1560 93014325 : if (set)
1561 : {
1562 : /* We're allowed to assign to a virtual register. This is interpreted
1563 : to mean that the underlying register gets assigned the inverse
1564 : transformation. This is used, for example, in the handling of
1565 : non-local gotos. */
1566 88801976 : new_rtx = instantiate_new_reg (SET_DEST (set), &offset);
1567 88801976 : if (new_rtx)
1568 : {
1569 0 : start_sequence ();
1570 :
1571 0 : instantiate_virtual_regs_in_rtx (&SET_SRC (set));
1572 0 : x = simplify_gen_binary (PLUS, GET_MODE (new_rtx), SET_SRC (set),
1573 0 : gen_int_mode (-offset, GET_MODE (new_rtx)));
1574 0 : x = force_operand (x, new_rtx);
1575 0 : if (x != new_rtx)
1576 0 : emit_move_insn (new_rtx, x);
1577 :
1578 0 : seq = end_sequence ();
1579 :
1580 0 : emit_insn_before (seq, insn);
1581 0 : delete_insn (insn);
1582 18687 : return;
1583 : }
1584 :
1585 : /* Handle a straight copy from a virtual register by generating a
1586 : new add insn. The difference between this and falling through
1587 : to the generic case is avoiding a new pseudo and eliminating a
1588 : move insn in the initial rtl stream. */
1589 88801976 : new_rtx = instantiate_new_reg (SET_SRC (set), &offset);
1590 88801976 : if (new_rtx
1591 321760 : && maybe_ne (offset, 0)
1592 3841 : && REG_P (SET_DEST (set))
1593 88805817 : && REGNO (SET_DEST (set)) > LAST_VIRTUAL_REGISTER)
1594 : {
1595 3841 : start_sequence ();
1596 :
1597 3841 : x = expand_simple_binop (GET_MODE (SET_DEST (set)), PLUS, new_rtx,
1598 : gen_int_mode (offset,
1599 3841 : GET_MODE (SET_DEST (set))),
1600 : SET_DEST (set), 1, OPTAB_LIB_WIDEN);
1601 3841 : if (x != SET_DEST (set))
1602 0 : emit_move_insn (SET_DEST (set), x);
1603 :
1604 3841 : seq = end_sequence ();
1605 :
1606 3841 : emit_insn_before (seq, insn);
1607 3841 : delete_insn (insn);
1608 3841 : return;
1609 : }
1610 :
1611 88798135 : extract_insn (insn);
1612 88798135 : insn_code = INSN_CODE (insn);
1613 :
1614 : /* Handle a plus involving a virtual register by determining if the
1615 : operands remain valid if they're modified in place. */
1616 88798135 : poly_int64 delta;
1617 88798135 : if (GET_CODE (SET_SRC (set)) == PLUS
1618 10094312 : && recog_data.n_operands >= 3
1619 10028954 : && recog_data.operand_loc[1] == &XEXP (SET_SRC (set), 0)
1620 10028399 : && recog_data.operand_loc[2] == &XEXP (SET_SRC (set), 1)
1621 10028399 : && poly_int_rtx_p (recog_data.operand[2], &delta)
1622 96711063 : && (new_rtx = instantiate_new_reg (recog_data.operand[1], &offset)))
1623 : {
1624 2351599 : offset += delta;
1625 :
1626 : /* If the sum is zero, then replace with a plain move. */
1627 2351599 : if (known_eq (offset, 0)
1628 14846 : && REG_P (SET_DEST (set))
1629 2366445 : && REGNO (SET_DEST (set)) > LAST_VIRTUAL_REGISTER)
1630 : {
1631 14846 : start_sequence ();
1632 14846 : emit_move_insn (SET_DEST (set), new_rtx);
1633 14846 : seq = end_sequence ();
1634 :
1635 14846 : emit_insn_before (seq, insn);
1636 14846 : delete_insn (insn);
1637 14846 : return;
1638 : }
1639 :
1640 2336753 : x = gen_int_mode (offset, recog_data.operand_mode[2]);
1641 :
1642 : /* Using validate_change and apply_change_group here leaves
1643 : recog_data in an invalid state. Since we know exactly what
1644 : we want to check, do those two by hand. */
1645 2336753 : if (safe_insn_predicate (insn_code, 1, new_rtx)
1646 2336753 : && safe_insn_predicate (insn_code, 2, x))
1647 : {
1648 2308604 : *recog_data.operand_loc[1] = recog_data.operand[1] = new_rtx;
1649 2308604 : *recog_data.operand_loc[2] = recog_data.operand[2] = x;
1650 2308604 : any_change = true;
1651 :
1652 : /* Fall through into the regular operand fixup loop in
1653 : order to take care of operands other than 1 and 2. */
1654 : }
1655 : }
1656 : }
1657 : else
1658 : {
1659 4212349 : extract_insn (insn);
1660 4212349 : insn_code = INSN_CODE (insn);
1661 : }
1662 :
1663 : /* In the general case, we expect virtual registers to appear only in
1664 : operands, and then only as either bare registers or inside memories. */
1665 294567133 : for (i = 0; i < recog_data.n_operands; ++i)
1666 : {
1667 201571495 : x = recog_data.operand[i];
1668 201571495 : switch (GET_CODE (x))
1669 : {
1670 29089560 : case MEM:
1671 29089560 : {
1672 29089560 : rtx addr = XEXP (x, 0);
1673 :
1674 29089560 : if (!instantiate_virtual_regs_in_rtx (&addr))
1675 16733600 : continue;
1676 :
1677 12355960 : start_sequence ();
1678 12355960 : x = replace_equiv_address (x, addr, true);
1679 : /* It may happen that the address with the virtual reg
1680 : was valid (e.g. based on the virtual stack reg, which might
1681 : be acceptable to the predicates with all offsets), whereas
1682 : the address now isn't anymore, for instance when the address
1683 : is still offsetted, but the base reg isn't virtual-stack-reg
1684 : anymore. Below we would do a force_reg on the whole operand,
1685 : but this insn might actually only accept memory. Hence,
1686 : before doing that last resort, try to reload the address into
1687 : a register, so this operand stays a MEM. */
1688 12355960 : if (!safe_insn_predicate (insn_code, i, x))
1689 : {
1690 0 : addr = force_reg (GET_MODE (addr), addr);
1691 0 : x = replace_equiv_address (x, addr, true);
1692 : }
1693 12355960 : seq = end_sequence ();
1694 12355960 : if (seq)
1695 0 : emit_insn_before (seq, insn);
1696 : }
1697 12355960 : break;
1698 :
1699 112721514 : case REG:
1700 112721514 : new_rtx = instantiate_new_reg (x, &offset);
1701 112721514 : if (new_rtx == NULL)
1702 112306460 : continue;
1703 415054 : if (known_eq (offset, 0))
1704 : x = new_rtx;
1705 : else
1706 : {
1707 0 : start_sequence ();
1708 :
1709 : /* Careful, special mode predicates may have stuff in
1710 : insn_data[insn_code].operand[i].mode that isn't useful
1711 : to us for computing a new value. */
1712 : /* ??? Recognize address_operand and/or "p" constraints
1713 : to see if (plus new offset) is a valid before we put
1714 : this through expand_simple_binop. */
1715 0 : x = expand_simple_binop (GET_MODE (x), PLUS, new_rtx,
1716 0 : gen_int_mode (offset, GET_MODE (x)),
1717 : NULL_RTX, 1, OPTAB_LIB_WIDEN);
1718 0 : seq = end_sequence ();
1719 0 : emit_insn_before (seq, insn);
1720 : }
1721 : break;
1722 :
1723 2720001 : case SUBREG:
1724 2720001 : new_rtx = instantiate_new_reg (SUBREG_REG (x), &offset);
1725 2720001 : if (new_rtx == NULL)
1726 2719995 : continue;
1727 6 : start_sequence ();
1728 6 : if (maybe_ne (offset, 0))
1729 0 : new_rtx = expand_simple_binop
1730 0 : (GET_MODE (new_rtx), PLUS, new_rtx,
1731 0 : gen_int_mode (offset, GET_MODE (new_rtx)),
1732 : NULL_RTX, 1, OPTAB_LIB_WIDEN);
1733 12 : x = force_subreg (recog_data.operand_mode[i], new_rtx,
1734 6 : GET_MODE (new_rtx), SUBREG_BYTE (x));
1735 6 : gcc_assert (x);
1736 6 : seq = end_sequence ();
1737 6 : emit_insn_before (seq, insn);
1738 6 : break;
1739 :
1740 57040420 : default:
1741 57040420 : continue;
1742 57040420 : }
1743 :
1744 : /* At this point, X contains the new value for the operand.
1745 : Validate the new value vs the insn predicate. Note that
1746 : asm insns will have insn_code -1 here. */
1747 12771020 : if (!safe_insn_predicate (insn_code, i, x))
1748 : {
1749 56298 : start_sequence ();
1750 56298 : if (REG_P (x))
1751 : {
1752 0 : gcc_assert (REGNO (x) <= LAST_VIRTUAL_REGISTER);
1753 0 : x = copy_to_reg (x);
1754 : }
1755 : else
1756 56298 : x = force_reg (insn_data[insn_code].operand[i].mode, x);
1757 56298 : seq = end_sequence ();
1758 56298 : if (seq)
1759 56298 : emit_insn_before (seq, insn);
1760 : }
1761 :
1762 12771020 : *recog_data.operand_loc[i] = recog_data.operand[i] = x;
1763 12771020 : any_change = true;
1764 : }
1765 :
1766 92995638 : if (any_change)
1767 : {
1768 : /* Propagate operand changes into the duplicates. */
1769 14992439 : for (i = 0; i < recog_data.n_dups; ++i)
1770 87030 : *recog_data.dup_loc[i]
1771 87030 : = copy_rtx (recog_data.operand[(unsigned)recog_data.dup_num[i]]);
1772 :
1773 : /* Force re-recognition of the instruction for validation. */
1774 14905409 : INSN_CODE (insn) = -1;
1775 : }
1776 :
1777 92995638 : if (asm_noperands (PATTERN (insn)) >= 0)
1778 : {
1779 108207 : if (!check_asm_operands (PATTERN (insn)))
1780 : {
1781 23 : error_for_asm (insn, "impossible constraint in %<asm%>");
1782 : /* For asm goto, instead of fixing up all the edges
1783 : just clear the template and clear input and output operands
1784 : and strip away clobbers. */
1785 23 : if (JUMP_P (insn))
1786 : {
1787 14 : rtx asm_op = extract_asm_operands (PATTERN (insn));
1788 14 : PATTERN (insn) = asm_op;
1789 14 : PUT_MODE (asm_op, VOIDmode);
1790 14 : ASM_OPERANDS_TEMPLATE (asm_op) = ggc_strdup ("");
1791 14 : ASM_OPERANDS_OUTPUT_CONSTRAINT (asm_op) = "";
1792 14 : ASM_OPERANDS_OUTPUT_IDX (asm_op) = 0;
1793 14 : ASM_OPERANDS_INPUT_VEC (asm_op) = rtvec_alloc (0);
1794 14 : ASM_OPERANDS_INPUT_CONSTRAINT_VEC (asm_op) = rtvec_alloc (0);
1795 : }
1796 : else
1797 9 : delete_insn (insn);
1798 : }
1799 : }
1800 : else
1801 : {
1802 92887431 : if (recog_memoized (insn) < 0)
1803 0 : fatal_insn_not_found (insn);
1804 : }
1805 : }
1806 :
1807 : /* Subroutine of instantiate_decls. Given RTL representing a decl,
1808 : do any instantiation required. */
1809 :
1810 : void
1811 9593237 : instantiate_decl_rtl (rtx x)
1812 : {
1813 9599154 : rtx addr;
1814 :
1815 9599154 : if (x == 0)
1816 : return;
1817 :
1818 : /* If this is a CONCAT, recurse for the pieces. */
1819 9599154 : if (GET_CODE (x) == CONCAT)
1820 : {
1821 5917 : instantiate_decl_rtl (XEXP (x, 0));
1822 5917 : instantiate_decl_rtl (XEXP (x, 1));
1823 5917 : return;
1824 : }
1825 :
1826 : /* If this is not a MEM, no need to do anything. Similarly if the
1827 : address is a constant or a register that is not a virtual register. */
1828 9593237 : if (!MEM_P (x))
1829 : return;
1830 :
1831 3217733 : addr = XEXP (x, 0);
1832 3217733 : if (CONSTANT_P (addr)
1833 3217733 : || (REG_P (addr)
1834 302887 : && !VIRTUAL_REGISTER_P (addr)))
1835 : return;
1836 :
1837 2996395 : instantiate_virtual_regs_in_rtx (&XEXP (x, 0));
1838 : }
1839 :
1840 : /* Helper for instantiate_decls called via walk_tree: Process all decls
1841 : in the given DECL_VALUE_EXPR. */
1842 :
1843 : static tree
1844 1207171 : instantiate_expr (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
1845 : {
1846 1207171 : tree t = *tp;
1847 1207171 : if (! EXPR_P (t))
1848 : {
1849 651074 : *walk_subtrees = 0;
1850 651074 : if (DECL_P (t))
1851 : {
1852 544265 : if (DECL_RTL_SET_P (t))
1853 170642 : instantiate_decl_rtl (DECL_RTL (t));
1854 135531 : if (TREE_CODE (t) == PARM_DECL && DECL_NAMELESS (t)
1855 655654 : && DECL_INCOMING_RTL (t))
1856 111389 : instantiate_decl_rtl (DECL_INCOMING_RTL (t));
1857 337631 : if ((VAR_P (t) || TREE_CODE (t) == RESULT_DECL)
1858 551698 : && DECL_HAS_VALUE_EXPR_P (t))
1859 : {
1860 7703 : tree v = DECL_VALUE_EXPR (t);
1861 7703 : walk_tree (&v, instantiate_expr, NULL, NULL);
1862 : }
1863 : }
1864 : }
1865 1207171 : return NULL;
1866 : }
1867 :
1868 : /* Subroutine of instantiate_decls: Process all decls in the given
1869 : BLOCK node and all its subblocks. */
1870 :
1871 : static void
1872 16219898 : instantiate_decls_1 (tree let)
1873 : {
1874 16219898 : tree t;
1875 :
1876 34958358 : for (t = BLOCK_VARS (let); t; t = DECL_CHAIN (t))
1877 : {
1878 18738460 : if (DECL_RTL_SET_P (t))
1879 2265654 : instantiate_decl_rtl (DECL_RTL (t));
1880 18738460 : if (VAR_P (t) && DECL_HAS_VALUE_EXPR_P (t))
1881 : {
1882 283306 : tree v = DECL_VALUE_EXPR (t);
1883 283306 : walk_tree (&v, instantiate_expr, NULL, NULL);
1884 : }
1885 : }
1886 :
1887 : /* Process all subblocks. */
1888 30959684 : for (t = BLOCK_SUBBLOCKS (let); t; t = BLOCK_CHAIN (t))
1889 14739786 : instantiate_decls_1 (t);
1890 16219898 : }
1891 :
1892 : /* Scan all decls in FNDECL (both variables and parameters) and instantiate
1893 : all virtual registers in their DECL_RTL's. */
1894 :
1895 : static void
1896 1480112 : instantiate_decls (tree fndecl)
1897 : {
1898 1480112 : tree decl;
1899 1480112 : unsigned ix;
1900 :
1901 : /* Process all parameters of the function. */
1902 4591008 : for (decl = DECL_ARGUMENTS (fndecl); decl; decl = DECL_CHAIN (decl))
1903 : {
1904 3110896 : instantiate_decl_rtl (DECL_RTL (decl));
1905 3110896 : instantiate_decl_rtl (DECL_INCOMING_RTL (decl));
1906 3110896 : if (DECL_HAS_VALUE_EXPR_P (decl))
1907 : {
1908 129 : tree v = DECL_VALUE_EXPR (decl);
1909 129 : walk_tree (&v, instantiate_expr, NULL, NULL);
1910 : }
1911 : }
1912 :
1913 1480112 : if ((decl = DECL_RESULT (fndecl))
1914 1480112 : && TREE_CODE (decl) == RESULT_DECL)
1915 : {
1916 1480112 : if (DECL_RTL_SET_P (decl))
1917 788769 : instantiate_decl_rtl (DECL_RTL (decl));
1918 1480112 : if (DECL_HAS_VALUE_EXPR_P (decl))
1919 : {
1920 69511 : tree v = DECL_VALUE_EXPR (decl);
1921 69511 : walk_tree (&v, instantiate_expr, NULL, NULL);
1922 : }
1923 : }
1924 :
1925 : /* Process the saved static chain if it exists. */
1926 1480112 : decl = DECL_STRUCT_FUNCTION (fndecl)->static_chain_decl;
1927 1480112 : if (decl && DECL_HAS_VALUE_EXPR_P (decl))
1928 3923 : instantiate_decl_rtl (DECL_RTL (DECL_VALUE_EXPR (decl)));
1929 :
1930 : /* Now process all variables defined in the function or its subblocks. */
1931 1480112 : if (DECL_INITIAL (fndecl))
1932 1480112 : instantiate_decls_1 (DECL_INITIAL (fndecl));
1933 :
1934 2784763 : FOR_EACH_LOCAL_DECL (cfun, ix, decl)
1935 65140 : if (DECL_RTL_SET_P (decl))
1936 25151 : instantiate_decl_rtl (DECL_RTL (decl));
1937 1480112 : vec_free (cfun->local_decls);
1938 1480112 : }
1939 :
1940 : /* Return the value of STACK_DYNAMIC_OFFSET for the current function.
1941 : This is done through a function wrapper so that the macro sees a
1942 : predictable set of included files. */
1943 :
1944 : poly_int64
1945 1480112 : get_stack_dynamic_offset ()
1946 : {
1947 1480112 : return STACK_DYNAMIC_OFFSET (current_function_decl);
1948 : }
1949 :
1950 : /* Pass through the INSNS of function FNDECL and convert virtual register
1951 : references to hard register references. */
1952 :
1953 : static void
1954 1480112 : instantiate_virtual_regs (void)
1955 : {
1956 1480112 : rtx_insn *insn;
1957 :
1958 : /* Compute the offsets to use for this function. */
1959 1480112 : in_arg_offset = FIRST_PARM_OFFSET (current_function_decl);
1960 1480112 : var_offset = targetm.starting_frame_offset ();
1961 1480112 : dynamic_offset = get_stack_dynamic_offset ();
1962 1480112 : out_arg_offset = STACK_POINTER_OFFSET;
1963 : #ifdef FRAME_POINTER_CFA_OFFSET
1964 : cfa_offset = FRAME_POINTER_CFA_OFFSET (current_function_decl);
1965 : #else
1966 1480112 : cfa_offset = ARG_POINTER_CFA_OFFSET (current_function_decl);
1967 : #endif
1968 :
1969 : /* Initialize recognition, indicating that volatile is OK. */
1970 1480112 : init_recog ();
1971 :
1972 : /* Scan through all the insns, instantiating every virtual register still
1973 : present. */
1974 173201947 : for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
1975 171721835 : if (INSN_P (insn))
1976 : {
1977 : /* These patterns in the instruction stream can never be recognized.
1978 : Fortunately, they shouldn't contain virtual registers either. */
1979 154728706 : if (GET_CODE (PATTERN (insn)) == USE
1980 141934544 : || GET_CODE (PATTERN (insn)) == CLOBBER
1981 141765151 : || GET_CODE (PATTERN (insn)) == ASM_INPUT
1982 284460370 : || DEBUG_MARKER_INSN_P (insn))
1983 12031603 : continue;
1984 130665500 : else if (DEBUG_BIND_INSN_P (insn))
1985 37651175 : instantiate_virtual_regs_in_rtx (INSN_VAR_LOCATION_PTR (insn));
1986 : else
1987 93014325 : instantiate_virtual_regs_in_insn (insn);
1988 :
1989 130665500 : if (insn->deleted ())
1990 18696 : continue;
1991 :
1992 130646804 : instantiate_virtual_regs_in_rtx (®_NOTES (insn));
1993 :
1994 : /* Instantiate any virtual registers in CALL_INSN_FUNCTION_USAGE. */
1995 130646804 : if (CALL_P (insn))
1996 5975086 : instantiate_virtual_regs_in_rtx (&CALL_INSN_FUNCTION_USAGE (insn));
1997 : }
1998 :
1999 : /* Instantiate the virtual registers in the DECLs for debugging purposes. */
2000 1480112 : instantiate_decls (current_function_decl);
2001 :
2002 1480112 : targetm.instantiate_decls ();
2003 :
2004 : /* Indicate that, from now on, assign_stack_local should use
2005 : frame_pointer_rtx. */
2006 1480112 : virtuals_instantiated = 1;
2007 1480112 : }
2008 :
2009 : namespace {
2010 :
2011 : const pass_data pass_data_instantiate_virtual_regs =
2012 : {
2013 : RTL_PASS, /* type */
2014 : "vregs", /* name */
2015 : OPTGROUP_NONE, /* optinfo_flags */
2016 : TV_NONE, /* tv_id */
2017 : 0, /* properties_required */
2018 : 0, /* properties_provided */
2019 : 0, /* properties_destroyed */
2020 : 0, /* todo_flags_start */
2021 : 0, /* todo_flags_finish */
2022 : };
2023 :
2024 : class pass_instantiate_virtual_regs : public rtl_opt_pass
2025 : {
2026 : public:
2027 288775 : pass_instantiate_virtual_regs (gcc::context *ctxt)
2028 577550 : : rtl_opt_pass (pass_data_instantiate_virtual_regs, ctxt)
2029 : {}
2030 :
2031 : /* opt_pass methods: */
2032 1480112 : unsigned int execute (function *) final override
2033 : {
2034 1480112 : instantiate_virtual_regs ();
2035 1480112 : return 0;
2036 : }
2037 :
2038 : }; // class pass_instantiate_virtual_regs
2039 :
2040 : } // anon namespace
2041 :
2042 : rtl_opt_pass *
2043 288775 : make_pass_instantiate_virtual_regs (gcc::context *ctxt)
2044 : {
2045 288775 : return new pass_instantiate_virtual_regs (ctxt);
2046 : }
2047 :
2048 :
2049 : /* Return true if EXP is an aggregate type (or a value with aggregate type).
2050 : This means a type for which function calls must pass an address to the
2051 : function or get an address back from the function.
2052 : EXP may be a type node or an expression (whose type is tested). */
2053 :
2054 : bool
2055 147045154 : aggregate_value_p (const_tree exp, const_tree fntype)
2056 : {
2057 147058399 : const_tree type = (TYPE_P (exp)) ? exp : TREE_TYPE (exp);
2058 147058399 : int i, regno, nregs;
2059 147058399 : rtx reg;
2060 :
2061 147058399 : if (fntype)
2062 146741015 : switch (TREE_CODE (fntype))
2063 : {
2064 11747725 : case CALL_EXPR:
2065 11747725 : {
2066 11747725 : tree fndecl = get_callee_fndecl (fntype);
2067 11747725 : if (fndecl)
2068 10829322 : fntype = TREE_TYPE (fndecl);
2069 918403 : else if (CALL_EXPR_FN (fntype))
2070 323512 : fntype = TREE_TYPE (TREE_TYPE (CALL_EXPR_FN (fntype)));
2071 : else
2072 : /* For internal functions, assume nothing needs to be
2073 : returned in memory. */
2074 : return false;
2075 : }
2076 : break;
2077 125115244 : case FUNCTION_DECL:
2078 125115244 : fntype = TREE_TYPE (fntype);
2079 125115244 : break;
2080 : case FUNCTION_TYPE:
2081 : case METHOD_TYPE:
2082 : break;
2083 : case IDENTIFIER_NODE:
2084 330578 : fntype = NULL_TREE;
2085 : break;
2086 0 : default:
2087 : /* We don't expect other tree types here. */
2088 0 : gcc_unreachable ();
2089 : }
2090 :
2091 146463508 : if (VOID_TYPE_P (type))
2092 : return false;
2093 :
2094 107693869 : if (error_operand_p (fntype))
2095 : return false;
2096 :
2097 : /* If a record should be passed the same as its first (and only) member
2098 : don't pass it as an aggregate. */
2099 107693868 : if (TREE_CODE (type) == RECORD_TYPE && TYPE_TRANSPARENT_AGGR (type))
2100 13245 : return aggregate_value_p (first_field (type), fntype);
2101 :
2102 : /* If the front end has decided that this needs to be passed by
2103 : reference, do so. */
2104 107680534 : if ((TREE_CODE (exp) == PARM_DECL || TREE_CODE (exp) == RESULT_DECL)
2105 196353765 : && DECL_BY_REFERENCE (exp))
2106 : return true;
2107 :
2108 : /* Function types that are TREE_ADDRESSABLE force return in memory. */
2109 107530298 : if (fntype && TREE_ADDRESSABLE (fntype))
2110 : return true;
2111 :
2112 : /* Types that are TREE_ADDRESSABLE must be constructed in memory,
2113 : and thus can't be returned in registers. */
2114 107530298 : if (TREE_ADDRESSABLE (type))
2115 : return true;
2116 :
2117 106217136 : if (TYPE_EMPTY_P (type))
2118 : return false;
2119 :
2120 105412686 : if (flag_pcc_struct_return && AGGREGATE_TYPE_P (type))
2121 : return true;
2122 :
2123 104661750 : if (targetm.calls.return_in_memory (type, fntype))
2124 : return true;
2125 :
2126 : /* Make sure we have suitable call-clobbered regs to return
2127 : the value in; if not, we must return it in memory. */
2128 99057335 : reg = hard_function_value (type, 0, fntype, 0);
2129 :
2130 : /* If we have something other than a REG (e.g. a PARALLEL), then assume
2131 : it is OK. */
2132 99057335 : if (!REG_P (reg))
2133 : return false;
2134 :
2135 : /* Use the default ABI if the type of the function isn't known.
2136 : The scheme for handling interoperability between different ABIs
2137 : requires us to be able to tell when we're calling a function with
2138 : a nondefault ABI. */
2139 98210081 : const predefined_function_abi &abi = (fntype
2140 98210081 : ? fntype_abi (fntype)
2141 284589 : : default_function_abi);
2142 98210081 : regno = REGNO (reg);
2143 98210081 : nregs = hard_regno_nregs (regno, TYPE_MODE (type));
2144 198928777 : for (i = 0; i < nregs; i++)
2145 100718696 : if (!fixed_regs[regno + i] && !abi.clobbers_full_reg_p (regno + i))
2146 : return true;
2147 :
2148 : return false;
2149 : }
2150 :
2151 : /* Return true if we should assign DECL a pseudo register; false if it
2152 : should live on the local stack. */
2153 :
2154 : bool
2155 162375331 : use_register_for_decl (const_tree decl)
2156 : {
2157 162375331 : if (TREE_CODE (decl) == SSA_NAME)
2158 : {
2159 : /* We often try to use the SSA_NAME, instead of its underlying
2160 : decl, to get type information and guide decisions, to avoid
2161 : differences of behavior between anonymous and named
2162 : variables, but in this one case we have to go for the actual
2163 : variable if there is one. The main reason is that, at least
2164 : at -O0, we want to place user variables on the stack, but we
2165 : don't mind using pseudos for anonymous or ignored temps.
2166 : Should we take the SSA_NAME, we'd conclude all SSA_NAMEs
2167 : should go in pseudos, whereas their corresponding variables
2168 : might have to go on the stack. So, disregarding the decl
2169 : here would negatively impact debug info at -O0, enable
2170 : coalescing between SSA_NAMEs that ought to get different
2171 : stack/pseudo assignments, and get the incoming argument
2172 : processing thoroughly confused by PARM_DECLs expected to live
2173 : in stack slots but assigned to pseudos. */
2174 144234026 : if (!SSA_NAME_VAR (decl))
2175 99386201 : return TYPE_MODE (TREE_TYPE (decl)) != BLKmode
2176 99386201 : && !(flag_float_store && FLOAT_TYPE_P (TREE_TYPE (decl)));
2177 :
2178 : decl = SSA_NAME_VAR (decl);
2179 : }
2180 :
2181 : /* Honor volatile. */
2182 62989130 : if (TREE_SIDE_EFFECTS (decl))
2183 : return false;
2184 :
2185 : /* Honor addressability. */
2186 62871009 : if (TREE_ADDRESSABLE (decl))
2187 : return false;
2188 :
2189 : /* RESULT_DECLs are a bit special in that they're assigned without
2190 : regard to use_register_for_decl, but we generally only store in
2191 : them. If we coalesce their SSA NAMEs, we'd better return a
2192 : result that matches the assignment in expand_function_start. */
2193 58631662 : if (TREE_CODE (decl) == RESULT_DECL)
2194 : {
2195 : /* If it's not an aggregate, we're going to use a REG or a
2196 : PARALLEL containing a REG. */
2197 3103611 : if (!aggregate_value_p (decl, current_function_decl))
2198 : return true;
2199 :
2200 : /* If expand_function_start determines the return value, we'll
2201 : use MEM if it's not by reference. */
2202 31723 : if (cfun->returns_pcc_struct
2203 63446 : || (targetm.calls.struct_value_rtx
2204 31723 : (TREE_TYPE (current_function_decl), 1)))
2205 0 : return DECL_BY_REFERENCE (decl);
2206 :
2207 : /* Otherwise, we're taking an extra all.function_result_decl
2208 : argument. It's set up in assign_parms_augmented_arg_list,
2209 : under the (negated) conditions above, and then it's used to
2210 : set up the RESULT_DECL rtl in assign_params, after looping
2211 : over all parameters. Now, if the RESULT_DECL is not by
2212 : reference, we'll use a MEM either way. */
2213 31723 : if (!DECL_BY_REFERENCE (decl))
2214 : return false;
2215 :
2216 : /* Otherwise, if RESULT_DECL is DECL_BY_REFERENCE, it will take
2217 : the function_result_decl's assignment. Since it's a pointer,
2218 : we can short-circuit a number of the tests below, and we must
2219 : duplicate them because we don't have the function_result_decl
2220 : to test. */
2221 31723 : if (!targetm.calls.allocate_stack_slots_for_args ())
2222 : return true;
2223 : /* We don't set DECL_IGNORED_P for the function_result_decl. */
2224 31723 : if (optimize)
2225 : return true;
2226 : /* Needed for [[musttail]] which can operate even at -O0 */
2227 3788 : if (cfun->tail_call_marked)
2228 : return true;
2229 : /* We don't set DECL_REGISTER for the function_result_decl. */
2230 : return false;
2231 : }
2232 :
2233 : /* Only register-like things go in registers. */
2234 55528051 : if (DECL_MODE (decl) == BLKmode)
2235 : return false;
2236 :
2237 : /* If -ffloat-store specified, don't put explicit float variables
2238 : into registers. */
2239 : /* ??? This should be checked after DECL_ARTIFICIAL, but tree-ssa
2240 : propagates values across these stores, and it probably shouldn't. */
2241 53760428 : if (flag_float_store && FLOAT_TYPE_P (TREE_TYPE (decl)))
2242 : return false;
2243 :
2244 53758628 : if (!targetm.calls.allocate_stack_slots_for_args ())
2245 : return true;
2246 :
2247 : /* If we're not interested in tracking debugging information for
2248 : this decl, then we can certainly put it in a register. */
2249 53758312 : if (DECL_IGNORED_P (decl))
2250 : return true;
2251 :
2252 36372086 : if (optimize)
2253 : return true;
2254 :
2255 : /* Thunks force a tail call even at -O0 so we need to avoid creating a
2256 : dangling reference in case the parameter is passed by reference. */
2257 7249679 : if (TREE_CODE (decl) == PARM_DECL && cfun->tail_call_marked)
2258 : return true;
2259 :
2260 7249084 : if (!DECL_REGISTER (decl))
2261 : return false;
2262 :
2263 : /* When not optimizing, disregard register keyword for types that
2264 : could have methods, otherwise the methods won't be callable from
2265 : the debugger. */
2266 11290 : if (RECORD_OR_UNION_TYPE_P (TREE_TYPE (decl)))
2267 : return false;
2268 :
2269 : return true;
2270 : }
2271 :
2272 : /* Structures to communicate between the subroutines of assign_parms.
2273 : The first holds data persistent across all parameters, the second
2274 : is cleared out for each parameter. */
2275 :
2276 : struct assign_parm_data_all
2277 : {
2278 : /* When INIT_CUMULATIVE_ARGS gets revamped, allocating CUMULATIVE_ARGS
2279 : should become a job of the target or otherwise encapsulated. */
2280 : CUMULATIVE_ARGS args_so_far_v;
2281 : cumulative_args_t args_so_far;
2282 : struct args_size stack_args_size;
2283 : tree function_result_decl;
2284 : tree orig_fnargs;
2285 : rtx_insn *first_conversion_insn;
2286 : rtx_insn *last_conversion_insn;
2287 : HOST_WIDE_INT pretend_args_size;
2288 : HOST_WIDE_INT extra_pretend_bytes;
2289 : int reg_parm_stack_space;
2290 : };
2291 :
2292 18335254 : struct assign_parm_data_one
2293 : {
2294 : tree nominal_type;
2295 : function_arg_info arg;
2296 : rtx entry_parm;
2297 : rtx stack_parm;
2298 : machine_mode nominal_mode;
2299 : machine_mode passed_mode;
2300 : struct locate_and_pad_arg_data locate;
2301 : int partial;
2302 : };
2303 :
2304 : /* A subroutine of assign_parms. Initialize ALL. */
2305 :
2306 : static void
2307 4380375 : assign_parms_initialize_all (struct assign_parm_data_all *all)
2308 : {
2309 4380375 : tree fntype ATTRIBUTE_UNUSED;
2310 :
2311 4380375 : memset (all, 0, sizeof (*all));
2312 :
2313 4380375 : fntype = TREE_TYPE (current_function_decl);
2314 :
2315 : #ifdef INIT_CUMULATIVE_INCOMING_ARGS
2316 : INIT_CUMULATIVE_INCOMING_ARGS (all->args_so_far_v, fntype, NULL_RTX);
2317 : #else
2318 4380375 : INIT_CUMULATIVE_ARGS (all->args_so_far_v, fntype, NULL_RTX,
2319 : current_function_decl, -1);
2320 : #endif
2321 4380375 : all->args_so_far = pack_cumulative_args (&all->args_so_far_v);
2322 :
2323 : #ifdef INCOMING_REG_PARM_STACK_SPACE
2324 4380375 : all->reg_parm_stack_space
2325 4380375 : = INCOMING_REG_PARM_STACK_SPACE (current_function_decl);
2326 : #endif
2327 4380375 : }
2328 :
2329 : /* If ARGS contains entries with complex types, split the entry into two
2330 : entries of the component type. Return a new list of substitutions are
2331 : needed, else the old list. */
2332 :
2333 : static void
2334 0 : split_complex_args (vec<tree> *args)
2335 : {
2336 0 : unsigned i;
2337 0 : tree p;
2338 :
2339 0 : FOR_EACH_VEC_ELT (*args, i, p)
2340 : {
2341 0 : tree type = TREE_TYPE (p);
2342 0 : if (TREE_CODE (type) == COMPLEX_TYPE
2343 0 : && targetm.calls.split_complex_arg (type))
2344 : {
2345 0 : tree decl;
2346 0 : tree subtype = TREE_TYPE (type);
2347 0 : bool addressable = TREE_ADDRESSABLE (p);
2348 :
2349 : /* Rewrite the PARM_DECL's type with its component. */
2350 0 : p = copy_node (p);
2351 0 : TREE_TYPE (p) = subtype;
2352 0 : DECL_ARG_TYPE (p) = TREE_TYPE (DECL_ARG_TYPE (p));
2353 0 : SET_DECL_MODE (p, VOIDmode);
2354 0 : DECL_SIZE (p) = NULL;
2355 0 : DECL_SIZE_UNIT (p) = NULL;
2356 : /* If this arg must go in memory, put it in a pseudo here.
2357 : We can't allow it to go in memory as per normal parms,
2358 : because the usual place might not have the imag part
2359 : adjacent to the real part. */
2360 0 : DECL_ARTIFICIAL (p) = addressable;
2361 0 : DECL_IGNORED_P (p) = addressable;
2362 0 : TREE_ADDRESSABLE (p) = 0;
2363 0 : layout_decl (p, 0);
2364 0 : (*args)[i] = p;
2365 :
2366 : /* Build a second synthetic decl. */
2367 0 : decl = build_decl (EXPR_LOCATION (p),
2368 : PARM_DECL, NULL_TREE, subtype);
2369 0 : DECL_ARG_TYPE (decl) = DECL_ARG_TYPE (p);
2370 0 : DECL_ARTIFICIAL (decl) = addressable;
2371 0 : DECL_IGNORED_P (decl) = addressable;
2372 0 : layout_decl (decl, 0);
2373 0 : args->safe_insert (++i, decl);
2374 : }
2375 : }
2376 0 : }
2377 :
2378 : /* A subroutine of assign_parms. Adjust the parameter list to incorporate
2379 : the hidden struct return argument, and (abi willing) complex args.
2380 : Return the new parameter list. */
2381 :
2382 : static vec<tree>
2383 4380375 : assign_parms_augmented_arg_list (struct assign_parm_data_all *all)
2384 : {
2385 4380375 : tree fndecl = current_function_decl;
2386 4380375 : tree fntype = TREE_TYPE (fndecl);
2387 4380375 : vec<tree> fnargs = vNULL;
2388 4380375 : tree arg;
2389 :
2390 13353315 : for (arg = DECL_ARGUMENTS (fndecl); arg; arg = DECL_CHAIN (arg))
2391 8972940 : fnargs.safe_push (arg);
2392 :
2393 4380375 : all->orig_fnargs = DECL_ARGUMENTS (fndecl);
2394 :
2395 : /* If struct value address is treated as the first argument, make it so. */
2396 4380375 : if (aggregate_value_p (DECL_RESULT (fndecl), fndecl)
2397 194687 : && ! cfun->returns_pcc_struct
2398 4575062 : && targetm.calls.struct_value_rtx (TREE_TYPE (fndecl), 1) == 0)
2399 : {
2400 194687 : tree type = build_pointer_type (TREE_TYPE (fntype));
2401 194687 : tree decl;
2402 :
2403 194687 : decl = build_decl (DECL_SOURCE_LOCATION (fndecl),
2404 : PARM_DECL, get_identifier (".result_ptr"), type);
2405 194687 : DECL_ARG_TYPE (decl) = type;
2406 194687 : DECL_ARTIFICIAL (decl) = 1;
2407 194687 : DECL_NAMELESS (decl) = 1;
2408 194687 : TREE_CONSTANT (decl) = 1;
2409 : /* We don't set DECL_IGNORED_P or DECL_REGISTER here. If this
2410 : changes, the end of the RESULT_DECL handling block in
2411 : use_register_for_decl must be adjusted to match. */
2412 :
2413 194687 : DECL_CHAIN (decl) = all->orig_fnargs;
2414 194687 : all->orig_fnargs = decl;
2415 194687 : fnargs.safe_insert (0, decl);
2416 :
2417 194687 : all->function_result_decl = decl;
2418 : }
2419 :
2420 : /* If the target wants to split complex arguments into scalars, do so. */
2421 4380375 : if (targetm.calls.split_complex_arg)
2422 0 : split_complex_args (&fnargs);
2423 :
2424 4380375 : return fnargs;
2425 : }
2426 :
2427 : /* A subroutine of assign_parms. Examine PARM and pull out type and mode
2428 : data for the parameter. Incorporate ABI specifics such as pass-by-
2429 : reference and type promotion. */
2430 :
2431 : static void
2432 9167627 : assign_parm_find_data_types (struct assign_parm_data_all *all, tree parm,
2433 : struct assign_parm_data_one *data)
2434 : {
2435 9167627 : int unsignedp;
2436 :
2437 9167627 : *data = assign_parm_data_one ();
2438 :
2439 : /* NAMED_ARG is a misnomer. We really mean 'non-variadic'. */
2440 9167627 : if (!cfun->stdarg)
2441 9085860 : data->arg.named = 1; /* No variadic parms. */
2442 81767 : else if (DECL_CHAIN (parm))
2443 38948 : data->arg.named = 1; /* Not the last non-variadic parm. */
2444 42819 : else if (targetm.calls.strict_argument_naming (all->args_so_far))
2445 42819 : data->arg.named = 1; /* Only variadic ones are unnamed. */
2446 : else
2447 0 : data->arg.named = 0; /* Treat as variadic. */
2448 :
2449 9167627 : data->nominal_type = TREE_TYPE (parm);
2450 9167627 : data->arg.type = DECL_ARG_TYPE (parm);
2451 :
2452 : /* Look out for errors propagating this far. Also, if the parameter's
2453 : type is void then its value doesn't matter. */
2454 9167627 : if (TREE_TYPE (parm) == error_mark_node
2455 : /* This can happen after weird syntax errors
2456 : or if an enum type is defined among the parms. */
2457 9167542 : || TREE_CODE (parm) != PARM_DECL
2458 9167542 : || data->arg.type == NULL
2459 18335169 : || VOID_TYPE_P (data->nominal_type))
2460 : {
2461 85 : data->nominal_type = data->arg.type = void_type_node;
2462 85 : data->nominal_mode = data->passed_mode = data->arg.mode = VOIDmode;
2463 85 : return;
2464 : }
2465 :
2466 : /* Find mode of arg as it is passed, and mode of arg as it should be
2467 : during execution of this function. */
2468 9167542 : data->passed_mode = data->arg.mode = TYPE_MODE (data->arg.type);
2469 9167542 : data->nominal_mode = TYPE_MODE (data->nominal_type);
2470 :
2471 : /* If the parm is to be passed as a transparent union or record, use the
2472 : type of the first field for the tests below. We have already verified
2473 : that the modes are the same. */
2474 9167542 : if (RECORD_OR_UNION_TYPE_P (data->arg.type)
2475 9167542 : && TYPE_TRANSPARENT_AGGR (data->arg.type))
2476 1536 : data->arg.type = TREE_TYPE (first_field (data->arg.type));
2477 :
2478 : /* See if this arg was passed by invisible reference. */
2479 9167542 : if (apply_pass_by_reference_rules (&all->args_so_far_v, data->arg))
2480 : {
2481 9868 : data->nominal_type = data->arg.type;
2482 9868 : data->passed_mode = data->nominal_mode = data->arg.mode;
2483 : }
2484 :
2485 : /* Find mode as it is passed by the ABI. */
2486 9167542 : unsignedp = TYPE_UNSIGNED (data->arg.type);
2487 9167542 : data->arg.mode
2488 9167542 : = promote_function_mode (data->arg.type, data->arg.mode, &unsignedp,
2489 9167542 : TREE_TYPE (current_function_decl), 0);
2490 : }
2491 :
2492 : /* A subroutine of assign_parms. Invoke setup_incoming_varargs. */
2493 :
2494 : static void
2495 21520 : assign_parms_setup_varargs (struct assign_parm_data_all *all,
2496 : struct assign_parm_data_one *data, bool no_rtl)
2497 : {
2498 21520 : int varargs_pretend_bytes = 0;
2499 :
2500 21520 : function_arg_info last_named_arg = data->arg;
2501 21520 : last_named_arg.named = true;
2502 21520 : targetm.calls.setup_incoming_varargs (all->args_so_far, last_named_arg,
2503 : &varargs_pretend_bytes, no_rtl);
2504 :
2505 : /* If the back-end has requested extra stack space, record how much is
2506 : needed. Do not change pretend_args_size otherwise since it may be
2507 : nonzero from an earlier partial argument. */
2508 21520 : if (varargs_pretend_bytes > 0)
2509 0 : all->pretend_args_size = varargs_pretend_bytes;
2510 21520 : }
2511 :
2512 : /* A subroutine of assign_parms. Set DATA->ENTRY_PARM corresponding to
2513 : the incoming location of the current parameter. */
2514 :
2515 : static void
2516 3180986 : assign_parm_find_entry_rtl (struct assign_parm_data_all *all,
2517 : struct assign_parm_data_one *data)
2518 : {
2519 3180986 : HOST_WIDE_INT pretend_bytes = 0;
2520 3180986 : rtx entry_parm;
2521 3180986 : bool in_regs;
2522 :
2523 3180986 : if (data->arg.mode == VOIDmode)
2524 : {
2525 0 : data->entry_parm = data->stack_parm = const0_rtx;
2526 0 : return;
2527 : }
2528 :
2529 3180986 : targetm.calls.warn_parameter_passing_abi (all->args_so_far,
2530 : data->arg.type);
2531 :
2532 6361972 : entry_parm = targetm.calls.function_incoming_arg (all->args_so_far,
2533 3180986 : data->arg);
2534 3180986 : if (entry_parm == 0)
2535 1070091 : data->arg.mode = data->passed_mode;
2536 :
2537 : /* Determine parm's home in the stack, in case it arrives in the stack
2538 : or we should pretend it did. Compute the stack position and rtx where
2539 : the argument arrives and its size.
2540 :
2541 : There is one complexity here: If this was a parameter that would
2542 : have been passed in registers, but wasn't only because it is
2543 : __builtin_va_alist, we want locate_and_pad_parm to treat it as if
2544 : it came in a register so that REG_PARM_STACK_SPACE isn't skipped.
2545 : In this case, we call FUNCTION_ARG with NAMED set to 1 instead of 0
2546 : as it was the previous time. */
2547 1070091 : in_regs = (entry_parm != 0);
2548 : #ifdef STACK_PARMS_IN_REG_PARM_AREA
2549 : in_regs = true;
2550 : #endif
2551 1070091 : if (!in_regs && !data->arg.named)
2552 : {
2553 0 : if (targetm.calls.pretend_outgoing_varargs_named (all->args_so_far))
2554 : {
2555 0 : rtx tem;
2556 0 : function_arg_info named_arg = data->arg;
2557 0 : named_arg.named = true;
2558 0 : tem = targetm.calls.function_incoming_arg (all->args_so_far,
2559 : named_arg);
2560 0 : in_regs = tem != NULL;
2561 : }
2562 : }
2563 :
2564 : /* If this parameter was passed both in registers and in the stack, use
2565 : the copy on the stack. */
2566 3180986 : if (targetm.calls.must_pass_in_stack (data->arg))
2567 : entry_parm = 0;
2568 :
2569 3180986 : if (entry_parm)
2570 : {
2571 2110895 : int partial;
2572 :
2573 2110895 : partial = targetm.calls.arg_partial_bytes (all->args_so_far, data->arg);
2574 2110895 : data->partial = partial;
2575 :
2576 : /* The caller might already have allocated stack space for the
2577 : register parameters. */
2578 2110895 : if (partial != 0 && all->reg_parm_stack_space == 0)
2579 : {
2580 : /* Part of this argument is passed in registers and part
2581 : is passed on the stack. Ask the prologue code to extend
2582 : the stack part so that we can recreate the full value.
2583 :
2584 : PRETEND_BYTES is the size of the registers we need to store.
2585 : CURRENT_FUNCTION_PRETEND_ARGS_SIZE is the amount of extra
2586 : stack space that the prologue should allocate.
2587 :
2588 : Internally, gcc assumes that the argument pointer is aligned
2589 : to STACK_BOUNDARY bits. This is used both for alignment
2590 : optimizations (see init_emit) and to locate arguments that are
2591 : aligned to more than PARM_BOUNDARY bits. We must preserve this
2592 : invariant by rounding CURRENT_FUNCTION_PRETEND_ARGS_SIZE up to
2593 : a stack boundary. */
2594 :
2595 : /* We assume at most one partial arg, and it must be the first
2596 : argument on the stack. */
2597 0 : gcc_assert (!all->extra_pretend_bytes && !all->pretend_args_size);
2598 :
2599 0 : pretend_bytes = partial;
2600 0 : all->pretend_args_size = CEIL_ROUND (pretend_bytes, STACK_BYTES);
2601 :
2602 : /* We want to align relative to the actual stack pointer, so
2603 : don't include this in the stack size until later. */
2604 0 : all->extra_pretend_bytes = all->pretend_args_size;
2605 : }
2606 : }
2607 :
2608 3180986 : locate_and_pad_parm (data->arg.mode, data->arg.type, in_regs,
2609 : all->reg_parm_stack_space,
2610 : entry_parm ? data->partial : 0, current_function_decl,
2611 : &all->stack_args_size, &data->locate);
2612 :
2613 : /* Update parm_stack_boundary if this parameter is passed in the
2614 : stack. */
2615 3180986 : if (!in_regs && crtl->parm_stack_boundary < data->locate.boundary)
2616 197760 : crtl->parm_stack_boundary = data->locate.boundary;
2617 :
2618 : /* Adjust offsets to include the pretend args. */
2619 3180986 : pretend_bytes = all->extra_pretend_bytes - pretend_bytes;
2620 3180986 : data->locate.slot_offset.constant += pretend_bytes;
2621 3180986 : data->locate.offset.constant += pretend_bytes;
2622 :
2623 3180986 : data->entry_parm = entry_parm;
2624 : }
2625 :
2626 : /* A subroutine of assign_parms. If there is actually space on the stack
2627 : for this parm, count it in stack_args_size and return true. */
2628 :
2629 : static bool
2630 3180986 : assign_parm_is_stack_parm (struct assign_parm_data_all *all,
2631 : struct assign_parm_data_one *data)
2632 : {
2633 : /* Trivially true if we've no incoming register. */
2634 3180986 : if (data->entry_parm == NULL)
2635 : ;
2636 : /* Also true if we're partially in registers and partially not,
2637 : since we've arranged to drop the entire argument on the stack. */
2638 2110895 : else if (data->partial != 0)
2639 : ;
2640 : /* Also true if the target says that it's passed in both registers
2641 : and on the stack. */
2642 2110895 : else if (GET_CODE (data->entry_parm) == PARALLEL
2643 53455 : && XEXP (XVECEXP (data->entry_parm, 0, 0), 0) == NULL_RTX)
2644 : ;
2645 : /* Also true if the target says that there's stack allocated for
2646 : all register parameters. */
2647 2110895 : else if (all->reg_parm_stack_space > 0)
2648 : ;
2649 : /* Otherwise, no, this parameter has no ABI defined stack slot. */
2650 : else
2651 : return false;
2652 :
2653 1180284 : all->stack_args_size.constant += data->locate.size.constant;
2654 1180284 : if (data->locate.size.var)
2655 0 : ADD_PARM_SIZE (all->stack_args_size, data->locate.size.var);
2656 :
2657 : return true;
2658 : }
2659 :
2660 : /* A subroutine of assign_parms. Given that this parameter is allocated
2661 : stack space by the ABI, find it. */
2662 :
2663 : static void
2664 1180284 : assign_parm_find_stack_rtl (tree parm, struct assign_parm_data_one *data)
2665 : {
2666 1180284 : rtx offset_rtx, stack_parm;
2667 1180284 : unsigned int align, boundary;
2668 :
2669 : /* If we're passing this arg using a reg, make its stack home the
2670 : aligned stack slot. */
2671 1180284 : if (data->entry_parm)
2672 110193 : offset_rtx = ARGS_SIZE_RTX (data->locate.slot_offset);
2673 : else
2674 1312919 : offset_rtx = ARGS_SIZE_RTX (data->locate.offset);
2675 :
2676 1180284 : stack_parm = crtl->args.internal_arg_pointer;
2677 1180284 : if (offset_rtx != const0_rtx)
2678 1092068 : stack_parm = gen_rtx_PLUS (Pmode, stack_parm, offset_rtx);
2679 1180284 : stack_parm = gen_rtx_MEM (data->arg.mode, stack_parm);
2680 :
2681 1180284 : if (!data->arg.pass_by_reference)
2682 : {
2683 1175390 : set_mem_attributes (stack_parm, parm, 1);
2684 : /* set_mem_attributes could set MEM_SIZE to the passed mode's size,
2685 : while promoted mode's size is needed. */
2686 1175390 : if (data->arg.mode != BLKmode
2687 1175390 : && data->arg.mode != DECL_MODE (parm))
2688 : {
2689 0 : set_mem_size (stack_parm, GET_MODE_SIZE (data->arg.mode));
2690 0 : if (MEM_EXPR (stack_parm) && MEM_OFFSET_KNOWN_P (stack_parm))
2691 : {
2692 0 : poly_int64 offset = subreg_lowpart_offset (DECL_MODE (parm),
2693 0 : data->arg.mode);
2694 0 : if (maybe_ne (offset, 0))
2695 0 : set_mem_offset (stack_parm, MEM_OFFSET (stack_parm) - offset);
2696 : }
2697 : }
2698 : }
2699 :
2700 1180284 : boundary = data->locate.boundary;
2701 1180284 : align = BITS_PER_UNIT;
2702 :
2703 : /* If we're padding upward, we know that the alignment of the slot
2704 : is TARGET_FUNCTION_ARG_BOUNDARY. If we're using slot_offset, we're
2705 : intentionally forcing upward padding. Otherwise we have to come
2706 : up with a guess at the alignment based on OFFSET_RTX. */
2707 1180284 : poly_int64 offset;
2708 1180284 : if (data->locate.where_pad == PAD_NONE || data->entry_parm)
2709 : align = boundary;
2710 1070091 : else if (data->locate.where_pad == PAD_UPWARD)
2711 : {
2712 1070091 : align = boundary;
2713 : /* If the argument offset is actually more aligned than the nominal
2714 : stack slot boundary, take advantage of that excess alignment.
2715 : Don't make any assumptions if STACK_POINTER_OFFSET is in use. */
2716 1070091 : if (poly_int_rtx_p (offset_rtx, &offset)
2717 : && known_eq (STACK_POINTER_OFFSET, 0))
2718 : {
2719 1070091 : unsigned int offset_align = known_alignment (offset) * BITS_PER_UNIT;
2720 1198597 : if (offset_align == 0 || offset_align > STACK_BOUNDARY)
2721 589738 : offset_align = STACK_BOUNDARY;
2722 1070091 : align = MAX (align, offset_align);
2723 : }
2724 : }
2725 0 : else if (poly_int_rtx_p (offset_rtx, &offset))
2726 : {
2727 0 : align = least_bit_hwi (boundary);
2728 0 : unsigned int offset_align = known_alignment (offset) * BITS_PER_UNIT;
2729 0 : if (offset_align != 0)
2730 0 : align = MIN (align, offset_align);
2731 : }
2732 1180284 : set_mem_align (stack_parm, align);
2733 :
2734 1180284 : if (data->entry_parm)
2735 110193 : set_reg_attrs_for_parm (data->entry_parm, stack_parm);
2736 :
2737 1180284 : data->stack_parm = stack_parm;
2738 1180284 : }
2739 :
2740 : /* A subroutine of assign_parms. Adjust DATA->ENTRY_RTL such that it's
2741 : always valid and contiguous. */
2742 :
2743 : static void
2744 1180284 : assign_parm_adjust_entry_rtl (struct assign_parm_data_one *data)
2745 : {
2746 1180284 : rtx entry_parm = data->entry_parm;
2747 1180284 : rtx stack_parm = data->stack_parm;
2748 :
2749 : /* If this parm was passed part in regs and part in memory, pretend it
2750 : arrived entirely in memory by pushing the register-part onto the stack.
2751 : In the special case of a DImode or DFmode that is split, we could put
2752 : it together in a pseudoreg directly, but for now that's not worth
2753 : bothering with. */
2754 1180284 : if (data->partial != 0)
2755 : {
2756 : /* Handle calls that pass values in multiple non-contiguous
2757 : locations. The Irix 6 ABI has examples of this. */
2758 0 : if (GET_CODE (entry_parm) == PARALLEL)
2759 0 : emit_group_store (validize_mem (copy_rtx (stack_parm)), entry_parm,
2760 0 : data->arg.type, int_size_in_bytes (data->arg.type));
2761 : else
2762 : {
2763 0 : gcc_assert (data->partial % UNITS_PER_WORD == 0);
2764 0 : move_block_from_reg (REGNO (entry_parm),
2765 : validize_mem (copy_rtx (stack_parm)),
2766 : data->partial / UNITS_PER_WORD);
2767 : }
2768 :
2769 : entry_parm = stack_parm;
2770 : }
2771 :
2772 : /* If we didn't decide this parm came in a register, by default it came
2773 : on the stack. */
2774 1180284 : else if (entry_parm == NULL)
2775 : entry_parm = stack_parm;
2776 :
2777 : /* When an argument is passed in multiple locations, we can't make use
2778 : of this information, but we can save some copying if the whole argument
2779 : is passed in a single register. */
2780 110193 : else if (GET_CODE (entry_parm) == PARALLEL
2781 0 : && data->nominal_mode != BLKmode
2782 0 : && data->passed_mode != BLKmode)
2783 : {
2784 0 : size_t i, len = XVECLEN (entry_parm, 0);
2785 :
2786 0 : for (i = 0; i < len; i++)
2787 0 : if (XEXP (XVECEXP (entry_parm, 0, i), 0) != NULL_RTX
2788 0 : && REG_P (XEXP (XVECEXP (entry_parm, 0, i), 0))
2789 0 : && (GET_MODE (XEXP (XVECEXP (entry_parm, 0, i), 0))
2790 0 : == data->passed_mode)
2791 0 : && INTVAL (XEXP (XVECEXP (entry_parm, 0, i), 1)) == 0)
2792 : {
2793 : entry_parm = XEXP (XVECEXP (entry_parm, 0, i), 0);
2794 : break;
2795 : }
2796 : }
2797 :
2798 1180284 : data->entry_parm = entry_parm;
2799 1180284 : }
2800 :
2801 : /* A subroutine of assign_parms. Reconstitute any values which were
2802 : passed in multiple registers and would fit in a single register. */
2803 :
2804 : static void
2805 3107173 : assign_parm_remove_parallels (struct assign_parm_data_one *data)
2806 : {
2807 3107173 : rtx entry_parm = data->entry_parm;
2808 :
2809 : /* Convert the PARALLEL to a REG of the same mode as the parallel.
2810 : This can be done with register operations rather than on the
2811 : stack, even if we will store the reconstituted parameter on the
2812 : stack later. */
2813 3107173 : if (GET_CODE (entry_parm) == PARALLEL && GET_MODE (entry_parm) != BLKmode)
2814 : {
2815 49641 : rtx parmreg = gen_reg_rtx (GET_MODE (entry_parm));
2816 49641 : emit_group_store (parmreg, entry_parm, data->arg.type,
2817 99282 : GET_MODE_SIZE (GET_MODE (entry_parm)));
2818 49641 : entry_parm = parmreg;
2819 : }
2820 :
2821 3107173 : data->entry_parm = entry_parm;
2822 3107173 : }
2823 :
2824 : /* A subroutine of assign_parms. Adjust DATA->STACK_RTL such that it's
2825 : always valid and properly aligned. */
2826 :
2827 : static void
2828 3180986 : assign_parm_adjust_stack_rtl (tree parm, struct assign_parm_data_one *data)
2829 : {
2830 3180986 : rtx stack_parm = data->stack_parm;
2831 :
2832 : /* If we can't trust the parm stack slot to be aligned enough for its
2833 : ultimate type, don't use that slot after entry. We'll make another
2834 : stack slot, if we need one. */
2835 3180986 : if (stack_parm
2836 3180986 : && ((GET_MODE_ALIGNMENT (data->nominal_mode) > MEM_ALIGN (stack_parm)
2837 24913 : && ((optab_handler (movmisalign_optab, data->nominal_mode)
2838 : != CODE_FOR_nothing)
2839 49818 : || targetm.slow_unaligned_access (data->nominal_mode,
2840 24909 : MEM_ALIGN (stack_parm))))
2841 1178871 : || (data->nominal_type
2842 2357742 : && TYPE_ALIGN (data->nominal_type) > MEM_ALIGN (stack_parm)
2843 4503 : && ((MEM_ALIGN (stack_parm)
2844 4503 : < MIN (BIGGEST_ALIGNMENT, MAX_SUPPORTED_STACK_ALIGNMENT))
2845 : /* If its address is taken, make a local copy whose
2846 : maximum alignment is MAX_SUPPORTED_STACK_ALIGNMENT.
2847 : */
2848 4 : || (TREE_ADDRESSABLE (parm)
2849 : && (MEM_ALIGN (stack_parm)
2850 : < MAX_SUPPORTED_STACK_ALIGNMENT))))))
2851 : stack_parm = NULL;
2852 :
2853 : /* If parm was passed in memory, and we need to convert it on entry,
2854 : don't store it back in that same slot. */
2855 3176482 : else if (data->entry_parm == stack_parm
2856 1064178 : && data->nominal_mode != BLKmode
2857 995815 : && data->nominal_mode != data->passed_mode)
2858 : stack_parm = NULL;
2859 :
2860 : /* If stack protection is in effect for this function, don't leave any
2861 : pointers in their passed stack slots. */
2862 3176482 : else if (crtl->stack_protect_guard
2863 192 : && (flag_stack_protect == SPCT_FLAG_ALL
2864 155 : || data->arg.pass_by_reference
2865 155 : || POINTER_TYPE_P (data->nominal_type)))
2866 4640 : stack_parm = NULL;
2867 :
2868 3180986 : data->stack_parm = stack_parm;
2869 3180986 : }
2870 :
2871 : /* A subroutine of assign_parms. Return true if the current parameter
2872 : should be stored as a BLKmode in the current frame. */
2873 :
2874 : static bool
2875 3180986 : assign_parm_setup_block_p (struct assign_parm_data_one *data)
2876 : {
2877 0 : if (data->nominal_mode == BLKmode)
2878 : return true;
2879 3107173 : if (GET_MODE (data->entry_parm) == BLKmode)
2880 0 : return true;
2881 :
2882 : #ifdef BLOCK_REG_PADDING
2883 : /* Only assign_parm_setup_block knows how to deal with register arguments
2884 : that are padded at the least significant end. */
2885 : if (REG_P (data->entry_parm)
2886 : && known_lt (GET_MODE_SIZE (data->arg.mode), UNITS_PER_WORD)
2887 : && (BLOCK_REG_PADDING (data->passed_mode, data->arg.type, 1)
2888 : == (BYTES_BIG_ENDIAN ? PAD_UPWARD : PAD_DOWNWARD)))
2889 : return true;
2890 : #endif
2891 :
2892 : return false;
2893 : }
2894 :
2895 : /* A subroutine of assign_parms. Arrange for the parameter to be
2896 : present and valid in DATA->STACK_RTL. */
2897 :
2898 : static void
2899 73813 : assign_parm_setup_block (struct assign_parm_data_all *all,
2900 : tree parm, struct assign_parm_data_one *data)
2901 : {
2902 73813 : rtx entry_parm = data->entry_parm;
2903 73813 : rtx stack_parm = data->stack_parm;
2904 73813 : rtx target_reg = NULL_RTX;
2905 73813 : bool in_conversion_seq = false;
2906 73813 : HOST_WIDE_INT size;
2907 73813 : HOST_WIDE_INT size_stored;
2908 :
2909 73813 : if (GET_CODE (entry_parm) == PARALLEL)
2910 3814 : entry_parm = emit_group_move_into_temps (entry_parm);
2911 :
2912 : /* If we want the parameter in a pseudo, don't use a stack slot. */
2913 73813 : if (is_gimple_reg (parm) && use_register_for_decl (parm))
2914 : {
2915 0 : tree def = ssa_default_def (cfun, parm);
2916 0 : gcc_assert (def);
2917 0 : machine_mode mode = promote_ssa_mode (def, NULL);
2918 0 : rtx reg = gen_reg_rtx (mode);
2919 0 : if (GET_CODE (reg) != CONCAT)
2920 : stack_parm = reg;
2921 : else
2922 : {
2923 0 : target_reg = reg;
2924 : /* Avoid allocating a stack slot, if there isn't one
2925 : preallocated by the ABI. It might seem like we should
2926 : always prefer a pseudo, but converting between
2927 : floating-point and integer modes goes through the stack
2928 : on various machines, so it's better to use the reserved
2929 : stack slot than to risk wasting it and allocating more
2930 : for the conversion. */
2931 0 : if (stack_parm == NULL_RTX)
2932 : {
2933 0 : int save = generating_concat_p;
2934 0 : generating_concat_p = 0;
2935 0 : stack_parm = gen_reg_rtx (mode);
2936 0 : generating_concat_p = save;
2937 : }
2938 : }
2939 0 : data->stack_parm = NULL;
2940 : }
2941 :
2942 73813 : size = int_size_in_bytes (data->arg.type);
2943 86439 : size_stored = CEIL_ROUND (size, UNITS_PER_WORD);
2944 73813 : if (stack_parm == 0)
2945 : {
2946 5453 : HOST_WIDE_INT parm_align
2947 : = ((STRICT_ALIGNMENT || BITS_PER_WORD <= MAX_SUPPORTED_STACK_ALIGNMENT)
2948 5665 : ? MAX (DECL_ALIGN (parm), BITS_PER_WORD) : DECL_ALIGN (parm));
2949 :
2950 5453 : SET_DECL_ALIGN (parm, parm_align);
2951 5453 : if (DECL_ALIGN (parm) > MAX_SUPPORTED_STACK_ALIGNMENT)
2952 : {
2953 : rtx allocsize = gen_int_mode (size_stored, Pmode);
2954 : get_dynamic_stack_size (&allocsize, 0, DECL_ALIGN (parm), NULL);
2955 : stack_parm = assign_stack_local (BLKmode, UINTVAL (allocsize),
2956 : MAX_SUPPORTED_STACK_ALIGNMENT);
2957 : rtx addr = align_dynamic_address (XEXP (stack_parm, 0),
2958 : DECL_ALIGN (parm));
2959 : mark_reg_pointer (addr, DECL_ALIGN (parm));
2960 : stack_parm = gen_rtx_MEM (GET_MODE (stack_parm), addr);
2961 : MEM_NOTRAP_P (stack_parm) = 1;
2962 : }
2963 : else
2964 5453 : stack_parm = assign_stack_local (BLKmode, size_stored,
2965 5453 : DECL_ALIGN (parm));
2966 10906 : if (known_eq (GET_MODE_SIZE (GET_MODE (entry_parm)), size))
2967 77 : PUT_MODE (stack_parm, GET_MODE (entry_parm));
2968 5453 : set_mem_attributes (stack_parm, parm, 1);
2969 : }
2970 :
2971 : /* If a BLKmode arrives in registers, copy it to a stack slot. Handle
2972 : calls that pass values in multiple non-contiguous locations. */
2973 73813 : if (REG_P (entry_parm) || GET_CODE (entry_parm) == PARALLEL)
2974 : {
2975 5150 : rtx mem;
2976 :
2977 : /* Note that we will be storing an integral number of words.
2978 : So we have to be careful to ensure that we allocate an
2979 : integral number of words. We do this above when we call
2980 : assign_stack_local if space was not allocated in the argument
2981 : list. If it was, this will not work if PARM_BOUNDARY is not
2982 : a multiple of BITS_PER_WORD. It isn't clear how to fix this
2983 : if it becomes a problem. Exception is when BLKmode arrives
2984 : with arguments not conforming to word_mode. */
2985 :
2986 5150 : if (data->stack_parm == 0)
2987 : ;
2988 5150 : else if (GET_CODE (entry_parm) == PARALLEL)
2989 : ;
2990 : else
2991 5150 : gcc_assert (!size || !(PARM_BOUNDARY % BITS_PER_WORD));
2992 :
2993 5150 : mem = validize_mem (copy_rtx (stack_parm));
2994 :
2995 : /* Handle values in multiple non-contiguous locations. */
2996 5150 : if (GET_CODE (entry_parm) == PARALLEL && !MEM_P (mem))
2997 0 : emit_group_store (mem, entry_parm, data->arg.type, size);
2998 5150 : else if (GET_CODE (entry_parm) == PARALLEL)
2999 : {
3000 3814 : push_to_sequence2 (all->first_conversion_insn,
3001 : all->last_conversion_insn);
3002 3814 : emit_group_store (mem, entry_parm, data->arg.type, size);
3003 3814 : all->first_conversion_insn = get_insns ();
3004 3814 : all->last_conversion_insn = get_last_insn ();
3005 3814 : end_sequence ();
3006 3814 : in_conversion_seq = true;
3007 : }
3008 :
3009 1336 : else if (size == 0)
3010 : ;
3011 :
3012 : /* If SIZE is that of a mode no bigger than a word, just use
3013 : that mode's store operation. */
3014 1320 : else if (size <= UNITS_PER_WORD)
3015 : {
3016 1316 : unsigned int bits = size * BITS_PER_UNIT;
3017 1316 : machine_mode mode = int_mode_for_size (bits, 0).else_blk ();
3018 :
3019 1316 : if (mode != BLKmode
3020 : #ifdef BLOCK_REG_PADDING
3021 : && (size == UNITS_PER_WORD
3022 : || (BLOCK_REG_PADDING (mode, data->arg.type, 1)
3023 : != (BYTES_BIG_ENDIAN ? PAD_UPWARD : PAD_DOWNWARD)))
3024 : #endif
3025 : )
3026 : {
3027 102 : rtx reg;
3028 :
3029 : /* We are really truncating a word_mode value containing
3030 : SIZE bytes into a value of mode MODE. If such an
3031 : operation requires no actual instructions, we can refer
3032 : to the value directly in mode MODE, otherwise we must
3033 : start with the register in word_mode and explicitly
3034 : convert it. */
3035 102 : if (mode == word_mode
3036 102 : || TRULY_NOOP_TRUNCATION_MODES_P (mode, word_mode))
3037 102 : reg = gen_rtx_REG (mode, REGNO (entry_parm));
3038 : else
3039 : {
3040 0 : reg = gen_rtx_REG (word_mode, REGNO (entry_parm));
3041 0 : reg = convert_to_mode (mode, copy_to_reg (reg), 1);
3042 : }
3043 :
3044 : /* We use adjust_address to get a new MEM with the mode
3045 : changed. adjust_address is better than change_address
3046 : for this purpose because adjust_address does not lose
3047 : the MEM_EXPR associated with the MEM.
3048 :
3049 : If the MEM_EXPR is lost, then optimizations like DSE
3050 : assume the MEM escapes and thus is not subject to DSE. */
3051 102 : emit_move_insn (adjust_address (mem, mode, 0), reg);
3052 : }
3053 :
3054 : #ifdef BLOCK_REG_PADDING
3055 : /* Storing the register in memory as a full word, as
3056 : move_block_from_reg below would do, and then using the
3057 : MEM in a smaller mode, has the effect of shifting right
3058 : if BYTES_BIG_ENDIAN. If we're bypassing memory, the
3059 : shifting must be explicit. */
3060 : else if (!MEM_P (mem))
3061 : {
3062 : rtx x;
3063 :
3064 : /* If the assert below fails, we should have taken the
3065 : mode != BLKmode path above, unless we have downward
3066 : padding of smaller-than-word arguments on a machine
3067 : with little-endian bytes, which would likely require
3068 : additional changes to work correctly. */
3069 : gcc_checking_assert (BYTES_BIG_ENDIAN
3070 : && (BLOCK_REG_PADDING (mode,
3071 : data->arg.type, 1)
3072 : == PAD_UPWARD));
3073 :
3074 : int by = (UNITS_PER_WORD - size) * BITS_PER_UNIT;
3075 :
3076 : x = gen_rtx_REG (word_mode, REGNO (entry_parm));
3077 : x = expand_shift (RSHIFT_EXPR, word_mode, x, by,
3078 : NULL_RTX, 1);
3079 : x = force_reg (word_mode, x);
3080 : x = gen_lowpart_SUBREG (GET_MODE (mem), x);
3081 :
3082 : emit_move_insn (mem, x);
3083 : }
3084 : #endif
3085 :
3086 : /* Blocks smaller than a word on a BYTES_BIG_ENDIAN
3087 : machine must be aligned to the left before storing
3088 : to memory. Note that the previous test doesn't
3089 : handle all cases (e.g. SIZE == 3). */
3090 1214 : else if (size != UNITS_PER_WORD
3091 : #ifdef BLOCK_REG_PADDING
3092 : && (BLOCK_REG_PADDING (mode, data->arg.type, 1)
3093 : == PAD_DOWNWARD)
3094 : #else
3095 : && BYTES_BIG_ENDIAN
3096 : #endif
3097 : )
3098 : {
3099 : rtx tem, x;
3100 : int by = (UNITS_PER_WORD - size) * BITS_PER_UNIT;
3101 : rtx reg = gen_rtx_REG (word_mode, REGNO (entry_parm));
3102 :
3103 : x = expand_shift (LSHIFT_EXPR, word_mode, reg, by, NULL_RTX, 1);
3104 : tem = change_address (mem, word_mode, 0);
3105 : emit_move_insn (tem, x);
3106 : }
3107 : else
3108 2428 : move_block_from_reg (REGNO (entry_parm), mem,
3109 1214 : size_stored / UNITS_PER_WORD);
3110 : }
3111 2 : else if (!MEM_P (mem))
3112 : {
3113 0 : gcc_checking_assert (size > UNITS_PER_WORD);
3114 : #ifdef BLOCK_REG_PADDING
3115 : gcc_checking_assert (BLOCK_REG_PADDING (GET_MODE (mem),
3116 : data->arg.type, 0)
3117 : == PAD_UPWARD);
3118 : #endif
3119 0 : emit_move_insn (mem, entry_parm);
3120 : }
3121 : else
3122 2 : move_block_from_reg (REGNO (entry_parm), mem,
3123 2 : size_stored / UNITS_PER_WORD);
3124 : }
3125 68663 : else if (data->stack_parm == 0 && !TYPE_EMPTY_P (data->arg.type))
3126 : {
3127 207 : push_to_sequence2 (all->first_conversion_insn, all->last_conversion_insn);
3128 207 : emit_block_move (stack_parm, data->entry_parm, GEN_INT (size),
3129 : BLOCK_OP_NORMAL);
3130 207 : all->first_conversion_insn = get_insns ();
3131 207 : all->last_conversion_insn = get_last_insn ();
3132 207 : end_sequence ();
3133 207 : in_conversion_seq = true;
3134 : }
3135 :
3136 73813 : if (target_reg)
3137 : {
3138 0 : if (!in_conversion_seq)
3139 0 : emit_move_insn (target_reg, stack_parm);
3140 : else
3141 : {
3142 0 : push_to_sequence2 (all->first_conversion_insn,
3143 : all->last_conversion_insn);
3144 0 : emit_move_insn (target_reg, stack_parm);
3145 0 : all->first_conversion_insn = get_insns ();
3146 0 : all->last_conversion_insn = get_last_insn ();
3147 0 : end_sequence ();
3148 : }
3149 : stack_parm = target_reg;
3150 : }
3151 :
3152 73813 : data->stack_parm = stack_parm;
3153 73813 : set_parm_rtl (parm, stack_parm);
3154 73813 : }
3155 :
3156 : /* A subroutine of assign_parms. Allocate a pseudo to hold the current
3157 : parameter. Get it there. Perform all ABI specified conversions. */
3158 :
3159 : static void
3160 2289013 : assign_parm_setup_reg (struct assign_parm_data_all *all, tree parm,
3161 : struct assign_parm_data_one *data)
3162 : {
3163 2289013 : rtx parmreg, validated_mem;
3164 2289013 : rtx equiv_stack_parm;
3165 2289013 : machine_mode promoted_nominal_mode;
3166 2289013 : int unsignedp = TYPE_UNSIGNED (TREE_TYPE (parm));
3167 2289013 : bool did_conversion = false;
3168 2289013 : bool need_conversion, moved;
3169 2289013 : enum insn_code icode;
3170 2289013 : rtx rtl;
3171 :
3172 : /* Store the parm in a pseudoregister during the function, but we may
3173 : need to do it in a wider mode. Using 2 here makes the result
3174 : consistent with promote_decl_mode and thus expand_expr_real_1. */
3175 2289013 : promoted_nominal_mode
3176 4578026 : = promote_function_mode (data->nominal_type, data->nominal_mode, &unsignedp,
3177 2289013 : TREE_TYPE (current_function_decl), 2);
3178 :
3179 2289013 : parmreg = gen_reg_rtx (promoted_nominal_mode);
3180 2289013 : if (!DECL_ARTIFICIAL (parm))
3181 2063905 : mark_user_reg (parmreg);
3182 :
3183 : /* If this was an item that we received a pointer to,
3184 : set rtl appropriately. */
3185 2289013 : if (data->arg.pass_by_reference)
3186 : {
3187 4930 : rtl = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (data->arg.type)), parmreg);
3188 4930 : set_mem_attributes (rtl, parm, 1);
3189 : }
3190 : else
3191 : rtl = parmreg;
3192 :
3193 2289013 : assign_parm_remove_parallels (data);
3194 :
3195 : /* Copy the value into the register, thus bridging between
3196 : assign_parm_find_data_types and expand_expr_real_1. */
3197 :
3198 2289013 : equiv_stack_parm = data->stack_parm;
3199 2289013 : validated_mem = validize_mem (copy_rtx (data->entry_parm));
3200 :
3201 2289013 : need_conversion = (data->nominal_mode != data->passed_mode
3202 2289013 : || promoted_nominal_mode != data->arg.mode);
3203 4171 : moved = false;
3204 :
3205 : if (need_conversion
3206 4171 : && GET_MODE_CLASS (data->nominal_mode) == MODE_INT
3207 3660 : && data->nominal_mode == data->passed_mode
3208 0 : && data->nominal_mode == GET_MODE (data->entry_parm))
3209 : {
3210 : /* ENTRY_PARM has been converted to PROMOTED_MODE, its
3211 : mode, by the caller. We now have to convert it to
3212 : NOMINAL_MODE, if different. However, PARMREG may be in
3213 : a different mode than NOMINAL_MODE if it is being stored
3214 : promoted.
3215 :
3216 : If ENTRY_PARM is a hard register, it might be in a register
3217 : not valid for operating in its mode (e.g., an odd-numbered
3218 : register for a DFmode). In that case, moves are the only
3219 : thing valid, so we can't do a convert from there. This
3220 : occurs when the calling sequence allow such misaligned
3221 : usages.
3222 :
3223 : In addition, the conversion may involve a call, which could
3224 : clobber parameters which haven't been copied to pseudo
3225 : registers yet.
3226 :
3227 : First, we try to emit an insn which performs the necessary
3228 : conversion. We verify that this insn does not clobber any
3229 : hard registers. */
3230 :
3231 0 : rtx op0, op1;
3232 :
3233 0 : icode = can_extend_p (promoted_nominal_mode, data->passed_mode,
3234 : unsignedp);
3235 :
3236 0 : op0 = parmreg;
3237 0 : op1 = validated_mem;
3238 0 : if (icode != CODE_FOR_nothing
3239 0 : && insn_operand_matches (icode, 0, op0)
3240 0 : && insn_operand_matches (icode, 1, op1))
3241 : {
3242 0 : enum rtx_code code = unsignedp ? ZERO_EXTEND : SIGN_EXTEND;
3243 0 : rtx_insn *insn, *insns;
3244 0 : rtx t = op1;
3245 0 : HARD_REG_SET hardregs;
3246 :
3247 0 : start_sequence ();
3248 : /* If op1 is a hard register that is likely spilled, first
3249 : force it into a pseudo, otherwise combiner might extend
3250 : its lifetime too much. */
3251 0 : if (GET_CODE (t) == SUBREG)
3252 0 : t = SUBREG_REG (t);
3253 0 : if (REG_P (t)
3254 0 : && HARD_REGISTER_P (t)
3255 0 : && ! TEST_HARD_REG_BIT (fixed_reg_set, REGNO (t))
3256 0 : && targetm.class_likely_spilled_p (REGNO_REG_CLASS (REGNO (t))))
3257 : {
3258 0 : t = gen_reg_rtx (GET_MODE (op1));
3259 0 : emit_move_insn (t, op1);
3260 : }
3261 : else
3262 : t = op1;
3263 0 : rtx_insn *pat = gen_extend_insn (op0, t, promoted_nominal_mode,
3264 : data->passed_mode, unsignedp);
3265 0 : emit_insn (pat);
3266 0 : insns = get_insns ();
3267 :
3268 0 : moved = true;
3269 0 : CLEAR_HARD_REG_SET (hardregs);
3270 0 : for (insn = insns; insn && moved; insn = NEXT_INSN (insn))
3271 : {
3272 0 : if (INSN_P (insn))
3273 0 : note_stores (insn, record_hard_reg_sets, &hardregs);
3274 0 : if (!hard_reg_set_empty_p (hardregs))
3275 0 : moved = false;
3276 : }
3277 :
3278 0 : end_sequence ();
3279 :
3280 0 : if (moved)
3281 : {
3282 0 : emit_insn (insns);
3283 0 : if (equiv_stack_parm != NULL_RTX)
3284 0 : equiv_stack_parm = gen_rtx_fmt_e (code, GET_MODE (parmreg),
3285 : equiv_stack_parm);
3286 : }
3287 : }
3288 : }
3289 :
3290 0 : if (moved)
3291 : /* Nothing to do. */
3292 : ;
3293 2289013 : else if (need_conversion)
3294 : {
3295 : /* We did not have an insn to convert directly, or the sequence
3296 : generated appeared unsafe. We must first copy the parm to a
3297 : pseudo reg, and save the conversion until after all
3298 : parameters have been moved. */
3299 :
3300 4171 : int save_tree_used;
3301 4171 : rtx tempreg = gen_reg_rtx (GET_MODE (data->entry_parm));
3302 :
3303 4171 : emit_move_insn (tempreg, validated_mem);
3304 :
3305 4171 : push_to_sequence2 (all->first_conversion_insn, all->last_conversion_insn);
3306 4171 : tempreg = convert_to_mode (data->nominal_mode, tempreg, unsignedp);
3307 :
3308 4171 : if (partial_subreg_p (tempreg)
3309 3660 : && GET_MODE (tempreg) == data->nominal_mode
3310 3660 : && REG_P (SUBREG_REG (tempreg))
3311 3660 : && data->nominal_mode == data->passed_mode
3312 3660 : && GET_MODE (SUBREG_REG (tempreg)) == GET_MODE (data->entry_parm))
3313 : {
3314 : /* The argument is already sign/zero extended, so note it
3315 : into the subreg. */
3316 0 : SUBREG_PROMOTED_VAR_P (tempreg) = 1;
3317 0 : SUBREG_PROMOTED_SET (tempreg, unsignedp);
3318 : }
3319 :
3320 : /* TREE_USED gets set erroneously during expand_assignment. */
3321 4171 : save_tree_used = TREE_USED (parm);
3322 4171 : SET_DECL_RTL (parm, rtl);
3323 4171 : expand_assignment (parm, make_tree (data->nominal_type, tempreg), false);
3324 4171 : SET_DECL_RTL (parm, NULL_RTX);
3325 4171 : TREE_USED (parm) = save_tree_used;
3326 4171 : all->first_conversion_insn = get_insns ();
3327 4171 : all->last_conversion_insn = get_last_insn ();
3328 4171 : end_sequence ();
3329 :
3330 4171 : did_conversion = true;
3331 : }
3332 2284842 : else if (MEM_P (data->entry_parm)
3333 822963 : && GET_MODE_ALIGNMENT (promoted_nominal_mode)
3334 823021 : > MEM_ALIGN (data->entry_parm)
3335 2308743 : && (((icode = optab_handler (movmisalign_optab,
3336 : promoted_nominal_mode))
3337 : != CODE_FOR_nothing)
3338 23897 : || targetm.slow_unaligned_access (promoted_nominal_mode,
3339 23955 : MEM_ALIGN (data->entry_parm))))
3340 : {
3341 4 : if (icode != CODE_FOR_nothing)
3342 4 : emit_insn (GEN_FCN (icode) (parmreg, validated_mem));
3343 : else
3344 0 : rtl = parmreg = extract_bit_field (validated_mem,
3345 0 : GET_MODE_BITSIZE (promoted_nominal_mode), 0,
3346 : unsignedp, parmreg,
3347 : promoted_nominal_mode, VOIDmode, false, NULL);
3348 : }
3349 : else
3350 2284838 : emit_move_insn (parmreg, validated_mem);
3351 :
3352 : /* If we were passed a pointer but the actual value can live in a register,
3353 : retrieve it and use it directly. Note that we cannot use nominal_mode,
3354 : because it will have been set to Pmode above, we must use the actual mode
3355 : of the parameter instead. */
3356 2289013 : if (data->arg.pass_by_reference && TYPE_MODE (TREE_TYPE (parm)) != BLKmode)
3357 : {
3358 : /* Use a stack slot for debugging purposes if possible. */
3359 695 : if (use_register_for_decl (parm))
3360 : {
3361 365 : parmreg = gen_reg_rtx (TYPE_MODE (TREE_TYPE (parm)));
3362 365 : mark_user_reg (parmreg);
3363 : }
3364 : else
3365 : {
3366 330 : int align = STACK_SLOT_ALIGNMENT (TREE_TYPE (parm),
3367 : TYPE_MODE (TREE_TYPE (parm)),
3368 : TYPE_ALIGN (TREE_TYPE (parm)));
3369 330 : parmreg
3370 330 : = assign_stack_local (TYPE_MODE (TREE_TYPE (parm)),
3371 660 : GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (parm))),
3372 : align);
3373 330 : set_mem_attributes (parmreg, parm, 1);
3374 : }
3375 :
3376 : /* We need to preserve an address based on VIRTUAL_STACK_VARS_REGNUM for
3377 : the debug info in case it is not legitimate. */
3378 695 : if (GET_MODE (parmreg) != GET_MODE (rtl))
3379 : {
3380 0 : rtx tempreg = gen_reg_rtx (GET_MODE (rtl));
3381 0 : int unsigned_p = TYPE_UNSIGNED (TREE_TYPE (parm));
3382 :
3383 0 : push_to_sequence2 (all->first_conversion_insn,
3384 : all->last_conversion_insn);
3385 0 : emit_move_insn (tempreg, rtl);
3386 0 : tempreg = convert_to_mode (GET_MODE (parmreg), tempreg, unsigned_p);
3387 0 : emit_move_insn (MEM_P (parmreg) ? copy_rtx (parmreg) : parmreg,
3388 : tempreg);
3389 0 : all->first_conversion_insn = get_insns ();
3390 0 : all->last_conversion_insn = get_last_insn ();
3391 0 : end_sequence ();
3392 :
3393 0 : did_conversion = true;
3394 : }
3395 : else
3396 695 : emit_move_insn (MEM_P (parmreg) ? copy_rtx (parmreg) : parmreg, rtl);
3397 :
3398 695 : rtl = parmreg;
3399 :
3400 : /* STACK_PARM is the pointer, not the parm, and PARMREG is
3401 : now the parm. */
3402 695 : data->stack_parm = NULL;
3403 : }
3404 :
3405 2289013 : set_parm_rtl (parm, rtl);
3406 :
3407 : /* Mark the register as eliminable if we did no conversion and it was
3408 : copied from memory at a fixed offset, and the arg pointer was not
3409 : copied to a pseudo-reg. If the arg pointer is a pseudo reg or the
3410 : offset formed an invalid address, such memory-equivalences as we
3411 : make here would screw up life analysis for it. */
3412 2289013 : if (data->nominal_mode == data->passed_mode
3413 2284842 : && !did_conversion
3414 2284842 : && data->stack_parm != 0
3415 878477 : && MEM_P (data->stack_parm)
3416 878477 : && data->locate.offset.var == 0
3417 3167490 : && reg_mentioned_p (virtual_incoming_args_rtx,
3418 878477 : XEXP (data->stack_parm, 0)))
3419 : {
3420 878477 : rtx_insn *linsn = get_last_insn ();
3421 878477 : rtx_insn *sinsn;
3422 878477 : rtx set;
3423 :
3424 : /* Mark complex types separately. */
3425 878477 : if (GET_CODE (parmreg) == CONCAT)
3426 : {
3427 1166 : scalar_mode submode = GET_MODE_INNER (GET_MODE (parmreg));
3428 1166 : int regnor = REGNO (XEXP (parmreg, 0));
3429 1166 : int regnoi = REGNO (XEXP (parmreg, 1));
3430 1166 : rtx stackr = adjust_address_nv (data->stack_parm, submode, 0);
3431 2332 : rtx stacki = adjust_address_nv (data->stack_parm, submode,
3432 : GET_MODE_SIZE (submode));
3433 :
3434 : /* Scan backwards for the set of the real and
3435 : imaginary parts. */
3436 6072 : for (sinsn = linsn; sinsn != 0;
3437 4906 : sinsn = prev_nonnote_insn (sinsn))
3438 : {
3439 4906 : set = single_set (sinsn);
3440 4906 : if (set == 0)
3441 0 : continue;
3442 :
3443 4906 : if (SET_DEST (set) == regno_reg_rtx [regnoi])
3444 1166 : set_unique_reg_note (sinsn, REG_EQUIV, stacki);
3445 3740 : else if (SET_DEST (set) == regno_reg_rtx [regnor])
3446 1166 : set_unique_reg_note (sinsn, REG_EQUIV, stackr);
3447 : }
3448 : }
3449 : else
3450 877311 : set_dst_reg_note (linsn, REG_EQUIV, equiv_stack_parm, parmreg);
3451 : }
3452 :
3453 : /* For pointer data type, suggest pointer register. */
3454 2289013 : if (POINTER_TYPE_P (TREE_TYPE (parm)))
3455 938049 : mark_reg_pointer (parmreg,
3456 938049 : TYPE_ALIGN (TREE_TYPE (TREE_TYPE (parm))));
3457 2289013 : }
3458 :
3459 : /* A subroutine of assign_parms. Allocate stack space to hold the current
3460 : parameter. Get it there. Perform all ABI specified conversions. */
3461 :
3462 : static void
3463 818160 : assign_parm_setup_stack (struct assign_parm_data_all *all, tree parm,
3464 : struct assign_parm_data_one *data)
3465 : {
3466 : /* Value must be stored in the stack slot STACK_PARM during function
3467 : execution. */
3468 818160 : bool to_conversion = false;
3469 :
3470 818160 : assign_parm_remove_parallels (data);
3471 :
3472 818160 : if (data->arg.mode != data->nominal_mode)
3473 : {
3474 : /* Conversion is required. */
3475 835 : rtx tempreg = gen_reg_rtx (GET_MODE (data->entry_parm));
3476 :
3477 835 : emit_move_insn (tempreg, validize_mem (copy_rtx (data->entry_parm)));
3478 :
3479 : /* Some ABIs require scalar floating point modes to be passed
3480 : in a wider scalar integer mode. We need to explicitly
3481 : truncate to an integer mode of the correct precision before
3482 : using a SUBREG to reinterpret as a floating point value. */
3483 835 : if (SCALAR_FLOAT_MODE_P (data->nominal_mode)
3484 101 : && SCALAR_INT_MODE_P (data->arg.mode)
3485 835 : && known_lt (GET_MODE_SIZE (data->nominal_mode),
3486 : GET_MODE_SIZE (data->arg.mode)))
3487 0 : tempreg = convert_wider_int_to_float (data->nominal_mode,
3488 : data->arg.mode, tempreg);
3489 :
3490 835 : push_to_sequence2 (all->first_conversion_insn, all->last_conversion_insn);
3491 835 : to_conversion = true;
3492 :
3493 2505 : data->entry_parm = convert_to_mode (data->nominal_mode, tempreg,
3494 835 : TYPE_UNSIGNED (TREE_TYPE (parm)));
3495 :
3496 835 : if (data->stack_parm)
3497 : {
3498 0 : poly_int64 offset
3499 0 : = subreg_lowpart_offset (data->nominal_mode,
3500 0 : GET_MODE (data->stack_parm));
3501 : /* ??? This may need a big-endian conversion on sparc64. */
3502 0 : data->stack_parm
3503 0 : = adjust_address (data->stack_parm, data->nominal_mode, 0);
3504 0 : if (maybe_ne (offset, 0) && MEM_OFFSET_KNOWN_P (data->stack_parm))
3505 0 : set_mem_offset (data->stack_parm,
3506 0 : MEM_OFFSET (data->stack_parm) + offset);
3507 : }
3508 : }
3509 :
3510 818160 : if (data->entry_parm != data->stack_parm)
3511 : {
3512 641092 : rtx src, dest;
3513 :
3514 641092 : if (data->stack_parm == 0)
3515 : {
3516 591334 : int align = STACK_SLOT_ALIGNMENT (data->arg.type,
3517 : GET_MODE (data->entry_parm),
3518 : TYPE_ALIGN (data->arg.type));
3519 591334 : if (align < (int)GET_MODE_ALIGNMENT (GET_MODE (data->entry_parm))
3520 593889 : && ((optab_handler (movmisalign_optab,
3521 2555 : GET_MODE (data->entry_parm))
3522 : != CODE_FOR_nothing)
3523 2555 : || targetm.slow_unaligned_access (GET_MODE (data->entry_parm),
3524 : align)))
3525 0 : align = GET_MODE_ALIGNMENT (GET_MODE (data->entry_parm));
3526 591334 : data->stack_parm
3527 591334 : = assign_stack_local (GET_MODE (data->entry_parm),
3528 1182668 : GET_MODE_SIZE (GET_MODE (data->entry_parm)),
3529 : align);
3530 591334 : align = MEM_ALIGN (data->stack_parm);
3531 591334 : set_mem_attributes (data->stack_parm, parm, 1);
3532 591334 : set_mem_align (data->stack_parm, align);
3533 : }
3534 :
3535 641092 : dest = validize_mem (copy_rtx (data->stack_parm));
3536 641092 : src = validize_mem (copy_rtx (data->entry_parm));
3537 :
3538 641092 : if (TYPE_EMPTY_P (data->arg.type))
3539 : /* Empty types don't really need to be copied. */;
3540 639676 : else if (MEM_P (src))
3541 : {
3542 : /* Use a block move to handle potentially misaligned entry_parm. */
3543 61 : if (!to_conversion)
3544 61 : push_to_sequence2 (all->first_conversion_insn,
3545 : all->last_conversion_insn);
3546 61 : to_conversion = true;
3547 :
3548 61 : emit_block_move (dest, src,
3549 61 : GEN_INT (int_size_in_bytes (data->arg.type)),
3550 : BLOCK_OP_NORMAL);
3551 : }
3552 : else
3553 : {
3554 639615 : if (!REG_P (src))
3555 1531 : src = force_reg (GET_MODE (src), src);
3556 639615 : emit_move_insn (dest, src);
3557 : }
3558 : }
3559 :
3560 818160 : if (to_conversion)
3561 : {
3562 896 : all->first_conversion_insn = get_insns ();
3563 896 : all->last_conversion_insn = get_last_insn ();
3564 896 : end_sequence ();
3565 : }
3566 :
3567 818160 : set_parm_rtl (parm, data->stack_parm);
3568 818160 : }
3569 :
3570 : /* A subroutine of assign_parms. If the ABI splits complex arguments, then
3571 : undo the frobbing that we did in assign_parms_augmented_arg_list. */
3572 :
3573 : static void
3574 0 : assign_parms_unsplit_complex (struct assign_parm_data_all *all,
3575 : vec<tree> fnargs)
3576 : {
3577 0 : tree parm;
3578 0 : tree orig_fnargs = all->orig_fnargs;
3579 0 : unsigned i = 0;
3580 :
3581 0 : for (parm = orig_fnargs; parm; parm = TREE_CHAIN (parm), ++i)
3582 : {
3583 0 : if (TREE_CODE (TREE_TYPE (parm)) == COMPLEX_TYPE
3584 0 : && targetm.calls.split_complex_arg (TREE_TYPE (parm)))
3585 : {
3586 0 : rtx tmp, real, imag;
3587 0 : scalar_mode inner = GET_MODE_INNER (DECL_MODE (parm));
3588 :
3589 0 : real = DECL_RTL (fnargs[i]);
3590 0 : imag = DECL_RTL (fnargs[i + 1]);
3591 0 : if (inner != GET_MODE (real))
3592 : {
3593 0 : real = gen_lowpart_SUBREG (inner, real);
3594 0 : imag = gen_lowpart_SUBREG (inner, imag);
3595 : }
3596 :
3597 0 : if (TREE_ADDRESSABLE (parm))
3598 : {
3599 0 : rtx rmem, imem;
3600 0 : HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (parm));
3601 0 : int align = STACK_SLOT_ALIGNMENT (TREE_TYPE (parm),
3602 : DECL_MODE (parm),
3603 : TYPE_ALIGN (TREE_TYPE (parm)));
3604 :
3605 : /* split_complex_arg put the real and imag parts in
3606 : pseudos. Move them to memory. */
3607 0 : tmp = assign_stack_local (DECL_MODE (parm), size, align);
3608 0 : set_mem_attributes (tmp, parm, 1);
3609 0 : rmem = adjust_address_nv (tmp, inner, 0);
3610 0 : imem = adjust_address_nv (tmp, inner, GET_MODE_SIZE (inner));
3611 0 : push_to_sequence2 (all->first_conversion_insn,
3612 : all->last_conversion_insn);
3613 0 : emit_move_insn (rmem, real);
3614 0 : emit_move_insn (imem, imag);
3615 0 : all->first_conversion_insn = get_insns ();
3616 0 : all->last_conversion_insn = get_last_insn ();
3617 0 : end_sequence ();
3618 : }
3619 : else
3620 0 : tmp = gen_rtx_CONCAT (DECL_MODE (parm), real, imag);
3621 0 : set_parm_rtl (parm, tmp);
3622 :
3623 0 : real = DECL_INCOMING_RTL (fnargs[i]);
3624 0 : imag = DECL_INCOMING_RTL (fnargs[i + 1]);
3625 0 : if (inner != GET_MODE (real))
3626 : {
3627 0 : real = gen_lowpart_SUBREG (inner, real);
3628 0 : imag = gen_lowpart_SUBREG (inner, imag);
3629 : }
3630 0 : tmp = gen_rtx_CONCAT (DECL_MODE (parm), real, imag);
3631 0 : set_decl_incoming_rtl (parm, tmp, false);
3632 0 : i++;
3633 : }
3634 : }
3635 0 : }
3636 :
3637 : /* Assign RTL expressions to the function's parameters. This may involve
3638 : copying them into registers and using those registers as the DECL_RTL. */
3639 :
3640 : static void
3641 1480896 : assign_parms (tree fndecl)
3642 : {
3643 1480896 : struct assign_parm_data_all all;
3644 1480896 : tree parm;
3645 1480896 : vec<tree> fnargs;
3646 1480896 : unsigned i;
3647 :
3648 1480896 : crtl->args.internal_arg_pointer
3649 1480896 : = targetm.calls.internal_arg_pointer ();
3650 :
3651 1480896 : assign_parms_initialize_all (&all);
3652 1480896 : fnargs = assign_parms_augmented_arg_list (&all);
3653 :
3654 1480896 : if (TYPE_NO_NAMED_ARGS_STDARG_P (TREE_TYPE (fndecl))
3655 1480896 : && fnargs.is_empty ())
3656 : {
3657 97 : struct assign_parm_data_one data = {};
3658 97 : assign_parms_setup_varargs (&all, &data, false);
3659 : }
3660 :
3661 4661882 : FOR_EACH_VEC_ELT (fnargs, i, parm)
3662 : {
3663 3180986 : struct assign_parm_data_one data;
3664 :
3665 : /* Extract the type of PARM; adjust it according to ABI. */
3666 3180986 : assign_parm_find_data_types (&all, parm, &data);
3667 :
3668 : /* Early out for errors and void parameters. */
3669 3180986 : if (data.passed_mode == VOIDmode)
3670 : {
3671 0 : SET_DECL_RTL (parm, const0_rtx);
3672 0 : DECL_INCOMING_RTL (parm) = DECL_RTL (parm);
3673 0 : continue;
3674 : }
3675 :
3676 : /* Estimate stack alignment from parameter alignment. */
3677 3180986 : if (SUPPORTS_STACK_ALIGNMENT)
3678 : {
3679 3180986 : unsigned int align
3680 6361972 : = targetm.calls.function_arg_boundary (data.arg.mode,
3681 3180986 : data.arg.type);
3682 3180986 : align = MINIMUM_ALIGNMENT (data.arg.type, data.arg.mode, align);
3683 3180986 : if (TYPE_ALIGN (data.nominal_type) > align)
3684 4732 : align = MINIMUM_ALIGNMENT (data.nominal_type,
3685 : TYPE_MODE (data.nominal_type),
3686 : TYPE_ALIGN (data.nominal_type));
3687 3180986 : if (crtl->stack_alignment_estimated < align)
3688 : {
3689 358578 : gcc_assert (!crtl->stack_realign_processed);
3690 358578 : crtl->stack_alignment_estimated = align;
3691 : }
3692 : }
3693 :
3694 : /* Find out where the parameter arrives in this function. */
3695 3180986 : assign_parm_find_entry_rtl (&all, &data);
3696 :
3697 : /* Find out where stack space for this parameter might be. */
3698 3180986 : if (assign_parm_is_stack_parm (&all, &data))
3699 : {
3700 1180284 : assign_parm_find_stack_rtl (parm, &data);
3701 1180284 : assign_parm_adjust_entry_rtl (&data);
3702 : /* For arguments that occupy no space in the parameter
3703 : passing area, have non-zero size and have address taken,
3704 : force creation of a stack slot so that they have distinct
3705 : address from other parameters. */
3706 1180284 : if (TYPE_EMPTY_P (data.arg.type)
3707 6639 : && TREE_ADDRESSABLE (parm)
3708 1575 : && data.entry_parm == data.stack_parm
3709 1575 : && MEM_P (data.entry_parm)
3710 1181859 : && int_size_in_bytes (data.arg.type))
3711 1409 : data.stack_parm = NULL_RTX;
3712 : }
3713 : /* Record permanently how this parm was passed. */
3714 3180986 : if (data.arg.pass_by_reference)
3715 : {
3716 4930 : rtx incoming_rtl
3717 4930 : = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (data.arg.type)),
3718 : data.entry_parm);
3719 4930 : set_decl_incoming_rtl (parm, incoming_rtl, true);
3720 : }
3721 : else
3722 3176056 : set_decl_incoming_rtl (parm, data.entry_parm, false);
3723 :
3724 3180986 : assign_parm_adjust_stack_rtl (parm, &data);
3725 :
3726 3180986 : if (assign_parm_setup_block_p (&data))
3727 73813 : assign_parm_setup_block (&all, parm, &data);
3728 3107173 : else if (data.arg.pass_by_reference || use_register_for_decl (parm))
3729 2289013 : assign_parm_setup_reg (&all, parm, &data);
3730 : else
3731 818160 : assign_parm_setup_stack (&all, parm, &data);
3732 :
3733 3180986 : if (cfun->stdarg && !DECL_CHAIN (parm))
3734 21423 : assign_parms_setup_varargs (&all, &data, false);
3735 :
3736 : /* Update info on where next arg arrives in registers. */
3737 3180986 : targetm.calls.function_arg_advance (all.args_so_far, data.arg);
3738 : }
3739 :
3740 1480896 : if (targetm.calls.split_complex_arg)
3741 0 : assign_parms_unsplit_complex (&all, fnargs);
3742 :
3743 1480896 : fnargs.release ();
3744 :
3745 : /* Output all parameter conversion instructions (possibly including calls)
3746 : now that all parameters have been copied out of hard registers. */
3747 1480896 : emit_insn (all.first_conversion_insn);
3748 :
3749 1480896 : do_pending_stack_adjust ();
3750 :
3751 : /* Estimate reload stack alignment from scalar return mode. */
3752 1480896 : if (SUPPORTS_STACK_ALIGNMENT)
3753 : {
3754 1480896 : if (DECL_RESULT (fndecl))
3755 : {
3756 1480896 : tree type = TREE_TYPE (DECL_RESULT (fndecl));
3757 1480896 : machine_mode mode = TYPE_MODE (type);
3758 :
3759 1480896 : if (mode != BLKmode
3760 1429926 : && mode != VOIDmode
3761 737999 : && !AGGREGATE_TYPE_P (type))
3762 : {
3763 677305 : unsigned int align = GET_MODE_ALIGNMENT (mode);
3764 677305 : if (crtl->stack_alignment_estimated < align)
3765 : {
3766 10 : gcc_assert (!crtl->stack_realign_processed);
3767 10 : crtl->stack_alignment_estimated = align;
3768 : }
3769 : }
3770 : }
3771 : }
3772 :
3773 : /* If we are receiving a struct value address as the first argument, set up
3774 : the RTL for the function result. As this might require code to convert
3775 : the transmitted address to Pmode, we do this here to ensure that possible
3776 : preliminary conversions of the address have been emitted already. */
3777 1480896 : if (all.function_result_decl)
3778 : {
3779 69518 : tree result = DECL_RESULT (current_function_decl);
3780 69518 : rtx addr = DECL_RTL (all.function_result_decl);
3781 69518 : rtx x;
3782 :
3783 69518 : if (DECL_BY_REFERENCE (result))
3784 : {
3785 8871 : SET_DECL_VALUE_EXPR (result, all.function_result_decl);
3786 8871 : x = addr;
3787 : }
3788 : else
3789 : {
3790 60647 : SET_DECL_VALUE_EXPR (result,
3791 : build1 (INDIRECT_REF, TREE_TYPE (result),
3792 : all.function_result_decl));
3793 60647 : addr = convert_memory_address (Pmode, addr);
3794 60647 : x = gen_rtx_MEM (DECL_MODE (result), addr);
3795 60647 : set_mem_attributes (x, result, 1);
3796 : }
3797 :
3798 69518 : DECL_HAS_VALUE_EXPR_P (result) = 1;
3799 :
3800 69518 : set_parm_rtl (result, x);
3801 : }
3802 :
3803 : /* We have aligned all the args, so add space for the pretend args. */
3804 1480896 : crtl->args.pretend_args_size = all.pretend_args_size;
3805 1480896 : all.stack_args_size.constant += all.extra_pretend_bytes;
3806 1480896 : crtl->args.size = all.stack_args_size.constant;
3807 :
3808 : /* Adjust function incoming argument size for alignment and
3809 : minimum length. */
3810 :
3811 1480896 : crtl->args.size = upper_bound (crtl->args.size, all.reg_parm_stack_space);
3812 2961792 : crtl->args.size = aligned_upper_bound (crtl->args.size,
3813 1480896 : PARM_BOUNDARY / BITS_PER_UNIT);
3814 :
3815 1480896 : if (ARGS_GROW_DOWNWARD)
3816 : {
3817 : crtl->args.arg_offset_rtx
3818 : = (all.stack_args_size.var == 0
3819 : ? gen_int_mode (-all.stack_args_size.constant, Pmode)
3820 : : expand_expr (size_diffop (all.stack_args_size.var,
3821 : size_int (-all.stack_args_size.constant)),
3822 : NULL_RTX, VOIDmode, EXPAND_NORMAL));
3823 : }
3824 : else
3825 1607489 : crtl->args.arg_offset_rtx = ARGS_SIZE_RTX (all.stack_args_size);
3826 :
3827 : /* See how many bytes, if any, of its args a function should try to pop
3828 : on return. */
3829 :
3830 1480896 : crtl->args.pops_args = targetm.calls.return_pops_args (fndecl,
3831 1480896 : TREE_TYPE (fndecl),
3832 : crtl->args.size);
3833 :
3834 : /* For stdarg.h function, save info about
3835 : regs and stack space used by the named args. */
3836 :
3837 1480896 : crtl->args.info = all.args_so_far_v;
3838 :
3839 : /* Set the rtx used for the function return value. Put this in its
3840 : own variable so any optimizers that need this information don't have
3841 : to include tree.h. Do this here so it gets done when an inlined
3842 : function gets output. */
3843 :
3844 1480896 : crtl->return_rtx
3845 1480896 : = (DECL_RTL_SET_P (DECL_RESULT (fndecl))
3846 2269865 : ? DECL_RTL (DECL_RESULT (fndecl)) : NULL_RTX);
3847 :
3848 : /* If scalar return value was computed in a pseudo-reg, or was a named
3849 : return value that got dumped to the stack, copy that to the hard
3850 : return register. */
3851 1480896 : if (DECL_RTL_SET_P (DECL_RESULT (fndecl)))
3852 : {
3853 788969 : tree decl_result = DECL_RESULT (fndecl);
3854 788969 : rtx decl_rtl = DECL_RTL (decl_result);
3855 :
3856 788969 : if (REG_P (decl_rtl)
3857 788969 : ? REGNO (decl_rtl) >= FIRST_PSEUDO_REGISTER
3858 67347 : : DECL_REGISTER (decl_result))
3859 : {
3860 726431 : rtx real_decl_rtl;
3861 :
3862 : /* Unless the psABI says not to. */
3863 726431 : if (TYPE_EMPTY_P (TREE_TYPE (decl_result)))
3864 : real_decl_rtl = NULL_RTX;
3865 : else
3866 : {
3867 721755 : real_decl_rtl
3868 721755 : = targetm.calls.function_value (TREE_TYPE (decl_result),
3869 : fndecl, true);
3870 721755 : REG_FUNCTION_VALUE_P (real_decl_rtl) = 1;
3871 : }
3872 : /* The delay slot scheduler assumes that crtl->return_rtx
3873 : holds the hard register containing the return value, not a
3874 : temporary pseudo. */
3875 726431 : crtl->return_rtx = real_decl_rtl;
3876 : }
3877 : }
3878 1480896 : }
3879 :
3880 : /* Gimplify the parameter list for current_function_decl. This involves
3881 : evaluating SAVE_EXPRs of variable sized parameters and generating code
3882 : to implement callee-copies reference parameters. Returns a sequence of
3883 : statements to add to the beginning of the function. */
3884 :
3885 : gimple_seq
3886 2899479 : gimplify_parameters (gimple_seq *cleanup)
3887 : {
3888 2899479 : struct assign_parm_data_all all;
3889 2899479 : tree parm;
3890 2899479 : gimple_seq stmts = NULL;
3891 2899479 : vec<tree> fnargs;
3892 2899479 : unsigned i;
3893 :
3894 2899479 : assign_parms_initialize_all (&all);
3895 2899479 : fnargs = assign_parms_augmented_arg_list (&all);
3896 :
3897 8886120 : FOR_EACH_VEC_ELT (fnargs, i, parm)
3898 : {
3899 5986641 : struct assign_parm_data_one data;
3900 :
3901 : /* Extract the type of PARM; adjust it according to ABI. */
3902 5986641 : assign_parm_find_data_types (&all, parm, &data);
3903 :
3904 : /* Early out for errors and void parameters. */
3905 5986641 : if (data.passed_mode == VOIDmode || DECL_SIZE (parm) == NULL)
3906 87 : continue;
3907 :
3908 : /* Update info on where next arg arrives in registers. */
3909 5986554 : targetm.calls.function_arg_advance (all.args_so_far, data.arg);
3910 :
3911 : /* ??? Once upon a time variable_size stuffed parameter list
3912 : SAVE_EXPRs (amongst others) onto a pending sizes list. This
3913 : turned out to be less than manageable in the gimple world.
3914 : Now we have to hunt them down ourselves. */
3915 5986554 : gimplify_type_sizes (TREE_TYPE (parm), &stmts);
3916 :
3917 5986554 : if (TREE_CODE (DECL_SIZE_UNIT (parm)) != INTEGER_CST)
3918 : {
3919 41 : gimplify_one_sizepos (&DECL_SIZE (parm), &stmts);
3920 41 : gimplify_one_sizepos (&DECL_SIZE_UNIT (parm), &stmts);
3921 : }
3922 :
3923 5986554 : if (data.arg.pass_by_reference)
3924 : {
3925 4938 : tree type = TREE_TYPE (data.arg.type);
3926 4938 : function_arg_info orig_arg (type, data.arg.named);
3927 4938 : if (reference_callee_copied (&all.args_so_far_v, orig_arg))
3928 : {
3929 0 : tree local, t;
3930 :
3931 : /* For constant-sized objects, this is trivial; for
3932 : variable-sized objects, we have to play games. */
3933 0 : if (TREE_CODE (DECL_SIZE_UNIT (parm)) == INTEGER_CST
3934 0 : && !(flag_stack_check == GENERIC_STACK_CHECK
3935 0 : && compare_tree_int (DECL_SIZE_UNIT (parm),
3936 : STACK_CHECK_MAX_VAR_SIZE) > 0))
3937 : {
3938 0 : local = create_tmp_var (type, get_name (parm));
3939 0 : DECL_IGNORED_P (local) = 0;
3940 : /* If PARM was addressable, move that flag over
3941 : to the local copy, as its address will be taken,
3942 : not the PARMs. Keep the parms address taken
3943 : as we'll query that flag during gimplification. */
3944 0 : if (TREE_ADDRESSABLE (parm))
3945 0 : TREE_ADDRESSABLE (local) = 1;
3946 0 : if (DECL_NOT_GIMPLE_REG_P (parm))
3947 0 : DECL_NOT_GIMPLE_REG_P (local) = 1;
3948 :
3949 0 : if (!is_gimple_reg (local)
3950 0 : && flag_stack_reuse != SR_NONE)
3951 : {
3952 0 : tree clobber = build_clobber (type);
3953 0 : gimple *clobber_stmt;
3954 0 : clobber_stmt = gimple_build_assign (local, clobber);
3955 0 : gimple_seq_add_stmt (cleanup, clobber_stmt);
3956 : }
3957 : }
3958 : else
3959 : {
3960 0 : tree ptr_type, addr;
3961 :
3962 0 : ptr_type = build_pointer_type (type);
3963 0 : addr = create_tmp_reg (ptr_type, get_name (parm));
3964 0 : DECL_IGNORED_P (addr) = 0;
3965 0 : local = build_fold_indirect_ref (addr);
3966 :
3967 0 : t = build_alloca_call_expr (DECL_SIZE_UNIT (parm),
3968 0 : DECL_ALIGN (parm),
3969 : max_int_size_in_bytes (type));
3970 : /* The call has been built for a variable-sized object. */
3971 0 : CALL_ALLOCA_FOR_VAR_P (t) = 1;
3972 0 : t = fold_convert (ptr_type, t);
3973 0 : t = build2 (MODIFY_EXPR, TREE_TYPE (addr), addr, t);
3974 0 : gimplify_and_add (t, &stmts);
3975 : }
3976 :
3977 0 : gimplify_assign (local, parm, &stmts);
3978 :
3979 0 : SET_DECL_VALUE_EXPR (parm, local);
3980 0 : DECL_HAS_VALUE_EXPR_P (parm) = 1;
3981 : }
3982 : }
3983 : }
3984 :
3985 2899479 : fnargs.release ();
3986 :
3987 2899479 : return stmts;
3988 : }
3989 :
3990 : /* Compute the size and offset from the start of the stacked arguments for a
3991 : parm passed in mode PASSED_MODE and with type TYPE.
3992 :
3993 : INITIAL_OFFSET_PTR points to the current offset into the stacked
3994 : arguments.
3995 :
3996 : The starting offset and size for this parm are returned in
3997 : LOCATE->OFFSET and LOCATE->SIZE, respectively. When IN_REGS is
3998 : nonzero, the offset is that of stack slot, which is returned in
3999 : LOCATE->SLOT_OFFSET. LOCATE->ALIGNMENT_PAD is the amount of
4000 : padding required from the initial offset ptr to the stack slot.
4001 :
4002 : IN_REGS is nonzero if the argument will be passed in registers. It will
4003 : never be set if REG_PARM_STACK_SPACE is not defined.
4004 :
4005 : REG_PARM_STACK_SPACE is the number of bytes of stack space reserved
4006 : for arguments which are passed in registers.
4007 :
4008 : FNDECL is the function in which the argument was defined.
4009 :
4010 : There are two types of rounding that are done. The first, controlled by
4011 : TARGET_FUNCTION_ARG_BOUNDARY, forces the offset from the start of the
4012 : argument list to be aligned to the specific boundary (in bits). This
4013 : rounding affects the initial and starting offsets, but not the argument
4014 : size.
4015 :
4016 : The second, controlled by TARGET_FUNCTION_ARG_PADDING and PARM_BOUNDARY,
4017 : optionally rounds the size of the parm to PARM_BOUNDARY. The
4018 : initial offset is not affected by this rounding, while the size always
4019 : is and the starting offset may be. */
4020 :
4021 : /* LOCATE->OFFSET will be negative for ARGS_GROW_DOWNWARD case;
4022 : INITIAL_OFFSET_PTR is positive because locate_and_pad_parm's
4023 : callers pass in the total size of args so far as
4024 : INITIAL_OFFSET_PTR. LOCATE->SIZE is always positive. */
4025 :
4026 : void
4027 5431561 : locate_and_pad_parm (machine_mode passed_mode, tree type, int in_regs,
4028 : int reg_parm_stack_space, int partial,
4029 : tree fndecl ATTRIBUTE_UNUSED,
4030 : struct args_size *initial_offset_ptr,
4031 : struct locate_and_pad_arg_data *locate)
4032 : {
4033 5431561 : tree sizetree;
4034 5431561 : pad_direction where_pad;
4035 5431561 : unsigned int boundary, round_boundary;
4036 5431561 : int part_size_in_regs;
4037 :
4038 : /* If we have found a stack parm before we reach the end of the
4039 : area reserved for registers, skip that area. */
4040 5431561 : if (! in_regs)
4041 : {
4042 3254569 : if (reg_parm_stack_space > 0)
4043 : {
4044 83755 : if (initial_offset_ptr->var
4045 83755 : || !ordered_p (initial_offset_ptr->constant,
4046 : reg_parm_stack_space))
4047 : {
4048 0 : initial_offset_ptr->var
4049 0 : = size_binop (MAX_EXPR, ARGS_SIZE_TREE (*initial_offset_ptr),
4050 : ssize_int (reg_parm_stack_space));
4051 0 : initial_offset_ptr->constant = 0;
4052 : }
4053 : else
4054 83755 : initial_offset_ptr->constant
4055 83755 : = ordered_max (initial_offset_ptr->constant,
4056 : reg_parm_stack_space);
4057 : }
4058 : }
4059 :
4060 5431561 : part_size_in_regs = (reg_parm_stack_space == 0 ? partial : 0);
4061 :
4062 5431561 : sizetree = (type
4063 5431561 : ? arg_size_in_bytes (type)
4064 37927 : : size_int (GET_MODE_SIZE (passed_mode)));
4065 5431561 : where_pad = targetm.calls.function_arg_padding (passed_mode, type);
4066 5431561 : boundary = targetm.calls.function_arg_boundary (passed_mode, type);
4067 5431561 : round_boundary = targetm.calls.function_arg_round_boundary (passed_mode,
4068 : type);
4069 5431561 : locate->where_pad = where_pad;
4070 :
4071 : /* Alignment can't exceed MAX_SUPPORTED_STACK_ALIGNMENT. */
4072 5431561 : if (boundary > MAX_SUPPORTED_STACK_ALIGNMENT)
4073 : boundary = MAX_SUPPORTED_STACK_ALIGNMENT;
4074 :
4075 5431561 : locate->boundary = boundary;
4076 :
4077 5431561 : if (SUPPORTS_STACK_ALIGNMENT)
4078 : {
4079 : /* stack_alignment_estimated can't change after stack has been
4080 : realigned. */
4081 5431561 : if (crtl->stack_alignment_estimated < boundary)
4082 : {
4083 5072 : if (!crtl->stack_realign_processed)
4084 5072 : crtl->stack_alignment_estimated = boundary;
4085 : else
4086 : {
4087 : /* If stack is realigned and stack alignment value
4088 : hasn't been finalized, it is OK not to increase
4089 : stack_alignment_estimated. The bigger alignment
4090 : requirement is recorded in stack_alignment_needed
4091 : below. */
4092 0 : gcc_assert (!crtl->stack_realign_finalized
4093 : && crtl->stack_realign_needed);
4094 : }
4095 : }
4096 : }
4097 :
4098 5431561 : if (ARGS_GROW_DOWNWARD)
4099 : {
4100 : locate->slot_offset.constant = -initial_offset_ptr->constant;
4101 : if (initial_offset_ptr->var)
4102 : locate->slot_offset.var = size_binop (MINUS_EXPR, ssize_int (0),
4103 : initial_offset_ptr->var);
4104 :
4105 : {
4106 : tree s2 = sizetree;
4107 : if (where_pad != PAD_NONE
4108 : && (!tree_fits_uhwi_p (sizetree)
4109 : || (tree_to_uhwi (sizetree) * BITS_PER_UNIT) % round_boundary))
4110 : s2 = round_up (s2, round_boundary / BITS_PER_UNIT);
4111 : SUB_PARM_SIZE (locate->slot_offset, s2);
4112 : }
4113 :
4114 : locate->slot_offset.constant += part_size_in_regs;
4115 :
4116 : if (!in_regs || reg_parm_stack_space > 0)
4117 : pad_to_arg_alignment (&locate->slot_offset, boundary,
4118 : &locate->alignment_pad);
4119 :
4120 : locate->size.constant = (-initial_offset_ptr->constant
4121 : - locate->slot_offset.constant);
4122 : if (initial_offset_ptr->var)
4123 : locate->size.var = size_binop (MINUS_EXPR,
4124 : size_binop (MINUS_EXPR,
4125 : ssize_int (0),
4126 : initial_offset_ptr->var),
4127 : locate->slot_offset.var);
4128 :
4129 : /* Pad_below needs the pre-rounded size to know how much to pad
4130 : below. */
4131 : locate->offset = locate->slot_offset;
4132 : if (where_pad == PAD_DOWNWARD)
4133 : pad_below (&locate->offset, passed_mode, sizetree);
4134 :
4135 : }
4136 : else
4137 : {
4138 5431561 : if (!in_regs || reg_parm_stack_space > 0)
4139 3430859 : pad_to_arg_alignment (initial_offset_ptr, boundary,
4140 : &locate->alignment_pad);
4141 5431561 : locate->slot_offset = *initial_offset_ptr;
4142 :
4143 : #ifdef PUSH_ROUNDING
4144 5431561 : if (passed_mode != BLKmode)
4145 5086959 : sizetree = size_int (PUSH_ROUNDING (TREE_INT_CST_LOW (sizetree)));
4146 : #endif
4147 :
4148 : /* Pad_below needs the pre-rounded size to know how much to pad below
4149 : so this must be done before rounding up. */
4150 5431561 : locate->offset = locate->slot_offset;
4151 5431561 : if (where_pad == PAD_DOWNWARD)
4152 0 : pad_below (&locate->offset, passed_mode, sizetree);
4153 :
4154 5431561 : if (where_pad != PAD_NONE
4155 5431561 : && (!tree_fits_uhwi_p (sizetree)
4156 5431561 : || (tree_to_uhwi (sizetree) * BITS_PER_UNIT) % round_boundary))
4157 18564 : sizetree = round_up (sizetree, round_boundary / BITS_PER_UNIT);
4158 :
4159 5431561 : ADD_PARM_SIZE (locate->size, sizetree);
4160 :
4161 5431561 : locate->size.constant -= part_size_in_regs;
4162 : }
4163 :
4164 5431561 : locate->offset.constant
4165 5431561 : += targetm.calls.function_arg_offset (passed_mode, type);
4166 5431561 : }
4167 :
4168 : /* Round the stack offset in *OFFSET_PTR up to a multiple of BOUNDARY.
4169 : BOUNDARY is measured in bits, but must be a multiple of a storage unit. */
4170 :
4171 : static void
4172 3430859 : pad_to_arg_alignment (struct args_size *offset_ptr, int boundary,
4173 : struct args_size *alignment_pad)
4174 : {
4175 3430859 : tree save_var = NULL_TREE;
4176 3430859 : poly_int64 save_constant = 0;
4177 3430859 : int boundary_in_bytes = boundary / BITS_PER_UNIT;
4178 3430859 : poly_int64 sp_offset = STACK_POINTER_OFFSET;
4179 :
4180 : #ifdef SPARC_STACK_BOUNDARY_HACK
4181 : /* ??? The SPARC port may claim a STACK_BOUNDARY higher than
4182 : the real alignment of %sp. However, when it does this, the
4183 : alignment of %sp+STACK_POINTER_OFFSET is STACK_BOUNDARY. */
4184 : if (SPARC_STACK_BOUNDARY_HACK)
4185 : sp_offset = 0;
4186 : #endif
4187 :
4188 5193244 : if (boundary > PARM_BOUNDARY)
4189 : {
4190 140276 : save_var = offset_ptr->var;
4191 140276 : save_constant = offset_ptr->constant;
4192 : }
4193 :
4194 3430859 : alignment_pad->var = NULL_TREE;
4195 3430859 : alignment_pad->constant = 0;
4196 :
4197 3430859 : if (boundary > BITS_PER_UNIT)
4198 : {
4199 3430859 : int misalign;
4200 3430859 : if (offset_ptr->var
4201 3430859 : || !known_misalignment (offset_ptr->constant + sp_offset,
4202 : boundary_in_bytes, &misalign))
4203 : {
4204 0 : tree sp_offset_tree = ssize_int (sp_offset);
4205 0 : tree offset = size_binop (PLUS_EXPR,
4206 : ARGS_SIZE_TREE (*offset_ptr),
4207 : sp_offset_tree);
4208 0 : tree rounded;
4209 0 : if (ARGS_GROW_DOWNWARD)
4210 : rounded = round_down (offset, boundary / BITS_PER_UNIT);
4211 : else
4212 0 : rounded = round_up (offset, boundary / BITS_PER_UNIT);
4213 :
4214 0 : offset_ptr->var = size_binop (MINUS_EXPR, rounded, sp_offset_tree);
4215 : /* ARGS_SIZE_TREE includes constant term. */
4216 0 : offset_ptr->constant = 0;
4217 0 : if (boundary > PARM_BOUNDARY)
4218 0 : alignment_pad->var = size_binop (MINUS_EXPR, offset_ptr->var,
4219 : save_var);
4220 : }
4221 : else
4222 : {
4223 3430859 : if (ARGS_GROW_DOWNWARD)
4224 : offset_ptr->constant -= misalign;
4225 : else
4226 3430859 : offset_ptr->constant += -misalign & (boundary_in_bytes - 1);
4227 :
4228 3430859 : if (boundary > PARM_BOUNDARY)
4229 140276 : alignment_pad->constant = offset_ptr->constant - save_constant;
4230 : }
4231 : }
4232 3430859 : }
4233 :
4234 : static void
4235 0 : pad_below (struct args_size *offset_ptr, machine_mode passed_mode, tree sizetree)
4236 : {
4237 0 : unsigned int align = PARM_BOUNDARY / BITS_PER_UNIT;
4238 0 : int misalign;
4239 0 : if (passed_mode != BLKmode
4240 0 : && known_misalignment (GET_MODE_SIZE (passed_mode), align, &misalign))
4241 0 : offset_ptr->constant += -misalign & (align - 1);
4242 : else
4243 : {
4244 0 : if (TREE_CODE (sizetree) != INTEGER_CST
4245 0 : || (TREE_INT_CST_LOW (sizetree) & (align - 1)) != 0)
4246 : {
4247 : /* Round the size up to multiple of PARM_BOUNDARY bits. */
4248 0 : tree s2 = round_up (sizetree, align);
4249 : /* Add it in. */
4250 0 : ADD_PARM_SIZE (*offset_ptr, s2);
4251 0 : SUB_PARM_SIZE (*offset_ptr, sizetree);
4252 : }
4253 : }
4254 0 : }
4255 :
4256 :
4257 : /* True if register REGNO was alive at a place where `setjmp' was
4258 : called and was set more than once or is an argument. Such regs may
4259 : be clobbered by `longjmp'. */
4260 :
4261 : static bool
4262 44 : regno_clobbered_at_setjmp (bitmap setjmp_crosses, int regno)
4263 : {
4264 : /* There appear to be cases where some local vars never reach the
4265 : backend but have bogus regnos. */
4266 44 : if (regno >= max_reg_num ())
4267 : return false;
4268 :
4269 44 : return ((REG_N_SETS (regno) > 1
4270 42 : || REGNO_REG_SET_P (df_get_live_out (ENTRY_BLOCK_PTR_FOR_FN (cfun)),
4271 : regno))
4272 44 : && REGNO_REG_SET_P (setjmp_crosses, regno));
4273 : }
4274 :
4275 : /* Walk the tree of blocks describing the binding levels within a
4276 : function and warn about variables the might be killed by setjmp or
4277 : vfork. This is done after calling flow_analysis before register
4278 : allocation since that will clobber the pseudo-regs to hard
4279 : regs. */
4280 :
4281 : static void
4282 78 : setjmp_vars_warning (bitmap setjmp_crosses, tree block)
4283 : {
4284 78 : tree decl, sub;
4285 :
4286 197 : for (decl = BLOCK_VARS (block); decl; decl = DECL_CHAIN (decl))
4287 : {
4288 119 : if (VAR_P (decl)
4289 119 : && DECL_RTL_SET_P (decl)
4290 25 : && REG_P (DECL_RTL (decl))
4291 137 : && regno_clobbered_at_setjmp (setjmp_crosses, REGNO (DECL_RTL (decl))))
4292 1 : warning (OPT_Wclobbered, "variable %q+D might be clobbered by"
4293 : " %<longjmp%> or %<vfork%>", decl);
4294 : }
4295 :
4296 134 : for (sub = BLOCK_SUBBLOCKS (block); sub; sub = BLOCK_CHAIN (sub))
4297 56 : setjmp_vars_warning (setjmp_crosses, sub);
4298 78 : }
4299 :
4300 : /* Do the appropriate part of setjmp_vars_warning
4301 : but for arguments instead of local variables. */
4302 :
4303 : static void
4304 22 : setjmp_args_warning (bitmap setjmp_crosses)
4305 : {
4306 22 : tree decl;
4307 22 : for (decl = DECL_ARGUMENTS (current_function_decl);
4308 48 : decl; decl = DECL_CHAIN (decl))
4309 26 : if (DECL_RTL (decl) != 0
4310 26 : && REG_P (DECL_RTL (decl))
4311 52 : && regno_clobbered_at_setjmp (setjmp_crosses, REGNO (DECL_RTL (decl))))
4312 0 : warning (OPT_Wclobbered,
4313 : "argument %q+D might be clobbered by %<longjmp%> or %<vfork%>",
4314 : decl);
4315 22 : }
4316 :
4317 : /* Generate warning messages for variables live across setjmp. */
4318 :
4319 : void
4320 135308 : generate_setjmp_warnings (void)
4321 : {
4322 135308 : bitmap setjmp_crosses = regstat_get_setjmp_crosses ();
4323 :
4324 135308 : if (n_basic_blocks_for_fn (cfun) == NUM_FIXED_BLOCKS
4325 135308 : || bitmap_empty_p (setjmp_crosses))
4326 : return;
4327 :
4328 22 : setjmp_vars_warning (setjmp_crosses, DECL_INITIAL (current_function_decl));
4329 22 : setjmp_args_warning (setjmp_crosses);
4330 : }
4331 :
4332 :
4333 : /* Reverse the order of elements in the fragment chain T of blocks,
4334 : and return the new head of the chain (old last element).
4335 : In addition to that clear BLOCK_SAME_RANGE flags when needed
4336 : and adjust BLOCK_SUPERCONTEXT from the super fragment to
4337 : its super fragment origin. */
4338 :
4339 : static tree
4340 5554573 : block_fragments_nreverse (tree t)
4341 : {
4342 5554573 : tree prev = 0, block, next, prev_super = 0;
4343 5554573 : tree super = BLOCK_SUPERCONTEXT (t);
4344 5554573 : if (BLOCK_FRAGMENT_ORIGIN (super))
4345 4596959 : super = BLOCK_FRAGMENT_ORIGIN (super);
4346 16643305 : for (block = t; block; block = next)
4347 : {
4348 11088732 : next = BLOCK_FRAGMENT_CHAIN (block);
4349 11088732 : BLOCK_FRAGMENT_CHAIN (block) = prev;
4350 5534159 : if ((prev && !BLOCK_SAME_RANGE (prev))
4351 14269628 : || (BLOCK_FRAGMENT_CHAIN (BLOCK_SUPERCONTEXT (block))
4352 : != prev_super))
4353 3245539 : BLOCK_SAME_RANGE (block) = 0;
4354 11088732 : prev_super = BLOCK_SUPERCONTEXT (block);
4355 11088732 : BLOCK_SUPERCONTEXT (block) = super;
4356 11088732 : prev = block;
4357 : }
4358 5554573 : t = BLOCK_FRAGMENT_ORIGIN (t);
4359 5554573 : if (BLOCK_FRAGMENT_CHAIN (BLOCK_SUPERCONTEXT (t))
4360 : != prev_super)
4361 1792795 : BLOCK_SAME_RANGE (t) = 0;
4362 5554573 : BLOCK_SUPERCONTEXT (t) = super;
4363 5554573 : return prev;
4364 : }
4365 :
4366 : /* Reverse the order of elements in the chain T of blocks,
4367 : and return the new head of the chain (old last element).
4368 : Also do the same on subblocks and reverse the order of elements
4369 : in BLOCK_FRAGMENT_CHAIN as well. */
4370 :
4371 : static tree
4372 23691161 : blocks_nreverse_all (tree t)
4373 : {
4374 23691161 : tree prev = 0, block, next;
4375 46807460 : for (block = t; block; block = next)
4376 : {
4377 23116299 : next = BLOCK_CHAIN (block);
4378 23116299 : BLOCK_CHAIN (block) = prev;
4379 23116299 : if (BLOCK_FRAGMENT_CHAIN (block)
4380 23116299 : && BLOCK_FRAGMENT_ORIGIN (block) == NULL_TREE)
4381 : {
4382 11109146 : BLOCK_FRAGMENT_CHAIN (block)
4383 5554573 : = block_fragments_nreverse (BLOCK_FRAGMENT_CHAIN (block));
4384 5554573 : if (!BLOCK_SAME_RANGE (BLOCK_FRAGMENT_CHAIN (block)))
4385 2664246 : BLOCK_SAME_RANGE (block) = 0;
4386 : }
4387 23116299 : BLOCK_SUBBLOCKS (block) = blocks_nreverse_all (BLOCK_SUBBLOCKS (block));
4388 23116299 : prev = block;
4389 : }
4390 23691161 : return prev;
4391 : }
4392 :
4393 :
4394 : /* Identify BLOCKs referenced by more than one NOTE_INSN_BLOCK_{BEG,END},
4395 : and create duplicate blocks. */
4396 : /* ??? Need an option to either create block fragments or to create
4397 : abstract origin duplicates of a source block. It really depends
4398 : on what optimization has been performed. */
4399 :
4400 : void
4401 574862 : reorder_blocks (void)
4402 : {
4403 574862 : tree block = DECL_INITIAL (current_function_decl);
4404 :
4405 574862 : if (block == NULL_TREE)
4406 0 : return;
4407 :
4408 574862 : auto_vec<tree, 10> block_stack;
4409 :
4410 : /* Reset the TREE_ASM_WRITTEN bit for all blocks. */
4411 574862 : clear_block_marks (block);
4412 :
4413 : /* Prune the old trees away, so that they don't get in the way. */
4414 574862 : BLOCK_SUBBLOCKS (block) = NULL_TREE;
4415 574862 : BLOCK_CHAIN (block) = NULL_TREE;
4416 :
4417 : /* Recreate the block tree from the note nesting. */
4418 574862 : reorder_blocks_1 (get_insns (), block, &block_stack);
4419 574862 : BLOCK_SUBBLOCKS (block) = blocks_nreverse_all (BLOCK_SUBBLOCKS (block));
4420 574862 : }
4421 :
4422 : /* Helper function for reorder_blocks. Reset TREE_ASM_WRITTEN. */
4423 :
4424 : void
4425 24349088 : clear_block_marks (tree block)
4426 : {
4427 45224374 : while (block)
4428 : {
4429 20875286 : TREE_ASM_WRITTEN (block) = 0;
4430 20875286 : clear_block_marks (BLOCK_SUBBLOCKS (block));
4431 20875286 : block = BLOCK_CHAIN (block);
4432 : }
4433 24349088 : }
4434 :
4435 : static void
4436 574862 : reorder_blocks_1 (rtx_insn *insns, tree current_block,
4437 : vec<tree> *p_block_stack)
4438 : {
4439 574862 : rtx_insn *insn;
4440 574862 : tree prev_beg = NULL_TREE, prev_end = NULL_TREE;
4441 :
4442 192021590 : for (insn = insns; insn; insn = NEXT_INSN (insn))
4443 : {
4444 191446728 : if (NOTE_P (insn))
4445 : {
4446 138721762 : if (NOTE_KIND (insn) == NOTE_INSN_BLOCK_BEG)
4447 : {
4448 23116299 : tree block = NOTE_BLOCK (insn);
4449 23116299 : tree origin;
4450 :
4451 23116299 : gcc_assert (BLOCK_FRAGMENT_ORIGIN (block) == NULL_TREE);
4452 23116299 : origin = block;
4453 :
4454 23116299 : if (prev_end)
4455 1413657 : BLOCK_SAME_RANGE (prev_end) = 0;
4456 23116299 : prev_end = NULL_TREE;
4457 :
4458 : /* If we have seen this block before, that means it now
4459 : spans multiple address regions. Create a new fragment. */
4460 23116299 : if (TREE_ASM_WRITTEN (block))
4461 : {
4462 11088732 : tree new_block = copy_node (block);
4463 :
4464 11088732 : BLOCK_SAME_RANGE (new_block) = 0;
4465 11088732 : BLOCK_FRAGMENT_ORIGIN (new_block) = origin;
4466 11088732 : BLOCK_FRAGMENT_CHAIN (new_block)
4467 11088732 : = BLOCK_FRAGMENT_CHAIN (origin);
4468 11088732 : BLOCK_FRAGMENT_CHAIN (origin) = new_block;
4469 :
4470 11088732 : NOTE_BLOCK (insn) = new_block;
4471 11088732 : block = new_block;
4472 : }
4473 :
4474 23116299 : if (prev_beg == current_block && prev_beg)
4475 15054710 : BLOCK_SAME_RANGE (block) = 1;
4476 :
4477 23116299 : prev_beg = origin;
4478 :
4479 23116299 : BLOCK_SUBBLOCKS (block) = 0;
4480 23116299 : TREE_ASM_WRITTEN (block) = 1;
4481 : /* When there's only one block for the entire function,
4482 : current_block == block and we mustn't do this, it
4483 : will cause infinite recursion. */
4484 23116299 : if (block != current_block)
4485 : {
4486 23116299 : tree super;
4487 23116299 : if (block != origin)
4488 11088732 : gcc_assert (BLOCK_SUPERCONTEXT (origin) == current_block
4489 : || BLOCK_FRAGMENT_ORIGIN (BLOCK_SUPERCONTEXT
4490 : (origin))
4491 : == current_block);
4492 23116299 : if (p_block_stack->is_empty ())
4493 : super = current_block;
4494 : else
4495 : {
4496 20802140 : super = p_block_stack->last ();
4497 32103422 : gcc_assert (super == current_block
4498 : || BLOCK_FRAGMENT_ORIGIN (super)
4499 : == current_block);
4500 : }
4501 23116299 : BLOCK_SUPERCONTEXT (block) = super;
4502 23116299 : BLOCK_CHAIN (block) = BLOCK_SUBBLOCKS (current_block);
4503 23116299 : BLOCK_SUBBLOCKS (current_block) = block;
4504 23116299 : current_block = origin;
4505 : }
4506 23116299 : p_block_stack->safe_push (block);
4507 : }
4508 115605463 : else if (NOTE_KIND (insn) == NOTE_INSN_BLOCK_END)
4509 : {
4510 23116299 : NOTE_BLOCK (insn) = p_block_stack->pop ();
4511 23116299 : current_block = BLOCK_SUPERCONTEXT (current_block);
4512 23116299 : if (BLOCK_FRAGMENT_ORIGIN (current_block))
4513 3803318 : current_block = BLOCK_FRAGMENT_ORIGIN (current_block);
4514 23116299 : prev_beg = NULL_TREE;
4515 38171009 : prev_end = BLOCK_SAME_RANGE (NOTE_BLOCK (insn))
4516 23116299 : ? NOTE_BLOCK (insn) : NULL_TREE;
4517 : }
4518 : }
4519 : else
4520 : {
4521 52724966 : prev_beg = NULL_TREE;
4522 52724966 : if (prev_end)
4523 748712 : BLOCK_SAME_RANGE (prev_end) = 0;
4524 : prev_end = NULL_TREE;
4525 : }
4526 : }
4527 574862 : }
4528 :
4529 : /* Reverse the order of elements in the chain T of blocks,
4530 : and return the new head of the chain (old last element). */
4531 :
4532 : tree
4533 31299075 : blocks_nreverse (tree t)
4534 : {
4535 31299075 : tree prev = 0, block, next;
4536 55094190 : for (block = t; block; block = next)
4537 : {
4538 23795115 : next = BLOCK_CHAIN (block);
4539 23795115 : BLOCK_CHAIN (block) = prev;
4540 23795115 : prev = block;
4541 : }
4542 31299075 : return prev;
4543 : }
4544 :
4545 : /* Concatenate two chains of blocks (chained through BLOCK_CHAIN)
4546 : by modifying the last node in chain 1 to point to chain 2. */
4547 :
4548 : tree
4549 82533345 : block_chainon (tree op1, tree op2)
4550 : {
4551 82533345 : tree t1;
4552 :
4553 82533345 : if (!op1)
4554 : return op2;
4555 4082089 : if (!op2)
4556 : return op1;
4557 :
4558 25628522 : for (t1 = op1; BLOCK_CHAIN (t1); t1 = BLOCK_CHAIN (t1))
4559 21546433 : continue;
4560 4082089 : BLOCK_CHAIN (t1) = op2;
4561 :
4562 : #ifdef ENABLE_TREE_CHECKING
4563 4082089 : {
4564 4082089 : tree t2;
4565 8180272 : for (t2 = op2; t2; t2 = BLOCK_CHAIN (t2))
4566 4098183 : gcc_assert (t2 != t1);
4567 : }
4568 : #endif
4569 :
4570 : return op1;
4571 21546433 : }
4572 :
4573 : /* Count the subblocks of the list starting with BLOCK. If VECTOR is
4574 : non-NULL, list them all into VECTOR, in a depth-first preorder
4575 : traversal of the block tree. Also clear TREE_ASM_WRITTEN in all
4576 : blocks. */
4577 :
4578 : static int
4579 122934242 : all_blocks (tree block, tree *vector)
4580 : {
4581 122934242 : int n_blocks = 0;
4582 :
4583 241559618 : while (block)
4584 : {
4585 118625376 : TREE_ASM_WRITTEN (block) = 0;
4586 :
4587 : /* Record this block. */
4588 118625376 : if (vector)
4589 59312688 : vector[n_blocks] = block;
4590 :
4591 118625376 : ++n_blocks;
4592 :
4593 : /* Record the subblocks, and their subblocks... */
4594 177938064 : n_blocks += all_blocks (BLOCK_SUBBLOCKS (block),
4595 59312688 : vector ? vector + n_blocks : 0);
4596 118625376 : block = BLOCK_CHAIN (block);
4597 : }
4598 :
4599 122934242 : return n_blocks;
4600 : }
4601 :
4602 : /* Return a vector containing all the blocks rooted at BLOCK. The
4603 : number of elements in the vector is stored in N_BLOCKS_P. The
4604 : vector is dynamically allocated; it is the caller's responsibility
4605 : to call `free' on the pointer returned. */
4606 :
4607 : static tree *
4608 2154433 : get_block_vector (tree block, int *n_blocks_p)
4609 : {
4610 2154433 : tree *block_vector;
4611 :
4612 2154433 : *n_blocks_p = all_blocks (block, NULL);
4613 2154433 : block_vector = XNEWVEC (tree, *n_blocks_p);
4614 2154433 : all_blocks (block, block_vector);
4615 :
4616 2154433 : return block_vector;
4617 : }
4618 :
4619 : static GTY(()) int next_block_index = 2;
4620 :
4621 : /* Set BLOCK_NUMBER for all the blocks in FN. */
4622 :
4623 : void
4624 2154433 : number_blocks (tree fn)
4625 : {
4626 2154433 : int i;
4627 2154433 : int n_blocks;
4628 2154433 : tree *block_vector;
4629 :
4630 2154433 : block_vector = get_block_vector (DECL_INITIAL (fn), &n_blocks);
4631 :
4632 : /* The top-level BLOCK isn't numbered at all. */
4633 61467121 : for (i = 1; i < n_blocks; ++i)
4634 : /* We number the blocks from two. */
4635 57158255 : BLOCK_NUMBER (block_vector[i]) = next_block_index++;
4636 :
4637 2154433 : free (block_vector);
4638 :
4639 2154433 : return;
4640 : }
4641 :
4642 : /* If VAR is present in a subblock of BLOCK, return the subblock. */
4643 :
4644 : DEBUG_FUNCTION tree
4645 0 : debug_find_var_in_block_tree (tree var, tree block)
4646 : {
4647 0 : tree t;
4648 :
4649 0 : for (t = BLOCK_VARS (block); t; t = TREE_CHAIN (t))
4650 0 : if (t == var)
4651 : return block;
4652 :
4653 0 : for (t = BLOCK_SUBBLOCKS (block); t; t = TREE_CHAIN (t))
4654 : {
4655 0 : tree ret = debug_find_var_in_block_tree (var, t);
4656 0 : if (ret)
4657 : return ret;
4658 : }
4659 :
4660 : return NULL_TREE;
4661 : }
4662 :
4663 : /* Keep track of whether we're in a dummy function context. If we are,
4664 : we don't want to invoke the set_current_function hook, because we'll
4665 : get into trouble if the hook calls target_reinit () recursively or
4666 : when the initial initialization is not yet complete. */
4667 :
4668 : static bool in_dummy_function;
4669 :
4670 : /* Invoke the target hook when setting cfun. Update the optimization options
4671 : if the function uses different options than the default. */
4672 :
4673 : static void
4674 774378279 : invoke_set_current_function_hook (tree fndecl)
4675 : {
4676 774378279 : if (!in_dummy_function)
4677 : {
4678 773946805 : tree opts = ((fndecl)
4679 773946805 : ? DECL_FUNCTION_SPECIFIC_OPTIMIZATION (fndecl)
4680 773946805 : : optimization_default_node);
4681 :
4682 773946805 : if (!opts)
4683 358014814 : opts = optimization_default_node;
4684 :
4685 : /* Change optimization options if needed. */
4686 773946805 : if (optimization_current_node != opts)
4687 : {
4688 3240072 : optimization_current_node = opts;
4689 3240072 : cl_optimization_restore (&global_options, &global_options_set,
4690 3240072 : TREE_OPTIMIZATION (opts));
4691 : }
4692 :
4693 773946805 : targetm.set_current_function (fndecl);
4694 773946805 : this_fn_optabs = this_target_optabs;
4695 :
4696 : /* Initialize global alignment variables after op. */
4697 773946805 : parse_alignment_opts ();
4698 :
4699 773946805 : if (opts != optimization_default_node)
4700 : {
4701 1655095 : init_tree_optimization_optabs (opts);
4702 1655095 : if (TREE_OPTIMIZATION_OPTABS (opts))
4703 112380 : this_fn_optabs = (struct target_optabs *)
4704 112380 : TREE_OPTIMIZATION_OPTABS (opts);
4705 : }
4706 : }
4707 774378279 : }
4708 :
4709 : /* Set cfun to NEW_CFUN and switch to the optimization and target options
4710 : associated with NEW_FNDECL.
4711 :
4712 : FORCE says whether we should do the switch even if NEW_CFUN is the current
4713 : function, e.g. because there has been a change in optimization or target
4714 : options. */
4715 :
4716 : static void
4717 1828840564 : set_function_decl (function *new_cfun, tree new_fndecl, bool force)
4718 : {
4719 1828840564 : if (cfun != new_cfun || force)
4720 : {
4721 577629141 : cfun = new_cfun;
4722 577629141 : invoke_set_current_function_hook (new_fndecl);
4723 577629141 : redirect_edge_var_map_empty ();
4724 : }
4725 1828840564 : }
4726 :
4727 : /* cfun should never be set directly; use this function. */
4728 :
4729 : void
4730 1103356348 : set_cfun (struct function *new_cfun, bool force)
4731 : {
4732 1103356348 : set_function_decl (new_cfun, new_cfun ? new_cfun->decl : NULL_TREE, force);
4733 1103356348 : }
4734 :
4735 : /* Initialized with NOGC, making this poisonous to the garbage collector. */
4736 :
4737 : static vec<function *> cfun_stack;
4738 :
4739 : /* Push the current cfun onto the stack, then switch to function NEW_CFUN
4740 : and FUNCTION_DECL NEW_FNDECL. FORCE is as for set_function_decl. */
4741 :
4742 : static void
4743 725484216 : push_function_decl (function *new_cfun, tree new_fndecl, bool force)
4744 : {
4745 725484216 : gcc_assert ((!cfun && !current_function_decl)
4746 : || (cfun && current_function_decl == cfun->decl));
4747 725484216 : cfun_stack.safe_push (cfun);
4748 725484216 : current_function_decl = new_fndecl;
4749 725484216 : set_function_decl (new_cfun, new_fndecl, force);
4750 725484216 : }
4751 :
4752 : /* Push the current cfun onto the stack and switch to function declaration
4753 : NEW_FNDECL, which might or might not have a function body. FORCE is as for
4754 : set_function_decl. */
4755 :
4756 : void
4757 0 : push_function_decl (tree new_fndecl, bool force)
4758 : {
4759 0 : force |= current_function_decl != new_fndecl;
4760 0 : push_function_decl (DECL_STRUCT_FUNCTION (new_fndecl), new_fndecl, force);
4761 0 : }
4762 :
4763 : /* Push the current cfun onto the stack, and set cfun to new_cfun. Also set
4764 : current_function_decl accordingly. */
4765 :
4766 : void
4767 725484216 : push_cfun (struct function *new_cfun)
4768 : {
4769 725484216 : push_function_decl (new_cfun, new_cfun ? new_cfun->decl : NULL_TREE, false);
4770 725484216 : }
4771 :
4772 : /* A common subroutine for pop_cfun and pop_function_decl. FORCE is as
4773 : for set_function_decl. */
4774 :
4775 : static void
4776 726345316 : pop_cfun_1 (bool force)
4777 : {
4778 726345316 : struct function *new_cfun = cfun_stack.pop ();
4779 : /* When in_dummy_function, we do have a cfun but current_function_decl is
4780 : NULL. We also allow pushing NULL cfun and subsequently changing
4781 : current_function_decl to something else and have both restored by
4782 : pop_cfun. */
4783 726345316 : gcc_checking_assert (in_dummy_function
4784 : || !cfun
4785 : || current_function_decl == cfun->decl);
4786 726345316 : set_cfun (new_cfun, force);
4787 726345316 : current_function_decl = new_cfun ? new_cfun->decl : NULL_TREE;
4788 726345316 : }
4789 :
4790 : /* Pop cfun from the stack. Also set current_function_decl accordingly. */
4791 :
4792 : void
4793 726345316 : pop_cfun (void)
4794 : {
4795 726345316 : pop_cfun_1 (false);
4796 726345316 : }
4797 :
4798 : /* Undo push_function_decl. */
4799 :
4800 : void
4801 0 : pop_function_decl (void)
4802 : {
4803 : /* If the previous cfun was null, the options should be reset to the
4804 : global set. Checking the current cfun against the new (popped) cfun
4805 : wouldn't catch this if the current function decl has no function
4806 : struct. */
4807 0 : pop_cfun_1 (!cfun_stack.last ());
4808 0 : }
4809 :
4810 : /* Return value of funcdef and increase it. */
4811 : int
4812 196533386 : get_next_funcdef_no (void)
4813 : {
4814 196533386 : return funcdef_no++;
4815 : }
4816 :
4817 : /* Return value of funcdef. */
4818 : int
4819 0 : get_last_funcdef_no (void)
4820 : {
4821 0 : return funcdef_no;
4822 : }
4823 :
4824 : /* Allocate and initialize the stack usage info data structure for the
4825 : current function. */
4826 : static void
4827 690 : allocate_stack_usage_info (void)
4828 : {
4829 690 : gcc_assert (!cfun->su);
4830 690 : cfun->su = ggc_cleared_alloc<stack_usage> ();
4831 690 : cfun->su->static_stack_size = -1;
4832 690 : }
4833 :
4834 : /* Allocate a function structure for FNDECL and set its contents
4835 : to the defaults. Set cfun to the newly-allocated object.
4836 : Some of the helper functions invoked during initialization assume
4837 : that cfun has already been set. Therefore, assign the new object
4838 : directly into cfun and invoke the back end hook explicitly at the
4839 : very end, rather than initializing a temporary and calling set_cfun
4840 : on it.
4841 :
4842 : ABSTRACT_P is true if this is a function that will never be seen by
4843 : the middle-end. Such functions are front-end concepts (like C++
4844 : function templates) that do not correspond directly to functions
4845 : placed in object files. */
4846 :
4847 : void
4848 196749138 : allocate_struct_function (tree fndecl, bool abstract_p)
4849 : {
4850 196749138 : tree fntype = fndecl ? TREE_TYPE (fndecl) : NULL_TREE;
4851 :
4852 196749138 : cfun = ggc_cleared_alloc<function> ();
4853 :
4854 196749138 : init_eh_for_function ();
4855 :
4856 196749138 : if (init_machine_status)
4857 196749138 : cfun->machine = (*init_machine_status) ();
4858 :
4859 : #ifdef OVERRIDE_ABI_FORMAT
4860 196749138 : OVERRIDE_ABI_FORMAT (fndecl);
4861 : #endif
4862 :
4863 196749138 : if (fndecl != NULL_TREE)
4864 : {
4865 196533386 : DECL_STRUCT_FUNCTION (fndecl) = cfun;
4866 196533386 : cfun->decl = fndecl;
4867 196533386 : current_function_funcdef_no = get_next_funcdef_no ();
4868 : }
4869 :
4870 196749138 : invoke_set_current_function_hook (fndecl);
4871 :
4872 196749138 : if (fndecl != NULL_TREE)
4873 : {
4874 196533386 : tree result = DECL_RESULT (fndecl);
4875 :
4876 196533386 : if (!abstract_p)
4877 : {
4878 : /* Now that we have activated any function-specific attributes
4879 : that might affect layout, particularly vector modes, relayout
4880 : each of the parameters and the result. */
4881 102761459 : relayout_decl (result);
4882 311182128 : for (tree parm = DECL_ARGUMENTS (fndecl); parm;
4883 208420669 : parm = DECL_CHAIN (parm))
4884 208420669 : relayout_decl (parm);
4885 :
4886 : /* Similarly relayout the function decl. */
4887 102761459 : targetm.target_option.relayout_function (fndecl);
4888 : }
4889 :
4890 102761459 : if (!abstract_p && aggregate_value_p (result, fndecl))
4891 : {
4892 : #ifdef PCC_STATIC_STRUCT_RETURN
4893 : cfun->returns_pcc_struct = 1;
4894 : #endif
4895 2792221 : cfun->returns_struct = 1;
4896 : }
4897 :
4898 196533386 : cfun->stdarg = stdarg_p (fntype);
4899 :
4900 : /* Assume all registers in stdarg functions need to be saved. */
4901 196533386 : cfun->va_list_gpr_size = VA_LIST_MAX_GPR_SIZE;
4902 196533386 : cfun->va_list_fpr_size = VA_LIST_MAX_FPR_SIZE;
4903 :
4904 : /* ??? This could be set on a per-function basis by the front-end
4905 : but is this worth the hassle? */
4906 196533386 : cfun->can_throw_non_call_exceptions = flag_non_call_exceptions;
4907 196533386 : cfun->can_delete_dead_exceptions = flag_delete_dead_exceptions;
4908 :
4909 196533386 : if (!profile_flag && !flag_instrument_function_entry_exit)
4910 196532928 : DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (fndecl) = 1;
4911 :
4912 196533386 : if (flag_callgraph_info)
4913 1 : allocate_stack_usage_info ();
4914 : }
4915 :
4916 : /* Don't enable begin stmt markers if var-tracking at assignments is
4917 : disabled. The markers make little sense without the variable
4918 : binding annotations among them. */
4919 393498276 : cfun->debug_nonbind_markers = lang_hooks.emits_begin_stmt
4920 196749138 : && MAY_HAVE_DEBUG_MARKER_STMTS;
4921 196749138 : }
4922 :
4923 : /* This is like allocate_struct_function, but pushes a new cfun for FNDECL
4924 : instead of just setting it. */
4925 :
4926 : void
4927 861125 : push_struct_function (tree fndecl, bool abstract_p)
4928 : {
4929 : /* When in_dummy_function we might be in the middle of a pop_cfun and
4930 : current_function_decl and cfun may not match. */
4931 861125 : gcc_assert (in_dummy_function
4932 : || (!cfun && !current_function_decl)
4933 : || (cfun && current_function_decl == cfun->decl));
4934 861125 : cfun_stack.safe_push (cfun);
4935 861125 : current_function_decl = fndecl;
4936 861125 : allocate_struct_function (fndecl, abstract_p);
4937 861125 : }
4938 :
4939 : /* Reset crtl and other non-struct-function variables to defaults as
4940 : appropriate for emitting rtl at the start of a function. */
4941 :
4942 : static void
4943 1702168 : prepare_function_start (void)
4944 : {
4945 1702168 : gcc_assert (!get_last_insn ());
4946 :
4947 1702168 : if (in_dummy_function)
4948 215732 : crtl->abi = &default_function_abi;
4949 : else
4950 1486436 : crtl->abi = &fndecl_abi (cfun->decl).base_abi ();
4951 :
4952 1702168 : init_temp_slots ();
4953 1702168 : init_emit ();
4954 1702168 : init_varasm_status ();
4955 1702168 : init_expr ();
4956 1702168 : default_rtl_profile ();
4957 :
4958 1702168 : if (flag_stack_usage_info && !flag_callgraph_info)
4959 689 : allocate_stack_usage_info ();
4960 :
4961 1702168 : cse_not_expected = ! optimize;
4962 :
4963 : /* Caller save not needed yet. */
4964 1702168 : caller_save_needed = 0;
4965 :
4966 : /* We haven't done register allocation yet. */
4967 1702168 : reg_renumber = 0;
4968 :
4969 : /* Indicate that we have not instantiated virtual registers yet. */
4970 1702168 : virtuals_instantiated = 0;
4971 :
4972 : /* Indicate that we want CONCATs now. */
4973 1702168 : generating_concat_p = 1;
4974 :
4975 : /* Indicate we have no need of a frame pointer yet. */
4976 1702168 : frame_pointer_needed = 0;
4977 :
4978 : /* Reset the cache of the "extended" flag in the target's
4979 : _BitInt info struct. */
4980 1702168 : bitint_extended = -1;
4981 1702168 : }
4982 :
4983 : void
4984 215737 : push_dummy_function (bool with_decl)
4985 : {
4986 215737 : tree fn_decl, fn_type, fn_result_decl;
4987 :
4988 215737 : gcc_assert (!in_dummy_function);
4989 215737 : in_dummy_function = true;
4990 :
4991 215737 : if (with_decl)
4992 : {
4993 5 : fn_type = build_function_type_list (void_type_node, NULL_TREE);
4994 5 : fn_decl = build_decl (UNKNOWN_LOCATION, FUNCTION_DECL, NULL_TREE,
4995 : fn_type);
4996 5 : fn_result_decl = build_decl (UNKNOWN_LOCATION, RESULT_DECL,
4997 : NULL_TREE, void_type_node);
4998 5 : DECL_RESULT (fn_decl) = fn_result_decl;
4999 5 : DECL_ARTIFICIAL (fn_decl) = 1;
5000 5 : tree fn_name = get_identifier (" ");
5001 5 : SET_DECL_ASSEMBLER_NAME (fn_decl, fn_name);
5002 : }
5003 : else
5004 : fn_decl = NULL_TREE;
5005 :
5006 215737 : push_struct_function (fn_decl);
5007 215737 : }
5008 :
5009 : /* Initialize the rtl expansion mechanism so that we can do simple things
5010 : like generate sequences. This is used to provide a context during global
5011 : initialization of some passes. You must call expand_dummy_function_end
5012 : to exit this context. */
5013 :
5014 : void
5015 215732 : init_dummy_function_start (void)
5016 : {
5017 215732 : push_dummy_function (false);
5018 215732 : prepare_function_start ();
5019 215732 : }
5020 :
5021 : /* Generate RTL for the start of the function SUBR (a FUNCTION_DECL tree node)
5022 : and initialize static variables for generating RTL for the statements
5023 : of the function. */
5024 :
5025 : void
5026 1486436 : init_function_start (tree subr)
5027 : {
5028 : /* Initialize backend, if needed. */
5029 1486436 : initialize_rtl ();
5030 :
5031 1486436 : prepare_function_start ();
5032 1486436 : decide_function_section (subr);
5033 :
5034 : /* Warn if this value is an aggregate type,
5035 : regardless of which calling convention we are using for it. */
5036 1486436 : if (AGGREGATE_TYPE_P (TREE_TYPE (DECL_RESULT (subr))))
5037 107545 : warning_at (DECL_SOURCE_LOCATION (DECL_RESULT (subr)),
5038 107545 : OPT_Waggregate_return, "function returns an aggregate");
5039 1486436 : }
5040 :
5041 : /* Expand code to verify the stack_protect_guard. This is invoked at
5042 : the end of a function to be protected. */
5043 :
5044 : void
5045 286 : stack_protect_epilogue (void)
5046 : {
5047 286 : tree guard_decl = crtl->stack_protect_guard_decl;
5048 286 : rtx_code_label *label = gen_label_rtx ();
5049 286 : rtx x, y;
5050 286 : rtx_insn *seq = NULL;
5051 :
5052 286 : x = expand_normal (crtl->stack_protect_guard);
5053 :
5054 286 : if (targetm.have_stack_protect_combined_test () && guard_decl)
5055 : {
5056 0 : gcc_assert (DECL_P (guard_decl));
5057 0 : y = DECL_RTL (guard_decl);
5058 : /* Allow the target to compute address of Y and compare it with X without
5059 : leaking Y into a register. This combined address + compare pattern
5060 : allows the target to prevent spilling of any intermediate results by
5061 : splitting it after register allocator. */
5062 0 : seq = targetm.gen_stack_protect_combined_test (x, y, label);
5063 : }
5064 : else
5065 : {
5066 286 : if (guard_decl)
5067 286 : y = expand_normal (guard_decl);
5068 : else
5069 0 : y = const0_rtx;
5070 :
5071 : /* Allow the target to compare Y with X without leaking either into
5072 : a register. */
5073 286 : if (targetm.have_stack_protect_test ())
5074 286 : seq = targetm.gen_stack_protect_test (x, y, label);
5075 : }
5076 :
5077 286 : if (seq)
5078 286 : emit_insn (seq);
5079 : else
5080 0 : emit_cmp_and_jump_insns (x, y, EQ, NULL_RTX, ptr_mode, 1, label);
5081 :
5082 : /* The noreturn predictor has been moved to the tree level. The rtl-level
5083 : predictors estimate this branch about 20%, which isn't enough to get
5084 : things moved out of line. Since this is the only extant case of adding
5085 : a noreturn function at the rtl level, it doesn't seem worth doing ought
5086 : except adding the prediction by hand. */
5087 286 : rtx_insn *tmp = get_last_insn ();
5088 286 : if (JUMP_P (tmp))
5089 286 : predict_insn_def (tmp, PRED_NORETURN, TAKEN);
5090 :
5091 286 : expand_call (targetm.stack_protect_fail (), NULL_RTX, /*ignore=*/true);
5092 286 : free_temp_slots ();
5093 286 : emit_label (label);
5094 286 : }
5095 :
5096 : /* Start the RTL for a new function, and set variables used for
5097 : emitting RTL.
5098 : SUBR is the FUNCTION_DECL node.
5099 : PARMS_HAVE_CLEANUPS is nonzero if there are cleanups associated with
5100 : the function's parameters, which must be run at any return statement. */
5101 :
5102 : bool currently_expanding_function_start;
5103 : void
5104 1480896 : expand_function_start (tree subr)
5105 : {
5106 1480896 : currently_expanding_function_start = true;
5107 :
5108 : /* Make sure volatile mem refs aren't considered
5109 : valid operands of arithmetic insns. */
5110 1480896 : init_recog_no_volatile ();
5111 :
5112 1480896 : crtl->profile
5113 2961792 : = (profile_flag
5114 1481229 : && ! DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (subr));
5115 :
5116 1480896 : crtl->limit_stack
5117 1480896 : = (stack_limit_rtx != NULL_RTX && ! DECL_NO_LIMIT_STACK (subr));
5118 :
5119 : /* Make the label for return statements to jump to. Do not special
5120 : case machines with special return instructions -- they will be
5121 : handled later during jump, ifcvt, or epilogue creation. */
5122 1480896 : return_label = gen_label_rtx ();
5123 :
5124 : /* Initialize rtx used to return the value. */
5125 : /* Do this before assign_parms so that we copy the struct value address
5126 : before any library calls that assign parms might generate. */
5127 :
5128 : /* Decide whether to return the value in memory or in a register. */
5129 1480896 : tree res = DECL_RESULT (subr);
5130 1480896 : if (aggregate_value_p (res, subr))
5131 : {
5132 : /* Returning something that won't go in a register. */
5133 69518 : rtx value_address = 0;
5134 :
5135 : #ifdef PCC_STATIC_STRUCT_RETURN
5136 : if (cfun->returns_pcc_struct)
5137 : {
5138 : int size = int_size_in_bytes (TREE_TYPE (res));
5139 : value_address = assemble_static_space (size);
5140 : }
5141 : else
5142 : #endif
5143 69518 : {
5144 69518 : rtx sv = targetm.calls.struct_value_rtx (TREE_TYPE (subr), 2);
5145 : /* Expect to be passed the address of a place to store the value.
5146 : If it is passed as an argument, assign_parms will take care of
5147 : it. */
5148 69518 : if (sv)
5149 : {
5150 0 : value_address = gen_reg_rtx (Pmode);
5151 0 : emit_move_insn (value_address, sv);
5152 : }
5153 : }
5154 0 : if (value_address)
5155 : {
5156 0 : rtx x = value_address;
5157 0 : if (!DECL_BY_REFERENCE (res))
5158 : {
5159 0 : x = gen_rtx_MEM (DECL_MODE (res), x);
5160 0 : set_mem_attributes (x, res, 1);
5161 : }
5162 0 : set_parm_rtl (res, x);
5163 : }
5164 : }
5165 1411378 : else if (DECL_MODE (res) == VOIDmode)
5166 : /* If return mode is void, this decl rtl should not be used. */
5167 691927 : set_parm_rtl (res, NULL_RTX);
5168 : else
5169 : {
5170 : /* Compute the return values into a pseudo reg, which we will copy
5171 : into the true return register after the cleanups are done. */
5172 719451 : tree return_type = TREE_TYPE (res);
5173 :
5174 : /* If we may coalesce this result, make sure it has the expected mode
5175 : in case it was promoted. But we need not bother about BLKmode. */
5176 719451 : machine_mode promoted_mode
5177 533823 : = flag_tree_coalesce_vars && is_gimple_reg (res)
5178 1216551 : ? promote_ssa_mode (ssa_default_def (cfun, res), NULL)
5179 : : BLKmode;
5180 :
5181 497100 : if (promoted_mode != BLKmode)
5182 497099 : set_parm_rtl (res, gen_reg_rtx (promoted_mode));
5183 222352 : else if (TYPE_MODE (return_type) != BLKmode
5184 222352 : && targetm.calls.return_in_msb (return_type))
5185 : /* expand_function_end will insert the appropriate padding in
5186 : this case. Use the return value's natural (unpadded) mode
5187 : within the function proper. */
5188 0 : set_parm_rtl (res, gen_reg_rtx (TYPE_MODE (return_type)));
5189 : else
5190 : {
5191 : /* In order to figure out what mode to use for the pseudo, we
5192 : figure out what the mode of the eventual return register will
5193 : actually be, and use that. */
5194 222352 : rtx hard_reg = hard_function_value (return_type, subr, 0, 1);
5195 :
5196 : /* Structures that are returned in registers are not
5197 : aggregate_value_p, so we may see a PARALLEL or a REG. */
5198 222352 : if (REG_P (hard_reg))
5199 219319 : set_parm_rtl (res, gen_reg_rtx (GET_MODE (hard_reg)));
5200 : else
5201 : {
5202 3033 : gcc_assert (GET_CODE (hard_reg) == PARALLEL);
5203 3033 : set_parm_rtl (res, gen_group_rtx (hard_reg));
5204 : }
5205 : }
5206 :
5207 : /* Set DECL_REGISTER flag so that expand_function_end will copy the
5208 : result to the real return register(s). */
5209 719451 : DECL_REGISTER (res) = 1;
5210 : }
5211 :
5212 : /* Initialize rtx for parameters and local variables.
5213 : In some cases this requires emitting insns. */
5214 1480896 : assign_parms (subr);
5215 :
5216 : /* If function gets a static chain arg, store it. */
5217 1480896 : if (cfun->static_chain_decl)
5218 : {
5219 19333 : tree parm = cfun->static_chain_decl;
5220 19333 : rtx local, chain;
5221 19333 : rtx_insn *insn;
5222 19333 : int unsignedp;
5223 :
5224 19333 : local = gen_reg_rtx (promote_decl_mode (parm, &unsignedp));
5225 19333 : chain = targetm.calls.static_chain (current_function_decl, true);
5226 :
5227 19333 : set_decl_incoming_rtl (parm, chain, false);
5228 19333 : set_parm_rtl (parm, local);
5229 19333 : mark_reg_pointer (local, TYPE_ALIGN (TREE_TYPE (TREE_TYPE (parm))));
5230 :
5231 19333 : if (GET_MODE (local) != GET_MODE (chain))
5232 : {
5233 1 : convert_move (local, chain, unsignedp);
5234 1 : insn = get_last_insn ();
5235 : }
5236 : else
5237 19332 : insn = emit_move_insn (local, chain);
5238 :
5239 : /* Mark the register as eliminable, similar to parameters. */
5240 19333 : if (MEM_P (chain)
5241 19333 : && reg_mentioned_p (arg_pointer_rtx, XEXP (chain, 0)))
5242 0 : set_dst_reg_note (insn, REG_EQUIV, chain, local);
5243 :
5244 : /* If we aren't optimizing, save the static chain onto the stack. */
5245 19333 : if (!optimize)
5246 : {
5247 3924 : tree saved_static_chain_decl
5248 3924 : = build_decl (DECL_SOURCE_LOCATION (parm), VAR_DECL,
5249 3924 : DECL_NAME (parm), TREE_TYPE (parm));
5250 3924 : rtx saved_static_chain_rtx
5251 7848 : = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);
5252 3924 : SET_DECL_RTL (saved_static_chain_decl, saved_static_chain_rtx);
5253 3924 : emit_move_insn (saved_static_chain_rtx, chain);
5254 3924 : SET_DECL_VALUE_EXPR (parm, saved_static_chain_decl);
5255 3924 : DECL_HAS_VALUE_EXPR_P (parm) = 1;
5256 : }
5257 : }
5258 :
5259 : /* The following was moved from init_function_start.
5260 : The move was supposed to make sdb output more accurate. */
5261 : /* Indicate the beginning of the function body,
5262 : as opposed to parm setup. */
5263 1480896 : emit_note (NOTE_INSN_FUNCTION_BEG);
5264 :
5265 1480896 : gcc_assert (NOTE_P (get_last_insn ()));
5266 :
5267 1480896 : function_beg_insn = parm_birth_insn = get_last_insn ();
5268 :
5269 : /* If the function receives a non-local goto, then store the
5270 : bits we need to restore the frame pointer. */
5271 1480896 : if (cfun->nonlocal_goto_save_area)
5272 : {
5273 393 : tree t_save;
5274 393 : rtx r_save;
5275 :
5276 393 : tree var = TREE_OPERAND (cfun->nonlocal_goto_save_area, 0);
5277 393 : gcc_assert (DECL_RTL_SET_P (var));
5278 :
5279 393 : t_save = build4 (ARRAY_REF,
5280 393 : TREE_TYPE (TREE_TYPE (cfun->nonlocal_goto_save_area)),
5281 : cfun->nonlocal_goto_save_area,
5282 : integer_zero_node, NULL_TREE, NULL_TREE);
5283 393 : r_save = expand_expr (t_save, NULL_RTX, VOIDmode, EXPAND_WRITE);
5284 393 : gcc_assert (GET_MODE (r_save) == Pmode);
5285 :
5286 393 : emit_move_insn (r_save, hard_frame_pointer_rtx);
5287 393 : update_nonlocal_goto_save_area ();
5288 : }
5289 :
5290 1480896 : if (crtl->profile)
5291 : {
5292 : #ifdef PROFILE_HOOK
5293 : PROFILE_HOOK (current_function_funcdef_no);
5294 : #endif
5295 : }
5296 :
5297 : /* If we are doing generic stack checking, the probe should go here. */
5298 1480896 : if (flag_stack_check == GENERIC_STACK_CHECK)
5299 49 : stack_check_probe_note = emit_note (NOTE_INSN_DELETED);
5300 :
5301 1480896 : currently_expanding_function_start = false;
5302 1480896 : }
5303 :
5304 : void
5305 215737 : pop_dummy_function (void)
5306 : {
5307 215737 : pop_cfun ();
5308 215737 : in_dummy_function = false;
5309 215737 : }
5310 :
5311 : /* Undo the effects of init_dummy_function_start. */
5312 : void
5313 215732 : expand_dummy_function_end (void)
5314 : {
5315 215732 : gcc_assert (in_dummy_function);
5316 :
5317 : /* End any sequences that failed to be closed due to syntax errors. */
5318 215732 : while (in_sequence_p ())
5319 0 : end_sequence ();
5320 :
5321 : /* Outside function body, can't compute type's actual size
5322 : until next function's body starts. */
5323 :
5324 215732 : free_after_parsing (cfun);
5325 215732 : free_after_compilation (cfun);
5326 215732 : pop_dummy_function ();
5327 215732 : }
5328 :
5329 : /* Helper for diddle_return_value. */
5330 :
5331 : void
5332 20193410 : diddle_return_value_1 (void (*doit) (rtx, void *), void *arg, rtx outgoing)
5333 : {
5334 20193410 : if (! outgoing)
5335 : return;
5336 :
5337 10543564 : if (REG_P (outgoing))
5338 10488357 : (*doit) (outgoing, arg);
5339 55207 : else if (GET_CODE (outgoing) == PARALLEL)
5340 : {
5341 : int i;
5342 :
5343 129796 : for (i = 0; i < XVECLEN (outgoing, 0); i++)
5344 : {
5345 76968 : rtx x = XEXP (XVECEXP (outgoing, 0, i), 0);
5346 :
5347 76968 : if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
5348 76968 : (*doit) (x, arg);
5349 : }
5350 : }
5351 : }
5352 :
5353 : /* Call DOIT for each hard register used as a return value from
5354 : the current function. */
5355 :
5356 : void
5357 20193410 : diddle_return_value (void (*doit) (rtx, void *), void *arg)
5358 : {
5359 20193410 : diddle_return_value_1 (doit, arg, crtl->return_rtx);
5360 20193410 : }
5361 :
5362 : static void
5363 12688 : do_clobber_return_reg (rtx reg, void *arg ATTRIBUTE_UNUSED)
5364 : {
5365 5190 : emit_clobber (reg);
5366 7498 : }
5367 :
5368 : void
5369 624612 : clobber_return_register (void)
5370 : {
5371 624612 : diddle_return_value (do_clobber_return_reg, NULL);
5372 :
5373 : /* In case we do use pseudo to return value, clobber it too. */
5374 624612 : if (DECL_RTL_SET_P (DECL_RESULT (current_function_decl)))
5375 : {
5376 7530 : tree decl_result = DECL_RESULT (current_function_decl);
5377 7530 : rtx decl_rtl = DECL_RTL (decl_result);
5378 7530 : if (REG_P (decl_rtl) && REGNO (decl_rtl) >= FIRST_PSEUDO_REGISTER)
5379 : {
5380 7498 : do_clobber_return_reg (decl_rtl, NULL);
5381 : }
5382 : }
5383 624612 : }
5384 :
5385 : static void
5386 785939 : do_use_return_reg (rtx reg, void *arg ATTRIBUTE_UNUSED)
5387 : {
5388 785939 : emit_use (reg);
5389 785939 : }
5390 :
5391 : static void
5392 1480895 : use_return_register (void)
5393 : {
5394 0 : diddle_return_value (do_use_return_reg, NULL);
5395 0 : }
5396 :
5397 : /* Generate RTL for the end of the current function. */
5398 :
5399 : void
5400 1480895 : expand_function_end (void)
5401 : {
5402 : /* If arg_pointer_save_area was referenced only from a nested
5403 : function, we will not have initialized it yet. Do that now. */
5404 1480895 : if (arg_pointer_save_area && ! crtl->arg_pointer_save_area_init)
5405 0 : get_arg_pointer_save_area ();
5406 :
5407 : /* If we are doing generic stack checking and this function makes calls,
5408 : do a stack probe at the start of the function to ensure we have enough
5409 : space for another stack frame. */
5410 1480895 : if (flag_stack_check == GENERIC_STACK_CHECK)
5411 : {
5412 49 : rtx_insn *insn, *seq;
5413 :
5414 650 : for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
5415 633 : if (CALL_P (insn))
5416 : {
5417 32 : rtx max_frame_size = GEN_INT (STACK_CHECK_MAX_FRAME_SIZE);
5418 32 : start_sequence ();
5419 32 : if (STACK_CHECK_MOVING_SP)
5420 32 : anti_adjust_stack_and_probe (max_frame_size, true);
5421 : else
5422 : probe_stack_range (STACK_OLD_CHECK_PROTECT, max_frame_size);
5423 32 : seq = end_sequence ();
5424 32 : set_insn_locations (seq, prologue_location);
5425 32 : emit_insn_before (seq, stack_check_probe_note);
5426 32 : break;
5427 : }
5428 : }
5429 :
5430 : /* End any sequences that failed to be closed due to syntax errors. */
5431 1480895 : while (in_sequence_p ())
5432 0 : end_sequence ();
5433 :
5434 1480895 : clear_pending_stack_adjust ();
5435 1480895 : do_pending_stack_adjust ();
5436 :
5437 : /* Output a linenumber for the end of the function.
5438 : SDB depended on this. */
5439 1480895 : set_curr_insn_location (input_location);
5440 :
5441 : /* Before the return label (if any), clobber the return
5442 : registers so that they are not propagated live to the rest of
5443 : the function. This can only happen with functions that drop
5444 : through; if there had been a return statement, there would
5445 : have either been a return rtx, or a jump to the return label.
5446 :
5447 : We delay actual code generation after the current_function_value_rtx
5448 : is computed. */
5449 1480895 : rtx_insn *clobber_after = get_last_insn ();
5450 :
5451 : /* Output the label for the actual return from the function. */
5452 1480895 : emit_label (return_label);
5453 :
5454 1480895 : if (targetm_common.except_unwind_info (&global_options) == UI_SJLJ)
5455 : {
5456 : /* Let except.cc know where it should emit the call to unregister
5457 : the function context for sjlj exceptions. */
5458 0 : if (flag_exceptions)
5459 0 : sjlj_emit_function_exit_after (get_last_insn ());
5460 : }
5461 :
5462 : /* If this is an implementation of throw, do what's necessary to
5463 : communicate between __builtin_eh_return and the epilogue. */
5464 1480895 : expand_eh_return ();
5465 :
5466 : /* If stack protection is enabled for this function, check the guard. */
5467 1480895 : if (crtl->stack_protect_guard
5468 254 : && targetm.stack_protect_runtime_enabled_p ()
5469 1481148 : && naked_return_label == NULL_RTX)
5470 253 : stack_protect_epilogue ();
5471 :
5472 : /* If scalar return value was computed in a pseudo-reg, or was a named
5473 : return value that got dumped to the stack, copy that to the hard
5474 : return register. */
5475 1480895 : if (DECL_RTL_SET_P (DECL_RESULT (current_function_decl)))
5476 : {
5477 788968 : tree decl_result = DECL_RESULT (current_function_decl);
5478 788968 : rtx decl_rtl = DECL_RTL (decl_result);
5479 :
5480 788968 : if ((REG_P (decl_rtl)
5481 788968 : ? REGNO (decl_rtl) >= FIRST_PSEUDO_REGISTER
5482 67347 : : DECL_REGISTER (decl_result))
5483 : /* Unless the psABI says not to. */
5484 788968 : && !TYPE_EMPTY_P (TREE_TYPE (decl_result)))
5485 : {
5486 721755 : rtx real_decl_rtl = crtl->return_rtx;
5487 721755 : complex_mode cmode;
5488 :
5489 : /* This should be set in assign_parms. */
5490 721755 : gcc_assert (REG_FUNCTION_VALUE_P (real_decl_rtl));
5491 :
5492 : /* If this is a BLKmode structure being returned in registers,
5493 : then use the mode computed in expand_return. Note that if
5494 : decl_rtl is memory, then its mode may have been changed,
5495 : but that crtl->return_rtx has not. */
5496 721755 : if (GET_MODE (real_decl_rtl) == BLKmode)
5497 2701 : PUT_MODE (real_decl_rtl, GET_MODE (decl_rtl));
5498 :
5499 : /* If a non-BLKmode return value should be padded at the least
5500 : significant end of the register, shift it left by the appropriate
5501 : amount. BLKmode results are handled using the group load/store
5502 : machinery. */
5503 721755 : if (TYPE_MODE (TREE_TYPE (decl_result)) != BLKmode
5504 719051 : && REG_P (real_decl_rtl)
5505 1438161 : && targetm.calls.return_in_msb (TREE_TYPE (decl_result)))
5506 : {
5507 0 : emit_move_insn (gen_rtx_REG (GET_MODE (decl_rtl),
5508 : REGNO (real_decl_rtl)),
5509 : decl_rtl);
5510 0 : shift_return_value (GET_MODE (decl_rtl), true, real_decl_rtl);
5511 : }
5512 721755 : else if (GET_CODE (real_decl_rtl) == PARALLEL)
5513 : {
5514 : /* If expand_function_start has created a PARALLEL for decl_rtl,
5515 : move the result to the real return registers. Otherwise, do
5516 : a group load from decl_rtl for a named return. */
5517 4122 : if (GET_CODE (decl_rtl) == PARALLEL)
5518 3033 : emit_group_move (real_decl_rtl, decl_rtl);
5519 : else
5520 1089 : emit_group_load (real_decl_rtl, decl_rtl,
5521 1089 : TREE_TYPE (decl_result),
5522 1089 : int_size_in_bytes (TREE_TYPE (decl_result)));
5523 : }
5524 : /* In the case of complex integer modes smaller than a word, we'll
5525 : need to generate some non-trivial bitfield insertions. Do that
5526 : on a pseudo and not the hard register. */
5527 717633 : else if (GET_CODE (decl_rtl) == CONCAT
5528 687 : && is_complex_int_mode (GET_MODE (decl_rtl), &cmode)
5529 717757 : && GET_MODE_BITSIZE (cmode) <= BITS_PER_WORD)
5530 : {
5531 86 : int old_generating_concat_p;
5532 86 : rtx tmp;
5533 :
5534 86 : old_generating_concat_p = generating_concat_p;
5535 86 : generating_concat_p = 0;
5536 86 : tmp = gen_reg_rtx (GET_MODE (decl_rtl));
5537 86 : generating_concat_p = old_generating_concat_p;
5538 :
5539 86 : emit_move_insn (tmp, decl_rtl);
5540 86 : emit_move_insn (real_decl_rtl, tmp);
5541 : }
5542 : /* If a named return value dumped decl_return to memory, then
5543 : we may need to re-do the PROMOTE_MODE signed/unsigned
5544 : extension. */
5545 717547 : else if (GET_MODE (real_decl_rtl) != GET_MODE (decl_rtl))
5546 : {
5547 0 : int unsignedp = TYPE_UNSIGNED (TREE_TYPE (decl_result));
5548 0 : promote_function_mode (TREE_TYPE (decl_result),
5549 : GET_MODE (decl_rtl), &unsignedp,
5550 0 : TREE_TYPE (current_function_decl), 1);
5551 :
5552 0 : convert_move (real_decl_rtl, decl_rtl, unsignedp);
5553 : }
5554 : else
5555 717547 : emit_move_insn (real_decl_rtl, decl_rtl);
5556 : }
5557 : }
5558 :
5559 : /* If returning a structure, arrange to return the address of the value
5560 : in a place where debuggers expect to find it.
5561 :
5562 : If returning a structure PCC style,
5563 : the caller also depends on this value.
5564 : And cfun->returns_pcc_struct is not necessarily set. */
5565 1480895 : if ((cfun->returns_struct || cfun->returns_pcc_struct)
5566 69313 : && !targetm.calls.omit_struct_return_reg)
5567 : {
5568 69313 : rtx value_address = DECL_RTL (DECL_RESULT (current_function_decl));
5569 69313 : tree type = TREE_TYPE (DECL_RESULT (current_function_decl));
5570 69313 : rtx outgoing;
5571 :
5572 69313 : if (DECL_BY_REFERENCE (DECL_RESULT (current_function_decl)))
5573 8871 : type = TREE_TYPE (type);
5574 : else
5575 60442 : value_address = XEXP (value_address, 0);
5576 :
5577 69313 : outgoing = targetm.calls.function_value (build_pointer_type (type),
5578 : current_function_decl, true);
5579 :
5580 : /* Mark this as a function return value so integrate will delete the
5581 : assignment and USE below when inlining this function. */
5582 69313 : REG_FUNCTION_VALUE_P (outgoing) = 1;
5583 :
5584 : /* The address may be ptr_mode and OUTGOING may be Pmode. */
5585 69313 : scalar_int_mode mode = as_a <scalar_int_mode> (GET_MODE (outgoing));
5586 69313 : value_address = convert_memory_address (mode, value_address);
5587 :
5588 69313 : emit_move_insn (outgoing, value_address);
5589 :
5590 : /* Show return register used to hold result (in this case the address
5591 : of the result. */
5592 69313 : crtl->return_rtx = outgoing;
5593 : }
5594 :
5595 : /* Emit the actual code to clobber return register. Don't emit
5596 : it if clobber_after is a barrier, then the previous basic block
5597 : certainly doesn't fall thru into the exit block. */
5598 1480895 : if (!BARRIER_P (clobber_after))
5599 : {
5600 555552 : start_sequence ();
5601 555552 : clobber_return_register ();
5602 555552 : rtx_insn *seq = end_sequence ();
5603 :
5604 555552 : emit_insn_after (seq, clobber_after);
5605 : }
5606 :
5607 : /* Output the label for the naked return from the function. */
5608 1480895 : if (naked_return_label)
5609 379 : emit_label (naked_return_label);
5610 :
5611 : /* @@@ This is a kludge. We want to ensure that instructions that
5612 : may trap are not moved into the epilogue by scheduling, because
5613 : we don't always emit unwind information for the epilogue. */
5614 1480895 : if (cfun->can_throw_non_call_exceptions
5615 1480895 : && targetm_common.except_unwind_info (&global_options) != UI_SJLJ)
5616 262872 : emit_insn (gen_blockage ());
5617 :
5618 : /* If stack protection is enabled for this function, check the guard. */
5619 1480895 : if (crtl->stack_protect_guard
5620 254 : && targetm.stack_protect_runtime_enabled_p ()
5621 1481148 : && naked_return_label)
5622 0 : stack_protect_epilogue ();
5623 :
5624 : /* If we had calls to alloca, and this machine needs
5625 : an accurate stack pointer to exit the function,
5626 : insert some code to save and restore the stack pointer. */
5627 1480895 : if (! EXIT_IGNORE_STACK
5628 : && cfun->calls_alloca)
5629 : {
5630 : rtx tem = 0;
5631 :
5632 : start_sequence ();
5633 : emit_stack_save (SAVE_FUNCTION, &tem);
5634 : rtx_insn *seq = end_sequence ();
5635 : emit_insn_before (seq, parm_birth_insn);
5636 :
5637 : emit_stack_restore (SAVE_FUNCTION, tem);
5638 : }
5639 :
5640 : /* ??? This should no longer be necessary since stupid is no longer with
5641 : us, but there are some parts of the compiler (eg reload_combine, and
5642 : sh mach_dep_reorg) that still try and compute their own lifetime info
5643 : instead of using the general framework. */
5644 1480895 : use_return_register ();
5645 1480895 : }
5646 :
5647 : rtx
5648 0 : get_arg_pointer_save_area (void)
5649 : {
5650 0 : rtx ret = arg_pointer_save_area;
5651 :
5652 0 : if (! ret)
5653 : {
5654 0 : ret = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);
5655 0 : arg_pointer_save_area = ret;
5656 : }
5657 :
5658 0 : if (! crtl->arg_pointer_save_area_init)
5659 : {
5660 : /* Save the arg pointer at the beginning of the function. The
5661 : generated stack slot may not be a valid memory address, so we
5662 : have to check it and fix it if necessary. */
5663 0 : start_sequence ();
5664 0 : emit_move_insn (validize_mem (copy_rtx (ret)),
5665 : crtl->args.internal_arg_pointer);
5666 0 : rtx_insn *seq = end_sequence ();
5667 :
5668 0 : push_topmost_sequence ();
5669 0 : emit_insn_after (seq, entry_of_function ());
5670 0 : pop_topmost_sequence ();
5671 :
5672 0 : crtl->arg_pointer_save_area_init = true;
5673 : }
5674 :
5675 0 : return ret;
5676 : }
5677 :
5678 :
5679 : /* If debugging dumps are requested, dump information about how the
5680 : target handled -fstack-check=clash for the prologue.
5681 :
5682 : PROBES describes what if any probes were emitted.
5683 :
5684 : RESIDUALS indicates if the prologue had any residual allocation
5685 : (i.e. total allocation was not a multiple of PROBE_INTERVAL). */
5686 :
5687 : void
5688 127 : dump_stack_clash_frame_info (enum stack_clash_probes probes, bool residuals)
5689 : {
5690 127 : if (!dump_file)
5691 : return;
5692 :
5693 17 : switch (probes)
5694 : {
5695 1 : case NO_PROBE_NO_FRAME:
5696 1 : fprintf (dump_file,
5697 : "Stack clash no probe no stack adjustment in prologue.\n");
5698 1 : break;
5699 11 : case NO_PROBE_SMALL_FRAME:
5700 11 : fprintf (dump_file,
5701 : "Stack clash no probe small stack adjustment in prologue.\n");
5702 11 : break;
5703 3 : case PROBE_INLINE:
5704 3 : fprintf (dump_file, "Stack clash inline probes in prologue.\n");
5705 3 : break;
5706 2 : case PROBE_LOOP:
5707 2 : fprintf (dump_file, "Stack clash probe loop in prologue.\n");
5708 2 : break;
5709 : }
5710 :
5711 17 : if (residuals)
5712 16 : fprintf (dump_file, "Stack clash residual allocation in prologue.\n");
5713 : else
5714 1 : fprintf (dump_file, "Stack clash no residual allocation in prologue.\n");
5715 :
5716 17 : if (frame_pointer_needed)
5717 0 : fprintf (dump_file, "Stack clash frame pointer needed.\n");
5718 : else
5719 17 : fprintf (dump_file, "Stack clash no frame pointer needed.\n");
5720 :
5721 17 : if (TREE_THIS_VOLATILE (cfun->decl))
5722 1 : fprintf (dump_file,
5723 : "Stack clash noreturn prologue, assuming no implicit"
5724 : " probes in caller.\n");
5725 : else
5726 16 : fprintf (dump_file,
5727 : "Stack clash not noreturn prologue.\n");
5728 : }
5729 :
5730 : /* Add a list of INSNS to the hash HASHP, possibly allocating HASHP
5731 : for the first time. */
5732 :
5733 : static void
5734 3687932 : record_insns (rtx_insn *insns, rtx end, hash_table<insn_cache_hasher> **hashp)
5735 : {
5736 3687932 : rtx_insn *tmp;
5737 3687932 : hash_table<insn_cache_hasher> *hash = *hashp;
5738 :
5739 3687932 : if (hash == NULL)
5740 2960236 : *hashp = hash = hash_table<insn_cache_hasher>::create_ggc (17);
5741 :
5742 14263498 : for (tmp = insns; tmp != end; tmp = NEXT_INSN (tmp))
5743 : {
5744 10575566 : rtx *slot = hash->find_slot (tmp, INSERT);
5745 10575566 : gcc_assert (*slot == NULL);
5746 10575566 : *slot = tmp;
5747 : }
5748 3687932 : }
5749 :
5750 : /* INSN has been duplicated or replaced by as COPY, perhaps by duplicating a
5751 : basic block, splitting or peepholes. If INSN is a prologue or epilogue
5752 : insn, then record COPY as well. */
5753 :
5754 : void
5755 3823971 : maybe_copy_prologue_epilogue_insn (rtx insn, rtx copy)
5756 : {
5757 3823971 : hash_table<insn_cache_hasher> *hash;
5758 3823971 : rtx *slot;
5759 :
5760 3823971 : hash = epilogue_insn_hash;
5761 3823971 : if (!hash || !hash->find (insn))
5762 : {
5763 3315548 : hash = prologue_insn_hash;
5764 3315548 : if (!hash || !hash->find (insn))
5765 3240146 : return;
5766 : }
5767 :
5768 583825 : slot = hash->find_slot (copy, INSERT);
5769 583825 : gcc_assert (*slot == NULL);
5770 583825 : *slot = copy;
5771 : }
5772 :
5773 : /* Determine if any INSNs in HASH are, or are part of, INSN. Because
5774 : we can be running after reorg, SEQUENCE rtl is possible. */
5775 :
5776 : static bool
5777 285414144 : contains (const rtx_insn *insn, hash_table<insn_cache_hasher> *hash)
5778 : {
5779 285414144 : if (hash == NULL)
5780 : return false;
5781 :
5782 285360676 : if (NONJUMP_INSN_P (insn) && GET_CODE (PATTERN (insn)) == SEQUENCE)
5783 : {
5784 0 : rtx_sequence *seq = as_a <rtx_sequence *> (PATTERN (insn));
5785 0 : int i;
5786 0 : for (i = seq->len () - 1; i >= 0; i--)
5787 0 : if (hash->find (seq->element (i)))
5788 : return true;
5789 : return false;
5790 : }
5791 :
5792 285360676 : return hash->find (const_cast<rtx_insn *> (insn)) != NULL;
5793 : }
5794 :
5795 : bool
5796 108743445 : prologue_contains (const rtx_insn *insn)
5797 : {
5798 108743445 : return contains (insn, prologue_insn_hash);
5799 : }
5800 :
5801 : bool
5802 108743445 : epilogue_contains (const rtx_insn *insn)
5803 : {
5804 108743445 : return contains (insn, epilogue_insn_hash);
5805 : }
5806 :
5807 : bool
5808 4467 : prologue_epilogue_contains (const rtx_insn *insn)
5809 : {
5810 4467 : if (contains (insn, prologue_insn_hash))
5811 : return true;
5812 4277 : if (contains (insn, epilogue_insn_hash))
5813 : return true;
5814 : return false;
5815 : }
5816 :
5817 : void
5818 168833 : record_prologue_seq (rtx_insn *seq)
5819 : {
5820 168833 : record_insns (seq, NULL, &prologue_insn_hash);
5821 168833 : }
5822 :
5823 : void
5824 150912 : record_epilogue_seq (rtx_insn *seq)
5825 : {
5826 150912 : record_insns (seq, NULL, &epilogue_insn_hash);
5827 150912 : }
5828 :
5829 : /* Set JUMP_LABEL for a return insn. */
5830 :
5831 : void
5832 1525362 : set_return_jump_label (rtx_insn *returnjump)
5833 : {
5834 1525362 : rtx pat = PATTERN (returnjump);
5835 1525362 : if (GET_CODE (pat) == PARALLEL)
5836 27230 : pat = XVECEXP (pat, 0, 0);
5837 1525362 : if (ANY_RETURN_P (pat))
5838 1525362 : JUMP_LABEL (returnjump) = pat;
5839 : else
5840 0 : JUMP_LABEL (returnjump) = ret_rtx;
5841 1525362 : }
5842 :
5843 : /* Return a sequence to be used as the split prologue for the current
5844 : function, or NULL. */
5845 :
5846 : static rtx_insn *
5847 1524871 : make_split_prologue_seq (void)
5848 : {
5849 1524871 : if (!flag_split_stack
5850 1524871 : || lookup_attribute ("no_split_stack", DECL_ATTRIBUTES (cfun->decl)))
5851 1264939 : return NULL;
5852 :
5853 259932 : start_sequence ();
5854 259932 : emit_insn (targetm.gen_split_stack_prologue ());
5855 259932 : rtx_insn *seq = end_sequence ();
5856 :
5857 259932 : record_insns (seq, NULL, &prologue_insn_hash);
5858 259932 : set_insn_locations (seq, prologue_location);
5859 :
5860 259932 : return seq;
5861 : }
5862 :
5863 : /* Return a sequence to be used as the prologue for the current function,
5864 : or NULL. */
5865 :
5866 : static rtx_insn *
5867 1524871 : make_prologue_seq (void)
5868 : {
5869 1524871 : if (!targetm.have_prologue ())
5870 : return NULL;
5871 :
5872 1524871 : start_sequence ();
5873 1524871 : rtx_insn *seq = targetm.gen_prologue ();
5874 1524871 : emit_insn (seq);
5875 :
5876 : /* Insert an explicit USE for the frame pointer
5877 : if the profiling is on and the frame pointer is required. */
5878 1524871 : if (crtl->profile && frame_pointer_needed)
5879 292 : emit_use (hard_frame_pointer_rtx);
5880 :
5881 : /* Retain a map of the prologue insns. */
5882 1524871 : record_insns (seq, NULL, &prologue_insn_hash);
5883 1524871 : emit_note (NOTE_INSN_PROLOGUE_END);
5884 :
5885 : /* Ensure that instructions are not moved into the prologue when
5886 : profiling is on. The call to the profiling routine can be
5887 : emitted within the live range of a call-clobbered register. */
5888 1524871 : if (!targetm.profile_before_prologue () && crtl->profile)
5889 12 : emit_insn (gen_blockage ());
5890 :
5891 1524871 : seq = end_sequence ();
5892 1524871 : set_insn_locations (seq, prologue_location);
5893 :
5894 1524871 : return seq;
5895 : }
5896 :
5897 : /* Emit a sequence of insns to zero the call-used registers before RET
5898 : according to ZERO_REGS_TYPE. */
5899 :
5900 : static void
5901 177 : gen_call_used_regs_seq (rtx_insn *ret, unsigned int zero_regs_type)
5902 : {
5903 177 : bool only_gpr = true;
5904 177 : bool only_used = true;
5905 177 : bool only_arg = true;
5906 :
5907 : /* No need to zero call-used-regs in main (). */
5908 177 : if (MAIN_NAME_P (DECL_NAME (current_function_decl)))
5909 46 : return;
5910 :
5911 : /* No need to zero call-used-regs if __builtin_eh_return is called
5912 : since it isn't a normal function return. */
5913 135 : if (crtl->calls_eh_return)
5914 : return;
5915 :
5916 : /* If only_gpr is true, only zero call-used registers that are
5917 : general-purpose registers; if only_used is true, only zero
5918 : call-used registers that are used in the current function;
5919 : if only_arg is true, only zero call-used registers that pass
5920 : parameters defined by the flatform's calling conversion. */
5921 :
5922 135 : using namespace zero_regs_flags;
5923 :
5924 135 : only_gpr = zero_regs_type & ONLY_GPR;
5925 135 : only_used = zero_regs_type & ONLY_USED;
5926 135 : only_arg = zero_regs_type & ONLY_ARG;
5927 :
5928 135 : if ((zero_regs_type & LEAFY_MODE) && leaf_function_p ())
5929 : only_used = true;
5930 :
5931 : /* For each of the hard registers, we should zero it if:
5932 : 1. it is a call-used register;
5933 : and 2. it is not a fixed register;
5934 : and 3. it is not live at the return of the routine;
5935 : and 4. it is general registor if only_gpr is true;
5936 : and 5. it is used in the routine if only_used is true;
5937 : and 6. it is a register that passes parameter if only_arg is true. */
5938 :
5939 : /* First, prepare the data flow information. */
5940 135 : basic_block bb = BLOCK_FOR_INSN (ret);
5941 135 : auto_bitmap live_out;
5942 135 : bitmap_copy (live_out, df_get_live_out (bb));
5943 135 : df_simulate_initialize_backwards (bb, live_out);
5944 135 : df_simulate_one_insn_backwards (bb, ret, live_out);
5945 :
5946 135 : HARD_REG_SET selected_hardregs;
5947 135 : HARD_REG_SET all_call_used_regs;
5948 540 : CLEAR_HARD_REG_SET (selected_hardregs);
5949 12555 : CLEAR_HARD_REG_SET (all_call_used_regs);
5950 12555 : for (unsigned int regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
5951 : {
5952 12420 : if (!crtl->abi->clobbers_full_reg_p (regno))
5953 1215 : continue;
5954 11205 : if (fixed_regs[regno])
5955 5614 : continue;
5956 5591 : if (REGNO_REG_SET_P (live_out, regno))
5957 124 : continue;
5958 : #ifdef LEAF_REG_REMAP
5959 : if (crtl->uses_only_leaf_regs && LEAF_REG_REMAP (regno) < 0)
5960 : continue;
5961 : #endif
5962 : /* This is a call used register that is dead at return. */
5963 5467 : SET_HARD_REG_BIT (all_call_used_regs, regno);
5964 :
5965 7387 : if (only_gpr
5966 5467 : && !TEST_HARD_REG_BIT (reg_class_contents[GENERAL_REGS], regno))
5967 1920 : continue;
5968 3547 : if (only_used && !df_regs_ever_live_p (regno))
5969 1806 : continue;
5970 1741 : if (only_arg && !FUNCTION_ARG_REGNO_P (regno))
5971 364 : continue;
5972 :
5973 : /* Now this is a register that we might want to zero. */
5974 1377 : SET_HARD_REG_BIT (selected_hardregs, regno);
5975 : }
5976 :
5977 135 : if (hard_reg_set_empty_p (selected_hardregs))
5978 4 : return;
5979 :
5980 : /* Now that we have a hard register set that needs to be zeroed, pass it to
5981 : target to generate zeroing sequence. */
5982 131 : HARD_REG_SET zeroed_hardregs;
5983 131 : start_sequence ();
5984 131 : zeroed_hardregs = targetm.calls.zero_call_used_regs (selected_hardregs);
5985 :
5986 : /* For most targets, the returned set of registers is a subset of
5987 : selected_hardregs, however, for some of the targets (for example MIPS),
5988 : clearing some registers that are in selected_hardregs requires clearing
5989 : other call used registers that are not in the selected_hardregs, under
5990 : such situation, the returned set of registers must be a subset of
5991 : all call used registers. */
5992 262 : gcc_assert (hard_reg_set_subset_p (zeroed_hardregs, all_call_used_regs));
5993 :
5994 131 : rtx_insn *seq = end_sequence ();
5995 131 : if (seq)
5996 : {
5997 : /* Emit the memory blockage and register clobber asm volatile before
5998 : the whole sequence. */
5999 131 : start_sequence ();
6000 131 : expand_asm_reg_clobber_mem_blockage (zeroed_hardregs);
6001 131 : rtx_insn *seq_barrier = end_sequence ();
6002 :
6003 131 : emit_insn_before (seq_barrier, ret);
6004 131 : emit_insn_before (seq, ret);
6005 :
6006 : /* Update the data flow information. */
6007 131 : crtl->must_be_zero_on_return |= zeroed_hardregs;
6008 131 : df_update_exit_block_uses ();
6009 : }
6010 135 : }
6011 :
6012 :
6013 : /* Return a sequence to be used as the epilogue for the current function,
6014 : or NULL. */
6015 :
6016 : static rtx_insn *
6017 1524871 : make_epilogue_seq (void)
6018 : {
6019 1524871 : if (!targetm.have_epilogue ())
6020 : return NULL;
6021 :
6022 1524871 : start_sequence ();
6023 1524871 : emit_note (NOTE_INSN_EPILOGUE_BEG);
6024 1524871 : rtx_insn *seq = targetm.gen_epilogue ();
6025 1524871 : if (seq)
6026 1524871 : emit_jump_insn (seq);
6027 :
6028 : /* Retain a map of the epilogue insns. */
6029 1524871 : record_insns (seq, NULL, &epilogue_insn_hash);
6030 1524871 : set_insn_locations (seq, epilogue_location);
6031 :
6032 1524871 : seq = get_insns ();
6033 1524871 : rtx_insn *returnjump = get_last_insn ();
6034 1524871 : end_sequence ();
6035 :
6036 1524871 : if (JUMP_P (returnjump))
6037 1524797 : set_return_jump_label (returnjump);
6038 :
6039 : return seq;
6040 : }
6041 :
6042 :
6043 : /* Generate the prologue and epilogue RTL if the machine supports it. Thread
6044 : this into place with notes indicating where the prologue ends and where
6045 : the epilogue begins. Update the basic block information when possible.
6046 :
6047 : Notes on epilogue placement:
6048 : There are several kinds of edges to the exit block:
6049 : * a single fallthru edge from LAST_BB
6050 : * possibly, edges from blocks containing sibcalls
6051 : * possibly, fake edges from infinite loops
6052 :
6053 : The epilogue is always emitted on the fallthru edge from the last basic
6054 : block in the function, LAST_BB, into the exit block.
6055 :
6056 : If LAST_BB is empty except for a label, it is the target of every
6057 : other basic block in the function that ends in a return. If a
6058 : target has a return or simple_return pattern (possibly with
6059 : conditional variants), these basic blocks can be changed so that a
6060 : return insn is emitted into them, and their target is adjusted to
6061 : the real exit block.
6062 :
6063 : Notes on shrink wrapping: We implement a fairly conservative
6064 : version of shrink-wrapping rather than the textbook one. We only
6065 : generate a single prologue and a single epilogue. This is
6066 : sufficient to catch a number of interesting cases involving early
6067 : exits.
6068 :
6069 : First, we identify the blocks that require the prologue to occur before
6070 : them. These are the ones that modify a call-saved register, or reference
6071 : any of the stack or frame pointer registers. To simplify things, we then
6072 : mark everything reachable from these blocks as also requiring a prologue.
6073 : This takes care of loops automatically, and avoids the need to examine
6074 : whether MEMs reference the frame, since it is sufficient to check for
6075 : occurrences of the stack or frame pointer.
6076 :
6077 : We then compute the set of blocks for which the need for a prologue
6078 : is anticipatable (borrowing terminology from the shrink-wrapping
6079 : description in Muchnick's book). These are the blocks which either
6080 : require a prologue themselves, or those that have only successors
6081 : where the prologue is anticipatable. The prologue needs to be
6082 : inserted on all edges from BB1->BB2 where BB2 is in ANTIC and BB1
6083 : is not. For the moment, we ensure that only one such edge exists.
6084 :
6085 : The epilogue is placed as described above, but we make a
6086 : distinction between inserting return and simple_return patterns
6087 : when modifying other blocks that end in a return. Blocks that end
6088 : in a sibcall omit the sibcall_epilogue if the block is not in
6089 : ANTIC. */
6090 :
6091 : void
6092 1480118 : thread_prologue_and_epilogue_insns (void)
6093 : {
6094 1480118 : df_analyze ();
6095 :
6096 : /* Can't deal with multiple successors of the entry block at the
6097 : moment. Function should always have at least one entry
6098 : point. */
6099 1480118 : gcc_assert (single_succ_p (ENTRY_BLOCK_PTR_FOR_FN (cfun)));
6100 :
6101 1480118 : edge entry_edge = single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun));
6102 1480118 : edge orig_entry_edge = entry_edge;
6103 :
6104 1480118 : rtx_insn *split_prologue_seq = make_split_prologue_seq ();
6105 1480118 : rtx_insn *prologue_seq = make_prologue_seq ();
6106 1480118 : rtx_insn *epilogue_seq = make_epilogue_seq ();
6107 :
6108 : /* Try to perform a kind of shrink-wrapping, making sure the
6109 : prologue/epilogue is emitted only around those parts of the
6110 : function that require it. */
6111 1480118 : try_shrink_wrapping (&entry_edge, prologue_seq);
6112 :
6113 : /* If the target can handle splitting the prologue/epilogue into separate
6114 : components, try to shrink-wrap these components separately. */
6115 1480118 : try_shrink_wrapping_separate (entry_edge->dest);
6116 :
6117 : /* If that did anything for any component we now need the generate the
6118 : "main" prologue again. Because some targets require some of these
6119 : to be called in a specific order (i386 requires the split prologue
6120 : to be first, for example), we create all three sequences again here.
6121 : If this does not work for some target, that target should not enable
6122 : separate shrink-wrapping. */
6123 1480118 : if (crtl->shrink_wrapped_separate)
6124 : {
6125 44753 : split_prologue_seq = make_split_prologue_seq ();
6126 44753 : prologue_seq = make_prologue_seq ();
6127 44753 : epilogue_seq = make_epilogue_seq ();
6128 : }
6129 :
6130 1480118 : rtl_profile_for_bb (EXIT_BLOCK_PTR_FOR_FN (cfun));
6131 :
6132 : /* A small fib -- epilogue is not yet completed, but we wish to re-use
6133 : this marker for the splits of EH_RETURN patterns, and nothing else
6134 : uses the flag in the meantime. */
6135 1480118 : epilogue_completed = 1;
6136 :
6137 : /* Find non-fallthru edges that end with EH_RETURN instructions. On
6138 : some targets, these get split to a special version of the epilogue
6139 : code. In order to be able to properly annotate these with unwind
6140 : info, try to split them now. If we get a valid split, drop an
6141 : EPILOGUE_BEG note and mark the insns as epilogue insns. */
6142 1480118 : edge e;
6143 1480118 : edge_iterator ei;
6144 3013973 : FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
6145 : {
6146 1533855 : rtx_insn *prev, *last, *trial;
6147 :
6148 1533855 : if (e->flags & EDGE_FALLTHRU)
6149 1349401 : continue;
6150 184454 : last = BB_END (e->src);
6151 184454 : if (!eh_returnjump_p (last))
6152 184425 : continue;
6153 :
6154 29 : prev = PREV_INSN (last);
6155 29 : trial = try_split (PATTERN (last), last, 1);
6156 29 : if (trial == last)
6157 0 : continue;
6158 :
6159 29 : record_insns (NEXT_INSN (prev), NEXT_INSN (trial), &epilogue_insn_hash);
6160 29 : emit_note_after (NOTE_INSN_EPILOGUE_BEG, prev);
6161 : }
6162 :
6163 1480118 : edge exit_fallthru_edge = find_fallthru_edge (EXIT_BLOCK_PTR_FOR_FN (cfun)->preds);
6164 :
6165 1480118 : if (exit_fallthru_edge)
6166 : {
6167 1349401 : if (epilogue_seq)
6168 : {
6169 1349401 : insert_insn_on_edge (epilogue_seq, exit_fallthru_edge);
6170 1349401 : commit_edge_insertions ();
6171 :
6172 : /* The epilogue insns we inserted may cause the exit edge to no longer
6173 : be fallthru. */
6174 2778874 : FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
6175 : {
6176 1429473 : if (((e->flags & EDGE_FALLTHRU) != 0)
6177 1429473 : && returnjump_p (BB_END (e->src)))
6178 0 : e->flags &= ~EDGE_FALLTHRU;
6179 : }
6180 :
6181 1349401 : find_sub_basic_blocks (BLOCK_FOR_INSN (epilogue_seq));
6182 : }
6183 0 : else if (next_active_insn (BB_END (exit_fallthru_edge->src)))
6184 : {
6185 : /* We have a fall-through edge to the exit block, the source is not
6186 : at the end of the function, and there will be an assembler epilogue
6187 : at the end of the function.
6188 : We can't use force_nonfallthru here, because that would try to
6189 : use return. Inserting a jump 'by hand' is extremely messy, so
6190 : we take advantage of cfg_layout_finalize using
6191 : fixup_fallthru_exit_predecessor. */
6192 0 : cfg_layout_initialize (0);
6193 0 : basic_block cur_bb;
6194 0 : FOR_EACH_BB_FN (cur_bb, cfun)
6195 0 : if (cur_bb->index >= NUM_FIXED_BLOCKS
6196 0 : && cur_bb->next_bb->index >= NUM_FIXED_BLOCKS)
6197 0 : cur_bb->aux = cur_bb->next_bb;
6198 0 : cfg_layout_finalize ();
6199 : }
6200 : }
6201 :
6202 : /* Insert the prologue. */
6203 :
6204 1480118 : rtl_profile_for_bb (ENTRY_BLOCK_PTR_FOR_FN (cfun));
6205 :
6206 1480118 : if (split_prologue_seq || prologue_seq)
6207 : {
6208 1480118 : rtx_insn *split_prologue_insn = split_prologue_seq;
6209 1480118 : if (split_prologue_seq)
6210 : {
6211 259928 : while (split_prologue_insn && !NONDEBUG_INSN_P (split_prologue_insn))
6212 0 : split_prologue_insn = NEXT_INSN (split_prologue_insn);
6213 259928 : insert_insn_on_edge (split_prologue_seq, orig_entry_edge);
6214 : }
6215 :
6216 1480118 : rtx_insn *prologue_insn = prologue_seq;
6217 1480118 : if (prologue_seq)
6218 : {
6219 1871241 : while (prologue_insn && !NONDEBUG_INSN_P (prologue_insn))
6220 391123 : prologue_insn = NEXT_INSN (prologue_insn);
6221 1480118 : insert_insn_on_edge (prologue_seq, entry_edge);
6222 : }
6223 :
6224 1480118 : commit_edge_insertions ();
6225 :
6226 : /* Look for basic blocks within the prologue insns. */
6227 1480118 : if (split_prologue_insn
6228 1480118 : && BLOCK_FOR_INSN (split_prologue_insn) == NULL)
6229 : split_prologue_insn = NULL;
6230 1480118 : if (prologue_insn
6231 1480118 : && BLOCK_FOR_INSN (prologue_insn) == NULL)
6232 : prologue_insn = NULL;
6233 1480118 : if (split_prologue_insn || prologue_insn)
6234 : {
6235 1112638 : auto_sbitmap blocks (last_basic_block_for_fn (cfun));
6236 1112638 : bitmap_clear (blocks);
6237 1112638 : if (split_prologue_insn)
6238 259928 : bitmap_set_bit (blocks,
6239 259928 : BLOCK_FOR_INSN (split_prologue_insn)->index);
6240 1112638 : if (prologue_insn)
6241 1088995 : bitmap_set_bit (blocks, BLOCK_FOR_INSN (prologue_insn)->index);
6242 1112638 : find_many_sub_basic_blocks (blocks);
6243 1112638 : }
6244 : }
6245 :
6246 1480118 : default_rtl_profile ();
6247 :
6248 : /* Emit sibling epilogues before any sibling call sites. */
6249 1480118 : for (ei = ei_start (EXIT_BLOCK_PTR_FOR_FN (cfun)->preds);
6250 3013973 : (e = ei_safe_edge (ei));
6251 1533855 : ei_next (&ei))
6252 : {
6253 : /* Skip those already handled, the ones that run without prologue. */
6254 1533855 : if (e->flags & EDGE_IGNORE)
6255 : {
6256 5003 : e->flags &= ~EDGE_IGNORE;
6257 5003 : continue;
6258 : }
6259 :
6260 1528852 : rtx_insn *insn = BB_END (e->src);
6261 :
6262 1528852 : if (!(CALL_P (insn) && SIBLING_CALL_P (insn)))
6263 1403686 : continue;
6264 :
6265 125166 : rtx_insn *ep_seq;
6266 125166 : if (targetm.emit_epilogue_for_sibcall)
6267 : {
6268 0 : start_sequence ();
6269 0 : targetm.emit_epilogue_for_sibcall (as_a<rtx_call_insn *> (insn));
6270 0 : ep_seq = end_sequence ();
6271 : }
6272 : else
6273 125166 : ep_seq = targetm.gen_sibcall_epilogue ();
6274 125166 : if (ep_seq)
6275 : {
6276 58484 : start_sequence ();
6277 58484 : emit_note (NOTE_INSN_EPILOGUE_BEG);
6278 58484 : emit_insn (ep_seq);
6279 58484 : rtx_insn *seq = end_sequence ();
6280 :
6281 : /* Retain a map of the epilogue insns. Used in life analysis to
6282 : avoid getting rid of sibcall epilogue insns. Do this before we
6283 : actually emit the sequence. */
6284 58484 : record_insns (seq, NULL, &epilogue_insn_hash);
6285 58484 : set_insn_locations (seq, epilogue_location);
6286 :
6287 58484 : emit_insn_before (seq, insn);
6288 :
6289 58484 : find_sub_basic_blocks (BLOCK_FOR_INSN (insn));
6290 : }
6291 : }
6292 :
6293 1480118 : if (epilogue_seq)
6294 : {
6295 : rtx_insn *insn, *next;
6296 :
6297 : /* Similarly, move any line notes that appear after the epilogue.
6298 : There is no need, however, to be quite so anal about the existence
6299 : of such a note. Also possibly move
6300 : NOTE_INSN_FUNCTION_BEG notes, as those can be relevant for debug
6301 : info generation. */
6302 11651938 : for (insn = epilogue_seq; insn; insn = next)
6303 : {
6304 10171820 : next = NEXT_INSN (insn);
6305 10171820 : if (NOTE_P (insn)
6306 3102407 : && (NOTE_KIND (insn) == NOTE_INSN_FUNCTION_BEG))
6307 0 : reorder_insns (insn, insn, PREV_INSN (epilogue_seq));
6308 : }
6309 : }
6310 :
6311 : /* Threading the prologue and epilogue changes the artificial refs in the
6312 : entry and exit blocks, and may invalidate DF info for tail calls.
6313 : This is also needed for [[musttail]] conversion even when not
6314 : optimizing. */
6315 1480118 : if (optimize
6316 434555 : || cfun->tail_call_marked
6317 434394 : || flag_optimize_sibling_calls
6318 434353 : || flag_ipa_icf_functions
6319 434296 : || in_lto_p)
6320 1054402 : df_update_entry_exit_and_calls ();
6321 : else
6322 : {
6323 425716 : df_update_entry_block_defs ();
6324 425716 : df_update_exit_block_uses ();
6325 : }
6326 1480118 : }
6327 :
6328 : /* Reposition the prologue-end and epilogue-begin notes after
6329 : instruction scheduling. */
6330 :
6331 : void
6332 965490 : reposition_prologue_and_epilogue_notes (void)
6333 : {
6334 965490 : if (!targetm.have_prologue ()
6335 0 : && !targetm.have_epilogue ()
6336 0 : && !targetm.have_sibcall_epilogue ()
6337 965490 : && !targetm.emit_epilogue_for_sibcall)
6338 : return;
6339 :
6340 : /* Since the hash table is created on demand, the fact that it is
6341 : non-null is a signal that it is non-empty. */
6342 965490 : if (prologue_insn_hash != NULL)
6343 : {
6344 965490 : size_t len = prologue_insn_hash->elements ();
6345 965490 : rtx_insn *insn, *last = NULL, *note = NULL;
6346 :
6347 : /* Scan from the beginning until we reach the last prologue insn. */
6348 : /* ??? While we do have the CFG intact, there are two problems:
6349 : (1) The prologue can contain loops (typically probing the stack),
6350 : which means that the end of the prologue isn't in the first bb.
6351 : (2) Sometimes the PROLOGUE_END note gets pushed into the next bb. */
6352 68713192 : for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
6353 : {
6354 68177022 : if (NOTE_P (insn))
6355 : {
6356 12822823 : if (NOTE_KIND (insn) == NOTE_INSN_PROLOGUE_END)
6357 67747702 : note = insn;
6358 : }
6359 55354199 : else if (contains (insn, prologue_insn_hash))
6360 : {
6361 3448900 : last = insn;
6362 3448900 : if (--len == 0)
6363 : break;
6364 : }
6365 : }
6366 :
6367 965490 : if (last)
6368 : {
6369 626804 : if (note == NULL)
6370 : {
6371 : /* Scan forward looking for the PROLOGUE_END note. It should
6372 : be right at the beginning of the block, possibly with other
6373 : insn notes that got moved there. */
6374 1396 : for (note = NEXT_INSN (last); ; note = NEXT_INSN (note))
6375 : {
6376 1396 : if (NOTE_P (note)
6377 1282 : && NOTE_KIND (note) == NOTE_INSN_PROLOGUE_END)
6378 : break;
6379 : }
6380 : }
6381 :
6382 : /* Avoid placing note between CODE_LABEL and BASIC_BLOCK note. */
6383 626804 : if (LABEL_P (last))
6384 229 : last = NEXT_INSN (last);
6385 626804 : reorder_insns (note, note, last);
6386 : }
6387 : }
6388 :
6389 965490 : if (epilogue_insn_hash != NULL)
6390 : {
6391 965490 : edge_iterator ei;
6392 965490 : edge e;
6393 :
6394 2194280 : FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
6395 : {
6396 1228790 : rtx_insn *insn, *first = NULL, *note = NULL;
6397 1228790 : basic_block bb = e->src;
6398 :
6399 : /* Scan from the beginning until we reach the first epilogue insn. */
6400 16697009 : FOR_BB_INSNS (bb, insn)
6401 : {
6402 16545661 : if (NOTE_P (insn))
6403 : {
6404 3941665 : if (NOTE_KIND (insn) == NOTE_INSN_EPILOGUE_BEG)
6405 : {
6406 1077442 : note = insn;
6407 1077442 : if (first != NULL)
6408 : break;
6409 : }
6410 : }
6411 12603996 : else if (first == NULL && contains (insn, epilogue_insn_hash))
6412 : {
6413 1077446 : first = insn;
6414 1077446 : if (note != NULL)
6415 : break;
6416 : }
6417 : }
6418 :
6419 1228790 : if (note)
6420 : {
6421 : /* If the function has a single basic block, and no real
6422 : epilogue insns (e.g. sibcall with no cleanup), the
6423 : epilogue note can get scheduled before the prologue
6424 : note. If we have frame related prologue insns, having
6425 : them scanned during the epilogue will result in a crash.
6426 : In this case re-order the epilogue note to just before
6427 : the last insn in the block. */
6428 1077442 : if (first == NULL)
6429 0 : first = BB_END (bb);
6430 :
6431 1077442 : if (PREV_INSN (first) != note)
6432 29976 : reorder_insns (note, note, PREV_INSN (first));
6433 : }
6434 : }
6435 : }
6436 : }
6437 :
6438 : /* Returns the name of function declared by FNDECL. */
6439 : const char *
6440 119355 : fndecl_name (tree fndecl)
6441 : {
6442 119355 : if (fndecl == NULL)
6443 : return "(nofn)";
6444 119340 : return lang_hooks.decl_printable_name (fndecl, 1);
6445 : }
6446 :
6447 : /* Returns the name of function FN. */
6448 : const char *
6449 119302 : function_name (const function *fn)
6450 : {
6451 119302 : tree fndecl = (fn == NULL) ? NULL : fn->decl;
6452 119302 : return fndecl_name (fndecl);
6453 : }
6454 :
6455 : /* Returns the name of the current function. */
6456 : const char *
6457 8725 : current_function_name (void)
6458 : {
6459 8725 : return function_name (cfun);
6460 : }
6461 :
6462 :
6463 : static void
6464 0 : rest_of_handle_check_leaf_regs (void)
6465 : {
6466 : #ifdef LEAF_REGISTERS
6467 : crtl->uses_only_leaf_regs
6468 : = optimize > 0 && only_leaf_regs_used () && leaf_function_p ();
6469 : #endif
6470 0 : }
6471 :
6472 : /* Insert a TYPE into the used types hash table of CFUN. */
6473 :
6474 : static void
6475 56245325 : used_types_insert_helper (tree type, struct function *func)
6476 : {
6477 56245325 : if (type != NULL && func != NULL)
6478 : {
6479 56245325 : if (func->used_types_hash == NULL)
6480 17091229 : func->used_types_hash = hash_set<tree>::create_ggc (37);
6481 :
6482 56245325 : func->used_types_hash->add (type);
6483 : }
6484 56245325 : }
6485 :
6486 : /* Given a type, insert it into the used hash table in cfun. */
6487 : void
6488 206180284 : used_types_insert (tree t)
6489 : {
6490 215701171 : while (POINTER_TYPE_P (t) || TREE_CODE (t) == ARRAY_TYPE)
6491 9742742 : if (TYPE_NAME (t))
6492 : break;
6493 : else
6494 9520887 : t = TREE_TYPE (t);
6495 206180284 : if (TREE_CODE (t) == ERROR_MARK)
6496 : return;
6497 206180279 : if (TYPE_NAME (t) == NULL_TREE
6498 206180279 : || TYPE_NAME (t) == TYPE_NAME (TYPE_MAIN_VARIANT (t)))
6499 75634674 : t = TYPE_MAIN_VARIANT (t);
6500 206180279 : if (debug_info_level > DINFO_LEVEL_NONE)
6501 : {
6502 81942429 : if (cfun)
6503 56245325 : used_types_insert_helper (t, cfun);
6504 : else
6505 : {
6506 : /* So this might be a type referenced by a global variable.
6507 : Record that type so that we can later decide to emit its
6508 : debug information. */
6509 25697104 : vec_safe_push (types_used_by_cur_var_decl, t);
6510 : }
6511 : }
6512 : }
6513 :
6514 : /* Helper to Hash a struct types_used_by_vars_entry. */
6515 :
6516 : static hashval_t
6517 168628537 : hash_types_used_by_vars_entry (const struct types_used_by_vars_entry *entry)
6518 : {
6519 168628537 : gcc_assert (entry && entry->var_decl && entry->type);
6520 :
6521 168628537 : return iterative_hash_object (entry->type,
6522 : iterative_hash_object (entry->var_decl, 0));
6523 : }
6524 :
6525 : /* Hash function of the types_used_by_vars_entry hash table. */
6526 :
6527 : hashval_t
6528 168628537 : used_type_hasher::hash (types_used_by_vars_entry *entry)
6529 : {
6530 168628537 : return hash_types_used_by_vars_entry (entry);
6531 : }
6532 :
6533 : /*Equality function of the types_used_by_vars_entry hash table. */
6534 :
6535 : bool
6536 183674521 : used_type_hasher::equal (types_used_by_vars_entry *e1,
6537 : types_used_by_vars_entry *e2)
6538 : {
6539 183674521 : return (e1->var_decl == e2->var_decl && e1->type == e2->type);
6540 : }
6541 :
6542 : /* Inserts an entry into the types_used_by_vars_hash hash table. */
6543 :
6544 : void
6545 25626045 : types_used_by_var_decl_insert (tree type, tree var_decl)
6546 : {
6547 25626045 : if (type != NULL && var_decl != NULL)
6548 : {
6549 25626045 : types_used_by_vars_entry **slot;
6550 25626045 : struct types_used_by_vars_entry e;
6551 25626045 : e.var_decl = var_decl;
6552 25626045 : e.type = type;
6553 25626045 : if (types_used_by_vars_hash == NULL)
6554 14613 : types_used_by_vars_hash
6555 14613 : = hash_table<used_type_hasher>::create_ggc (37);
6556 :
6557 25626045 : slot = types_used_by_vars_hash->find_slot (&e, INSERT);
6558 25626045 : if (*slot == NULL)
6559 : {
6560 7699029 : struct types_used_by_vars_entry *entry;
6561 7699029 : entry = ggc_alloc<types_used_by_vars_entry> ();
6562 7699029 : entry->type = type;
6563 7699029 : entry->var_decl = var_decl;
6564 7699029 : *slot = entry;
6565 : }
6566 : }
6567 25626045 : }
6568 :
6569 : namespace {
6570 :
6571 : const pass_data pass_data_leaf_regs =
6572 : {
6573 : RTL_PASS, /* type */
6574 : "*leaf_regs", /* name */
6575 : OPTGROUP_NONE, /* optinfo_flags */
6576 : TV_NONE, /* tv_id */
6577 : 0, /* properties_required */
6578 : 0, /* properties_provided */
6579 : 0, /* properties_destroyed */
6580 : 0, /* todo_flags_start */
6581 : 0, /* todo_flags_finish */
6582 : };
6583 :
6584 : class pass_leaf_regs : public rtl_opt_pass
6585 : {
6586 : public:
6587 288775 : pass_leaf_regs (gcc::context *ctxt)
6588 577550 : : rtl_opt_pass (pass_data_leaf_regs, ctxt)
6589 : {}
6590 :
6591 : /* opt_pass methods: */
6592 1480118 : unsigned int execute (function *) final override
6593 : {
6594 1480118 : rest_of_handle_check_leaf_regs ();
6595 1480118 : return 0;
6596 : }
6597 :
6598 : }; // class pass_leaf_regs
6599 :
6600 : } // anon namespace
6601 :
6602 : rtl_opt_pass *
6603 288775 : make_pass_leaf_regs (gcc::context *ctxt)
6604 : {
6605 288775 : return new pass_leaf_regs (ctxt);
6606 : }
6607 :
6608 : static void
6609 1480118 : rest_of_handle_thread_prologue_and_epilogue (function *fun)
6610 : {
6611 : /* prepare_shrink_wrap is sensitive to the block structure of the control
6612 : flow graph, so clean it up first. */
6613 1480118 : if (cfun->tail_call_marked || optimize)
6614 1045724 : cleanup_cfg (0);
6615 :
6616 : /* On some machines, the prologue and epilogue code, or parts thereof,
6617 : can be represented as RTL. Doing so lets us schedule insns between
6618 : it and the rest of the code and also allows delayed branch
6619 : scheduling to operate in the epilogue. */
6620 1480118 : thread_prologue_and_epilogue_insns ();
6621 :
6622 : /* Some non-cold blocks may now be only reachable from cold blocks.
6623 : Fix that up. */
6624 1480118 : fixup_partitions ();
6625 :
6626 : /* After prologue and epilogue generation, the judgement on whether
6627 : one memory access onto stack frame may trap or not could change,
6628 : since we get more exact stack information by now. So try to
6629 : remove any EH edges here, see PR90259. */
6630 1480118 : if (fun->can_throw_non_call_exceptions)
6631 262869 : purge_all_dead_edges ();
6632 :
6633 : /* Shrink-wrapping can result in unreachable edges in the epilogue,
6634 : see PR57320. */
6635 1914673 : cleanup_cfg (optimize ? CLEANUP_EXPENSIVE : 0);
6636 :
6637 : /* The stack usage info is finalized during prologue expansion. */
6638 1480118 : if (flag_stack_usage_info || flag_callgraph_info)
6639 356 : output_stack_usage ();
6640 1480118 : }
6641 :
6642 : /* Record a final call to CALLEE at LOCATION. */
6643 :
6644 : void
6645 0 : record_final_call (tree callee, location_t location)
6646 : {
6647 0 : struct callinfo_callee datum = { location, callee };
6648 0 : vec_safe_push (cfun->su->callees, datum);
6649 0 : }
6650 :
6651 : /* Record a dynamic allocation made for DECL_OR_EXP. */
6652 :
6653 : void
6654 0 : record_dynamic_alloc (tree decl_or_exp)
6655 : {
6656 0 : struct callinfo_dalloc datum;
6657 :
6658 0 : if (DECL_P (decl_or_exp))
6659 : {
6660 0 : datum.location = DECL_SOURCE_LOCATION (decl_or_exp);
6661 0 : const char *name = lang_hooks.decl_printable_name (decl_or_exp, 2);
6662 0 : const char *dot = strrchr (name, '.');
6663 0 : if (dot)
6664 0 : name = dot + 1;
6665 0 : datum.name = ggc_strdup (name);
6666 : }
6667 : else
6668 : {
6669 0 : datum.location = EXPR_LOCATION (decl_or_exp);
6670 0 : datum.name = NULL;
6671 : }
6672 :
6673 0 : vec_safe_push (cfun->su->dallocs, datum);
6674 0 : }
6675 :
6676 : namespace {
6677 :
6678 : const pass_data pass_data_thread_prologue_and_epilogue =
6679 : {
6680 : RTL_PASS, /* type */
6681 : "pro_and_epilogue", /* name */
6682 : OPTGROUP_NONE, /* optinfo_flags */
6683 : TV_THREAD_PROLOGUE_AND_EPILOGUE, /* tv_id */
6684 : 0, /* properties_required */
6685 : 0, /* properties_provided */
6686 : 0, /* properties_destroyed */
6687 : 0, /* todo_flags_start */
6688 : ( TODO_df_verify | TODO_df_finish ), /* todo_flags_finish */
6689 : };
6690 :
6691 : class pass_thread_prologue_and_epilogue : public rtl_opt_pass
6692 : {
6693 : public:
6694 288775 : pass_thread_prologue_and_epilogue (gcc::context *ctxt)
6695 577550 : : rtl_opt_pass (pass_data_thread_prologue_and_epilogue, ctxt)
6696 : {}
6697 :
6698 : /* opt_pass methods: */
6699 1480125 : bool gate (function *) final override
6700 : {
6701 1480125 : return !targetm.use_late_prologue_epilogue ();
6702 : }
6703 :
6704 1480118 : unsigned int execute (function * fun) final override
6705 : {
6706 1480118 : rest_of_handle_thread_prologue_and_epilogue (fun);
6707 1480118 : return 0;
6708 : }
6709 :
6710 : }; // class pass_thread_prologue_and_epilogue
6711 :
6712 : const pass_data pass_data_late_thread_prologue_and_epilogue =
6713 : {
6714 : RTL_PASS, /* type */
6715 : "late_pro_and_epilogue", /* name */
6716 : OPTGROUP_NONE, /* optinfo_flags */
6717 : TV_THREAD_PROLOGUE_AND_EPILOGUE, /* tv_id */
6718 : 0, /* properties_required */
6719 : 0, /* properties_provided */
6720 : 0, /* properties_destroyed */
6721 : 0, /* todo_flags_start */
6722 : ( TODO_df_verify | TODO_df_finish ), /* todo_flags_finish */
6723 : };
6724 :
6725 : class pass_late_thread_prologue_and_epilogue : public rtl_opt_pass
6726 : {
6727 : public:
6728 288775 : pass_late_thread_prologue_and_epilogue (gcc::context *ctxt)
6729 577550 : : rtl_opt_pass (pass_data_late_thread_prologue_and_epilogue, ctxt)
6730 : {}
6731 :
6732 : /* opt_pass methods: */
6733 1480125 : bool gate (function *) final override
6734 : {
6735 1480125 : return targetm.use_late_prologue_epilogue ();
6736 : }
6737 :
6738 0 : unsigned int execute (function *fn) final override
6739 : {
6740 : /* It's not currently possible to have both delay slots and
6741 : late prologue/epilogue, since the latter has to run before
6742 : the former, and the former won't honor whatever restrictions
6743 : the latter is trying to enforce. */
6744 0 : gcc_assert (!DELAY_SLOTS);
6745 0 : rest_of_handle_thread_prologue_and_epilogue (fn);
6746 0 : return 0;
6747 : }
6748 : }; // class pass_late_thread_prologue_and_epilogue
6749 :
6750 : } // anon namespace
6751 :
6752 : rtl_opt_pass *
6753 288775 : make_pass_thread_prologue_and_epilogue (gcc::context *ctxt)
6754 : {
6755 288775 : return new pass_thread_prologue_and_epilogue (ctxt);
6756 : }
6757 :
6758 : rtl_opt_pass *
6759 288775 : make_pass_late_thread_prologue_and_epilogue (gcc::context *ctxt)
6760 : {
6761 288775 : return new pass_late_thread_prologue_and_epilogue (ctxt);
6762 : }
6763 :
6764 : namespace {
6765 :
6766 : const pass_data pass_data_zero_call_used_regs =
6767 : {
6768 : RTL_PASS, /* type */
6769 : "zero_call_used_regs", /* name */
6770 : OPTGROUP_NONE, /* optinfo_flags */
6771 : TV_NONE, /* tv_id */
6772 : 0, /* properties_required */
6773 : 0, /* properties_provided */
6774 : 0, /* properties_destroyed */
6775 : 0, /* todo_flags_start */
6776 : 0, /* todo_flags_finish */
6777 : };
6778 :
6779 : class pass_zero_call_used_regs: public rtl_opt_pass
6780 : {
6781 : public:
6782 288775 : pass_zero_call_used_regs (gcc::context *ctxt)
6783 577550 : : rtl_opt_pass (pass_data_zero_call_used_regs, ctxt)
6784 : {}
6785 :
6786 : /* opt_pass methods: */
6787 : unsigned int execute (function *) final override;
6788 :
6789 : }; // class pass_zero_call_used_regs
6790 :
6791 : unsigned int
6792 1480118 : pass_zero_call_used_regs::execute (function *fun)
6793 : {
6794 1480118 : using namespace zero_regs_flags;
6795 1480118 : unsigned int zero_regs_type = UNSET;
6796 :
6797 1480118 : tree attr_zero_regs = lookup_attribute ("zero_call_used_regs",
6798 1480118 : DECL_ATTRIBUTES (fun->decl));
6799 :
6800 : /* Get the type of zero_call_used_regs from function attribute.
6801 : We have filtered out invalid attribute values already at this point. */
6802 1480118 : if (attr_zero_regs)
6803 : {
6804 : /* The TREE_VALUE of an attribute is a TREE_LIST whose TREE_VALUE
6805 : is the attribute argument's value. */
6806 88 : attr_zero_regs = TREE_VALUE (attr_zero_regs);
6807 88 : gcc_assert (TREE_CODE (attr_zero_regs) == TREE_LIST);
6808 88 : attr_zero_regs = TREE_VALUE (attr_zero_regs);
6809 88 : gcc_assert (TREE_CODE (attr_zero_regs) == STRING_CST);
6810 :
6811 496 : for (unsigned int i = 0; zero_call_used_regs_opts[i].name != NULL; ++i)
6812 496 : if (strcmp (TREE_STRING_POINTER (attr_zero_regs),
6813 496 : zero_call_used_regs_opts[i].name) == 0)
6814 : {
6815 88 : zero_regs_type = zero_call_used_regs_opts[i].flag;
6816 88 : break;
6817 : }
6818 : }
6819 :
6820 88 : if (!zero_regs_type)
6821 1480030 : zero_regs_type = flag_zero_call_used_regs;
6822 :
6823 : /* No need to zero call-used-regs when no user request is present. */
6824 1480118 : if (!(zero_regs_type & ENABLED))
6825 : return 0;
6826 :
6827 183 : edge_iterator ei;
6828 183 : edge e;
6829 :
6830 : /* This pass needs data flow information. */
6831 183 : df_analyze ();
6832 :
6833 : /* Iterate over the function's return instructions and insert any
6834 : register zeroing required by the -fzero-call-used-regs command-line
6835 : option or the "zero_call_used_regs" function attribute. */
6836 367 : FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
6837 : {
6838 184 : rtx_insn *insn = BB_END (e->src);
6839 184 : if (JUMP_P (insn) && ANY_RETURN_P (JUMP_LABEL (insn)))
6840 177 : gen_call_used_regs_seq (insn, zero_regs_type);
6841 : }
6842 :
6843 : return 0;
6844 : }
6845 :
6846 : } // anon namespace
6847 :
6848 : rtl_opt_pass *
6849 288775 : make_pass_zero_call_used_regs (gcc::context *ctxt)
6850 : {
6851 288775 : return new pass_zero_call_used_regs (ctxt);
6852 : }
6853 :
6854 : /* If CONSTRAINT is a matching constraint, then return its number.
6855 : Otherwise, return -1. */
6856 :
6857 : static int
6858 43678 : matching_constraint_num (const char *constraint)
6859 : {
6860 43678 : if (*constraint == '%')
6861 1140 : constraint++;
6862 :
6863 43678 : if (IN_RANGE (*constraint, '0', '9'))
6864 31939 : return strtoul (constraint, NULL, 10);
6865 :
6866 : return -1;
6867 : }
6868 :
6869 : /* This mini-pass fixes fall-out from SSA in asm statements that have
6870 : in-out constraints. Say you start with
6871 :
6872 : orig = inout;
6873 : asm ("": "+mr" (inout));
6874 : use (orig);
6875 :
6876 : which is transformed very early to use explicit output and match operands:
6877 :
6878 : orig = inout;
6879 : asm ("": "=mr" (inout) : "0" (inout));
6880 : use (orig);
6881 :
6882 : Or, after SSA and copyprop,
6883 :
6884 : asm ("": "=mr" (inout_2) : "0" (inout_1));
6885 : use (inout_1);
6886 :
6887 : Clearly inout_2 and inout_1 can't be coalesced easily anymore, as
6888 : they represent two separate values, so they will get different pseudo
6889 : registers during expansion. Then, since the two operands need to match
6890 : per the constraints, but use different pseudo registers, reload can
6891 : only register a reload for these operands. But reloads can only be
6892 : satisfied by hardregs, not by memory, so we need a register for this
6893 : reload, just because we are presented with non-matching operands.
6894 : So, even though we allow memory for this operand, no memory can be
6895 : used for it, just because the two operands don't match. This can
6896 : cause reload failures on register-starved targets.
6897 :
6898 : So it's a symptom of reload not being able to use memory for reloads
6899 : or, alternatively it's also a symptom of both operands not coming into
6900 : reload as matching (in which case the pseudo could go to memory just
6901 : fine, as the alternative allows it, and no reload would be necessary).
6902 : We fix the latter problem here, by transforming
6903 :
6904 : asm ("": "=mr" (inout_2) : "0" (inout_1));
6905 :
6906 : back to
6907 :
6908 : inout_2 = inout_1;
6909 : asm ("": "=mr" (inout_2) : "0" (inout_2)); */
6910 :
6911 : static void
6912 34736 : match_asm_constraints_1 (rtx_insn *insn, rtx *p_sets, int noutputs)
6913 : {
6914 34736 : int i;
6915 34736 : bool changed = false;
6916 34736 : rtx op = SET_SRC (p_sets[0]);
6917 34736 : int ninputs = ASM_OPERANDS_INPUT_LENGTH (op);
6918 34736 : rtvec inputs = ASM_OPERANDS_INPUT_VEC (op);
6919 34736 : bool *output_matched = XALLOCAVEC (bool, noutputs);
6920 :
6921 34736 : memset (output_matched, 0, noutputs * sizeof (bool));
6922 77485 : for (i = 0; i < ninputs; i++)
6923 : {
6924 42749 : rtx input, output;
6925 42749 : rtx_insn *insns;
6926 42749 : const char *constraint = ASM_OPERANDS_INPUT_CONSTRAINT (op, i);
6927 42749 : int match, j;
6928 :
6929 42749 : match = matching_constraint_num (constraint);
6930 42749 : if (match < 0)
6931 11735 : continue;
6932 :
6933 31014 : gcc_assert (match < noutputs);
6934 31014 : output = SET_DEST (p_sets[match]);
6935 31014 : input = RTVEC_ELT (inputs, i);
6936 : /* Only do the transformation for pseudos. */
6937 31987 : if (! REG_P (output)
6938 30837 : || rtx_equal_p (output, input)
6939 30180 : || !(REG_P (input) || SUBREG_P (input)
6940 3160 : || MEM_P (input) || CONSTANT_P (input))
6941 61193 : || !general_operand (input, GET_MODE (output)))
6942 973 : continue;
6943 :
6944 : /* We can't do anything if the output is also used as input,
6945 : as we're going to overwrite it. */
6946 81960 : for (j = 0; j < ninputs; j++)
6947 51919 : if (reg_overlap_mentioned_p (output, RTVEC_ELT (inputs, j)))
6948 : break;
6949 30041 : if (j != ninputs)
6950 0 : continue;
6951 :
6952 : /* Avoid changing the same input several times. For
6953 : asm ("" : "=mr" (out1), "=mr" (out2) : "0" (in), "1" (in));
6954 : only change it once (to out1), rather than changing it
6955 : first to out1 and afterwards to out2. */
6956 30041 : if (i > 0)
6957 : {
6958 39980 : for (j = 0; j < noutputs; j++)
6959 32650 : if (output_matched[j] && input == SET_DEST (p_sets[j]))
6960 : break;
6961 7401 : if (j != noutputs)
6962 71 : continue;
6963 : }
6964 29970 : output_matched[match] = true;
6965 :
6966 29970 : start_sequence ();
6967 29970 : emit_move_insn (output, copy_rtx (input));
6968 29970 : insns = end_sequence ();
6969 29970 : emit_insn_before (insns, insn);
6970 :
6971 29970 : constraint = ASM_OPERANDS_OUTPUT_CONSTRAINT(SET_SRC(p_sets[match]));
6972 29970 : bool early_clobber_p = strchr (constraint, '&') != NULL;
6973 :
6974 : /* Now replace all mentions of the input with output. We can't
6975 : just replace the occurrence in inputs[i], as the register might
6976 : also be used in some other input (or even in an address of an
6977 : output), which would mean possibly increasing the number of
6978 : inputs by one (namely 'output' in addition), which might pose
6979 : a too complicated problem for reload to solve. E.g. this situation:
6980 :
6981 : asm ("" : "=r" (output), "=m" (input) : "0" (input))
6982 :
6983 : Here 'input' is used in two occurrences as input (once for the
6984 : input operand, once for the address in the second output operand).
6985 : If we would replace only the occurrence of the input operand (to
6986 : make the matching) we would be left with this:
6987 :
6988 : output = input
6989 : asm ("" : "=r" (output), "=m" (input) : "0" (output))
6990 :
6991 : Now we suddenly have two different input values (containing the same
6992 : value, but different pseudos) where we formerly had only one.
6993 : With more complicated asms this might lead to reload failures
6994 : which wouldn't have happen without this pass. So, iterate over
6995 : all operands and replace all occurrences of the register used.
6996 :
6997 : However, if one or more of the 'input' uses have a non-matching
6998 : constraint and the matched output operand is an early clobber
6999 : operand, then do not replace the input operand, since by definition
7000 : it conflicts with the output operand and cannot share the same
7001 : register. See PR89313 for details. */
7002 :
7003 147167 : for (j = 0; j < noutputs; j++)
7004 117197 : if (!rtx_equal_p (SET_DEST (p_sets[j]), input)
7005 117197 : && reg_overlap_mentioned_p (input, SET_DEST (p_sets[j])))
7006 635 : SET_DEST (p_sets[j]) = replace_rtx (SET_DEST (p_sets[j]),
7007 : input, output);
7008 81601 : for (j = 0; j < ninputs; j++)
7009 51631 : if (reg_overlap_mentioned_p (input, RTVEC_ELT (inputs, j)))
7010 : {
7011 28177 : if (!early_clobber_p
7012 29106 : || match == matching_constraint_num
7013 929 : (ASM_OPERANDS_INPUT_CONSTRAINT (op, j)))
7014 28173 : RTVEC_ELT (inputs, j) = replace_rtx (RTVEC_ELT (inputs, j),
7015 : input, output);
7016 : }
7017 :
7018 : changed = true;
7019 : }
7020 :
7021 34736 : if (changed)
7022 22985 : df_insn_rescan (insn);
7023 34736 : }
7024 :
7025 : /* It is expected and desired that optimizations coalesce multiple pseudos into
7026 : one whenever possible. However, in case of hard register constraints we may
7027 : have to undo this and introduce copies since otherwise we could constraint a
7028 : single pseudo to different hard registers. For example, during register
7029 : allocation the following insn would be unsatisfiable since pseudo 60 is
7030 : constrained to hard register r5 and r6 at the same time.
7031 :
7032 : (insn 7 5 0 2 (asm_operands/v ("foo") ("") 0 [
7033 : (reg:DI 60) repeated x2
7034 : ]
7035 : [
7036 : (asm_input:DI ("{r5}") t.c:4)
7037 : (asm_input:DI ("{r6}") t.c:4)
7038 : ]
7039 : [] t.c:4) "t.c":4:3 -1
7040 : (expr_list:REG_DEAD (reg:DI 60)
7041 : (nil)))
7042 :
7043 : Therefore, introduce a copy of pseudo 60 and transform it into
7044 :
7045 : (insn 10 5 7 2 (set (reg:DI 62)
7046 : (reg:DI 60)) "t.c":4:3 1503 {*movdi_64}
7047 : (nil))
7048 : (insn 7 10 11 2 (asm_operands/v ("foo") ("") 0 [
7049 : (reg:DI 60)
7050 : (reg:DI 62)
7051 : ]
7052 : [
7053 : (asm_input:DI ("{r5}") t.c:4)
7054 : (asm_input:DI ("{r6}") t.c:4)
7055 : ]
7056 : [] t.c:4) "t.c":4:3 -1
7057 : (expr_list:REG_DEAD (reg:DI 62)
7058 : (expr_list:REG_DEAD (reg:DI 60)
7059 : (nil))))
7060 :
7061 : Now, LRA can assign pseudo 60 to r5, and pseudo 62 to r6.
7062 :
7063 : TODO: The current implementation is conservative and we could do a bit
7064 : better in case of alternatives. For example
7065 :
7066 : (insn 7 5 0 2 (asm_operands/v ("foo") ("") 0 [
7067 : (reg:DI 60) repeated x2
7068 : ]
7069 : [
7070 : (asm_input:DI ("r,{r5}") t.c:4)
7071 : (asm_input:DI ("{r6},r") t.c:4)
7072 : ]
7073 : [] t.c:4) "t.c":4:3 -1
7074 : (expr_list:REG_DEAD (reg:DI 60)
7075 : (nil)))
7076 :
7077 : For this insn we wouldn't need to come up with a copy of pseudo 60 since in
7078 : each alternative pseudo 60 is constrained exactly one time. */
7079 :
7080 : static void
7081 3572060 : match_asm_constraints_2 (rtx_insn *insn, rtx pat)
7082 : {
7083 3572060 : rtx op;
7084 3572060 : if (GET_CODE (pat) == SET && GET_CODE (SET_SRC (pat)) == ASM_OPERANDS)
7085 : op = SET_SRC (pat);
7086 3495502 : else if (GET_CODE (pat) == ASM_OPERANDS)
7087 : op = pat;
7088 : else
7089 3423028 : return;
7090 149032 : int ninputs = ASM_OPERANDS_INPUT_LENGTH (op);
7091 149032 : rtvec inputs = ASM_OPERANDS_INPUT_VEC (op);
7092 149032 : bool changed = false;
7093 149032 : auto_bitmap constrained_regs;
7094 :
7095 269648 : for (int i = 0; i < ninputs; ++i)
7096 : {
7097 120616 : rtx input = RTVEC_ELT (inputs, i);
7098 120616 : const char *constraint = ASM_OPERANDS_INPUT_CONSTRAINT (op, i);
7099 23517 : if ((!REG_P (input) && !SUBREG_P (input))
7100 97216 : || (REG_P (input) && HARD_REGISTER_P (input))
7101 216752 : || strchr (constraint, '{') == nullptr)
7102 120568 : continue;
7103 48 : int regno;
7104 48 : if (SUBREG_P (input))
7105 : {
7106 0 : if (REG_P (SUBREG_REG (input)))
7107 0 : regno = REGNO (SUBREG_REG (input));
7108 : else
7109 0 : continue;
7110 : }
7111 : else
7112 48 : regno = REGNO (input);
7113 : /* Keep the first usage of a constrained pseudo as is and only
7114 : introduce copies for subsequent usages. */
7115 48 : if (! bitmap_bit_p (constrained_regs, regno))
7116 : {
7117 48 : bitmap_set_bit (constrained_regs, regno);
7118 48 : continue;
7119 : }
7120 0 : rtx tmp = gen_reg_rtx (GET_MODE (input));
7121 0 : start_sequence ();
7122 0 : emit_move_insn (tmp, input);
7123 0 : rtx_insn *insns = get_insns ();
7124 0 : end_sequence ();
7125 0 : emit_insn_before (insns, insn);
7126 0 : RTVEC_ELT (inputs, i) = tmp;
7127 0 : changed = true;
7128 : }
7129 :
7130 149032 : if (changed)
7131 0 : df_insn_rescan (insn);
7132 149032 : }
7133 :
7134 : /* Add the decl D to the local_decls list of FUN. */
7135 :
7136 : void
7137 37633322 : add_local_decl (struct function *fun, tree d)
7138 : {
7139 37633322 : gcc_assert (VAR_P (d));
7140 37633322 : vec_safe_push (fun->local_decls, d);
7141 37633322 : }
7142 :
7143 : namespace {
7144 :
7145 : const pass_data pass_data_match_asm_constraints =
7146 : {
7147 : RTL_PASS, /* type */
7148 : "asmcons", /* name */
7149 : OPTGROUP_NONE, /* optinfo_flags */
7150 : TV_NONE, /* tv_id */
7151 : 0, /* properties_required */
7152 : 0, /* properties_provided */
7153 : 0, /* properties_destroyed */
7154 : 0, /* todo_flags_start */
7155 : 0, /* todo_flags_finish */
7156 : };
7157 :
7158 : class pass_match_asm_constraints : public rtl_opt_pass
7159 : {
7160 : public:
7161 288775 : pass_match_asm_constraints (gcc::context *ctxt)
7162 577550 : : rtl_opt_pass (pass_data_match_asm_constraints, ctxt)
7163 : {}
7164 :
7165 : /* opt_pass methods: */
7166 : unsigned int execute (function *) final override;
7167 :
7168 : }; // class pass_match_asm_constraints
7169 :
7170 : unsigned
7171 1480116 : pass_match_asm_constraints::execute (function *fun)
7172 : {
7173 1480116 : basic_block bb;
7174 1480116 : rtx_insn *insn;
7175 1480116 : rtx pat, *p_sets;
7176 1480116 : int noutputs;
7177 :
7178 1480116 : if (!crtl->has_asm_statement)
7179 : return 0;
7180 :
7181 34499 : df_set_flags (DF_DEFER_INSN_RESCAN);
7182 332641 : FOR_EACH_BB_FN (bb, fun)
7183 : {
7184 3943422 : FOR_BB_INSNS (bb, insn)
7185 : {
7186 3645280 : if (!INSN_P (insn))
7187 653727 : continue;
7188 :
7189 2991553 : pat = PATTERN (insn);
7190 :
7191 2991553 : if (GET_CODE (pat) == PARALLEL)
7192 1482867 : for (int i = XVECLEN (pat, 0) - 1; i >= 0; --i)
7193 1031687 : match_asm_constraints_2 (insn, XVECEXP (pat, 0, i));
7194 : else
7195 2540373 : match_asm_constraints_2 (insn, pat);
7196 :
7197 2991553 : if (GET_CODE (pat) == PARALLEL)
7198 451180 : p_sets = &XVECEXP (pat, 0, 0), noutputs = XVECLEN (pat, 0);
7199 2540373 : else if (GET_CODE (pat) == SET)
7200 1728656 : p_sets = &PATTERN (insn), noutputs = 1;
7201 : else
7202 811717 : continue;
7203 :
7204 2179836 : if (GET_CODE (*p_sets) == SET
7205 2103568 : && GET_CODE (SET_SRC (*p_sets)) == ASM_OPERANDS)
7206 34736 : match_asm_constraints_1 (insn, p_sets, noutputs);
7207 : }
7208 : }
7209 :
7210 : return TODO_df_finish;
7211 : }
7212 :
7213 : } // anon namespace
7214 :
7215 : rtl_opt_pass *
7216 288775 : make_pass_match_asm_constraints (gcc::context *ctxt)
7217 : {
7218 288775 : return new pass_match_asm_constraints (ctxt);
7219 : }
7220 :
7221 :
7222 : #include "gt-function.h"
|