Line data Source code
1 : /* Lower _BitInt(N) operations to scalar operations.
2 : Copyright (C) 2023-2026 Free Software Foundation, Inc.
3 : Contributed by Jakub Jelinek <jakub@redhat.com>.
4 :
5 : This file is part of GCC.
6 :
7 : GCC is free software; you can redistribute it and/or modify it
8 : under the terms of the GNU General Public License as published by the
9 : Free Software Foundation; either version 3, or (at your option) any
10 : later version.
11 :
12 : GCC is distributed in the hope that it will be useful, but WITHOUT
13 : ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 : FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 : for more details.
16 :
17 : You should have received a copy of the GNU General Public License
18 : along with GCC; see the file COPYING3. If not see
19 : <http://www.gnu.org/licenses/>. */
20 :
21 : #include "config.h"
22 : #include "system.h"
23 : #include "coretypes.h"
24 : #include "backend.h"
25 : #include "rtl.h"
26 : #include "tree.h"
27 : #include "gimple.h"
28 : #include "cfghooks.h"
29 : #include "tree-pass.h"
30 : #include "ssa.h"
31 : #include "fold-const.h"
32 : #include "gimplify.h"
33 : #include "gimple-iterator.h"
34 : #include "tree-cfg.h"
35 : #include "tree-dfa.h"
36 : #include "cfgloop.h"
37 : #include "cfganal.h"
38 : #include "target.h"
39 : #include "tree-ssa-live.h"
40 : #include "tree-ssa-coalesce.h"
41 : #include "domwalk.h"
42 : #include "memmodel.h"
43 : #include "optabs.h"
44 : #include "varasm.h"
45 : #include "gimple-range.h"
46 : #include "value-range.h"
47 : #include "langhooks.h"
48 : #include "gimplify-me.h"
49 : #include "diagnostic-core.h"
50 : #include "tree-eh.h"
51 : #include "tree-pretty-print.h"
52 : #include "alloc-pool.h"
53 : #include "tree-into-ssa.h"
54 : #include "tree-cfgcleanup.h"
55 : #include "tree-switch-conversion.h"
56 : #include "ubsan.h"
57 : #include "stor-layout.h"
58 : #include "gimple-lower-bitint.h"
59 :
60 : /* Split BITINT_TYPE precisions in 4 categories. Small _BitInt, where
61 : target hook says it is a single limb, middle _BitInt which per ABI
62 : does not, but there is some INTEGER_TYPE in which arithmetics can be
63 : performed (operations on such _BitInt are lowered to casts to that
64 : arithmetic type and cast back; e.g. on x86_64 limb is DImode, but
65 : target supports TImode, so _BitInt(65) to _BitInt(128) are middle
   ones), large _BitInt which should be handled by straight line code and
67 : finally huge _BitInt which should be handled by loops over the limbs. */
68 :
enum bitint_prec_kind {
  /* Fits in a single limb per the target hook; no lowering needed.  */
  bitint_prec_small,
  /* No single limb, but some INTEGER_TYPE can hold it; lowered to casts
     to that arithmetic type and back.  */
  bitint_prec_middle,
  /* Lowered to straight line code over the limbs.  */
  bitint_prec_large,
  /* Lowered to loops over the limbs.  */
  bitint_prec_huge
};
75 :
76 : /* Caches to speed up bitint_precision_kind. */
77 :
/* Largest precision seen so far in each category boundary; 0 until
   initialized lazily by bitint_precision_kind.  */
static int small_max_prec, mid_min_prec, large_min_prec, huge_min_prec;
/* Precision of the limb mode and of the ABI limb mode, from the
   targetm.c.bitint_type_info hook; 0 until initialized.  */
static int limb_prec, abi_limb_prec;
/* Cached info.big_endian and info.extended flags from the same hook.  */
static bool bitint_big_endian, bitint_extended;
81 :
82 : /* Categorize _BitInt(PREC) as small, middle, large or huge. */
83 :
static bitint_prec_kind
bitint_precision_kind (int prec)
{
  /* Fast path: answer from the caches filled in by earlier calls.  */
  if (prec <= small_max_prec)
    return bitint_prec_small;
  if (huge_min_prec && prec >= huge_min_prec)
    return bitint_prec_huge;
  if (large_min_prec && prec >= large_min_prec)
    return bitint_prec_large;
  if (mid_min_prec && prec >= mid_min_prec)
    return bitint_prec_middle;

  /* Slow path: ask the target about this precision and update caches.  */
  struct bitint_info info;
  bool ok = targetm.c.bitint_type_info (prec, &info);
  gcc_assert (ok);
  scalar_int_mode limb_mode = as_a <scalar_int_mode> (info.limb_mode);
  if (prec <= GET_MODE_PRECISION (limb_mode))
    {
      /* Fits in a single limb.  small_max_prec only grows, so this stays
	 a valid upper bound for the fast path above.  */
      small_max_prec = prec;
      return bitint_prec_small;
    }
  bitint_big_endian = info.big_endian;
  bitint_extended = info.extended;
  /* Anything above the largest fixed mode cannot be a middle kind.  */
  if (!large_min_prec
      && GET_MODE_PRECISION (limb_mode) <= MAX_FIXED_MODE_SIZE)
    large_min_prec = MAX_FIXED_MODE_SIZE + 1;
  if (!limb_prec)
    limb_prec = GET_MODE_PRECISION (limb_mode);
  if (!abi_limb_prec)
    abi_limb_prec
      = GET_MODE_PRECISION (as_a <scalar_int_mode> (info.abi_limb_mode));
  if (!huge_min_prec)
    {
      /* Use loop lowering from 4 limbs up (or past the largest fixed
	 mode, whichever is higher).  */
      if (4 * limb_prec >= MAX_FIXED_MODE_SIZE)
	huge_min_prec = 4 * limb_prec;
      else
	huge_min_prec = MAX_FIXED_MODE_SIZE + 1;
    }
  if (prec <= MAX_FIXED_MODE_SIZE)
    {
      /* Middle kind: some INTEGER_TYPE can hold it.  Track the smallest
	 such precision seen for the fast path.  */
      if (!mid_min_prec || prec < mid_min_prec)
	mid_min_prec = prec;
      return bitint_prec_middle;
    }
  if (huge_min_prec && prec >= huge_min_prec)
    return bitint_prec_huge;
  return bitint_prec_large;
}
132 :
133 : /* Same for a TYPE. */
134 :
135 : static bitint_prec_kind
136 466427 : bitint_precision_kind (tree type)
137 : {
138 466427 : return bitint_precision_kind (TYPE_PRECISION (type));
139 : }
140 :
141 : /* Return minimum precision needed to describe INTEGER_CST
142 : CST. All bits above that precision up to precision of
143 : TREE_TYPE (CST) are cleared if EXT is set to 0, or set
144 : if EXT is set to -1. */
145 :
146 : static unsigned
147 5308 : bitint_min_cst_precision (tree cst, int &ext)
148 : {
149 5308 : ext = tree_int_cst_sgn (cst) < 0 ? -1 : 0;
150 5308 : wide_int w = wi::to_wide (cst);
151 5308 : unsigned min_prec = wi::min_precision (w, TYPE_SIGN (TREE_TYPE (cst)));
152 : /* For signed values, we don't need to count the sign bit,
153 : we'll use constant 0 or -1 for the upper bits. */
154 5308 : if (!TYPE_UNSIGNED (TREE_TYPE (cst)))
155 3234 : --min_prec;
156 : else
157 : {
158 : /* For unsigned values, also try signed min_precision
159 : in case the constant has lots of most significant bits set. */
160 2074 : unsigned min_prec2 = wi::min_precision (w, SIGNED) - 1;
161 2074 : if (min_prec2 < min_prec)
162 : {
163 990 : ext = -1;
164 990 : return min_prec2;
165 : }
166 : }
167 : return min_prec;
168 5308 : }
169 :
170 : namespace {
171 :
172 : /* If OP is middle _BitInt, cast it to corresponding INTEGER_TYPE
173 : cached in TYPE and return it. */
174 :
175 : tree
176 7760 : maybe_cast_middle_bitint (gimple_stmt_iterator *gsi, tree op, tree &type)
177 : {
178 7760 : if (op == NULL_TREE
179 7732 : || TREE_CODE (TREE_TYPE (op)) != BITINT_TYPE
180 14615 : || bitint_precision_kind (TREE_TYPE (op)) != bitint_prec_middle)
181 908 : return op;
182 :
183 6852 : int prec = TYPE_PRECISION (TREE_TYPE (op));
184 6852 : int uns = TYPE_UNSIGNED (TREE_TYPE (op));
185 6852 : if (type == NULL_TREE
186 2534 : || TYPE_PRECISION (type) != prec
187 9386 : || TYPE_UNSIGNED (type) != uns)
188 4318 : type = build_nonstandard_integer_type (prec, uns);
189 :
190 6852 : if (TREE_CODE (op) != SSA_NAME)
191 : {
192 2346 : tree nop = fold_convert (type, op);
193 2346 : if (is_gimple_val (nop))
194 : return nop;
195 : }
196 :
197 4506 : tree nop = make_ssa_name (type);
198 4506 : gimple *g = gimple_build_assign (nop, NOP_EXPR, op);
199 4506 : gsi_insert_before (gsi, g, GSI_SAME_STMT);
200 4506 : return nop;
201 : }
202 :
203 : /* Return true if STMT can be handled in a loop from least to most
204 : significant limb together with its dependencies. */
205 :
bool
mergeable_op (gimple *stmt)
{
  if (!is_gimple_assign (stmt))
    return false;
  switch (gimple_assign_rhs_code (stmt))
    {
    /* Operations which can be computed one limb at a time from least
       to most significant (carry propagates upwards only).  */
    case PLUS_EXPR:
    case MINUS_EXPR:
    case NEGATE_EXPR:
    case BIT_AND_EXPR:
    case BIT_IOR_EXPR:
    case BIT_XOR_EXPR:
    case BIT_NOT_EXPR:
    case SSA_NAME:
    case INTEGER_CST:
    case BIT_FIELD_REF:
      return true;
    case LSHIFT_EXPR:
      {
	/* Left shifts are mergeable only for constant shift counts
	   smaller than one limb.  */
	tree cnt = gimple_assign_rhs2 (stmt);
	if (tree_fits_uhwi_p (cnt)
	    && tree_to_uhwi (cnt) < (unsigned HOST_WIDE_INT) limb_prec)
	  return true;
      }
      break;
    CASE_CONVERT:
    case VIEW_CONVERT_EXPR:
      {
	/* Conversions between large/huge _BitInt types covering the
	   same number of limbs.  */
	tree lhs_type = TREE_TYPE (gimple_assign_lhs (stmt));
	tree rhs_type = TREE_TYPE (gimple_assign_rhs1 (stmt));
	if (TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME
	    && TREE_CODE (lhs_type) == BITINT_TYPE
	    && TREE_CODE (rhs_type) == BITINT_TYPE
	    && bitint_precision_kind (lhs_type) >= bitint_prec_large
	    && bitint_precision_kind (rhs_type) >= bitint_prec_large
	    && (CEIL (TYPE_PRECISION (lhs_type), limb_prec)
		== CEIL (TYPE_PRECISION (rhs_type), limb_prec)))
	  {
	    /* Narrowing (or equal-precision) casts are always fine.  */
	    if (TYPE_PRECISION (rhs_type) >= TYPE_PRECISION (lhs_type))
	      return true;
	    /* Widening casts are fine unless a huge lhs has precision
	       that is a multiple of 2 limbs (the 2-limbs-per-iteration
	       loop would need extension handling mid-loop).  */
	    if ((unsigned) TYPE_PRECISION (lhs_type) % (2 * limb_prec) != 0)
	      return true;
	    if (bitint_precision_kind (lhs_type) == bitint_prec_large)
	      return true;
	  }
	break;
      }
    default:
      break;
    }
  return false;
}
259 :
260 : /* Return non-zero if stmt is .{ADD,SUB,MUL}_OVERFLOW call with
261 : _Complex large/huge _BitInt lhs which has at most two immediate uses,
262 : at most one use in REALPART_EXPR stmt in the same bb and exactly one
263 : IMAGPART_EXPR use in the same bb with a single use which casts it to
264 : non-BITINT_TYPE integral type. If there is a REALPART_EXPR use,
265 : return 2. Such cases (most common uses of those builtins) can be
266 : optimized by marking their lhs and lhs of IMAGPART_EXPR and maybe lhs
267 : of REALPART_EXPR as not needed to be backed up by a stack variable.
268 : For .UBSAN_CHECK_{ADD,SUB,MUL} return 3. */
269 :
270 : int
271 20312 : optimizable_arith_overflow (gimple *stmt)
272 : {
273 20312 : bool is_ubsan = false;
274 20312 : if (!is_gimple_call (stmt) || !gimple_call_internal_p (stmt))
275 : return false;
276 4955 : switch (gimple_call_internal_fn (stmt))
277 : {
278 : case IFN_ADD_OVERFLOW:
279 : case IFN_SUB_OVERFLOW:
280 : case IFN_MUL_OVERFLOW:
281 : break;
282 48 : case IFN_UBSAN_CHECK_ADD:
283 48 : case IFN_UBSAN_CHECK_SUB:
284 48 : case IFN_UBSAN_CHECK_MUL:
285 48 : is_ubsan = true;
286 48 : break;
287 : default:
288 : return 0;
289 : }
290 4955 : tree lhs = gimple_call_lhs (stmt);
291 4955 : if (!lhs)
292 : return 0;
293 4955 : if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
294 : return 0;
295 4955 : tree type = is_ubsan ? TREE_TYPE (lhs) : TREE_TYPE (TREE_TYPE (lhs));
296 4955 : if (TREE_CODE (type) != BITINT_TYPE
297 4955 : || bitint_precision_kind (type) < bitint_prec_large)
298 0 : return 0;
299 :
300 4955 : if (is_ubsan)
301 : {
302 48 : use_operand_p use_p;
303 48 : gimple *use_stmt;
304 48 : if (!single_imm_use (lhs, &use_p, &use_stmt)
305 48 : || gimple_bb (use_stmt) != gimple_bb (stmt)
306 48 : || !gimple_store_p (use_stmt)
307 48 : || !is_gimple_assign (use_stmt)
308 48 : || gimple_has_volatile_ops (use_stmt)
309 96 : || stmt_ends_bb_p (use_stmt))
310 0 : return 0;
311 : return 3;
312 : }
313 :
314 4907 : imm_use_iterator ui;
315 4907 : use_operand_p use_p;
316 4907 : int seen = 0;
317 4907 : gimple *realpart = NULL, *cast = NULL;
318 19347 : FOR_EACH_IMM_USE_FAST (use_p, ui, lhs)
319 : {
320 9537 : gimple *g = USE_STMT (use_p);
321 9537 : if (is_gimple_debug (g))
322 0 : continue;
323 9537 : if (!is_gimple_assign (g) || gimple_bb (g) != gimple_bb (stmt))
324 : return 0;
325 9537 : if (gimple_assign_rhs_code (g) == REALPART_EXPR)
326 : {
327 4630 : if ((seen & 1) != 0)
328 : return 0;
329 4630 : seen |= 1;
330 4630 : realpart = g;
331 : }
332 4907 : else if (gimple_assign_rhs_code (g) == IMAGPART_EXPR)
333 : {
334 4907 : if ((seen & 2) != 0)
335 4 : return 0;
336 4907 : seen |= 2;
337 :
338 4907 : use_operand_p use2_p;
339 4907 : gimple *use_stmt;
340 4907 : tree lhs2 = gimple_assign_lhs (g);
341 4907 : if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs2))
342 : return 0;
343 4907 : if (!single_imm_use (lhs2, &use2_p, &use_stmt)
344 4907 : || gimple_bb (use_stmt) != gimple_bb (stmt)
345 9814 : || !gimple_assign_cast_p (use_stmt))
346 : return 0;
347 :
348 4907 : lhs2 = gimple_assign_lhs (use_stmt);
349 9814 : if (!INTEGRAL_TYPE_P (TREE_TYPE (lhs2))
350 9814 : || TREE_CODE (TREE_TYPE (lhs2)) == BITINT_TYPE)
351 : return 0;
352 4903 : cast = use_stmt;
353 : }
354 : else
355 : return 0;
356 4 : }
357 4903 : if ((seen & 2) == 0)
358 : return 0;
359 4903 : if (seen == 3)
360 : {
361 : /* Punt if the cast stmt appears before realpart stmt, because
362 : if both appear, the lowering wants to emit all the code
363 : at the location of realpart stmt. */
364 4630 : gimple_stmt_iterator gsi = gsi_for_stmt (realpart);
365 4630 : unsigned int cnt = 0;
366 4633 : do
367 : {
368 4633 : gsi_prev_nondebug (&gsi);
369 4633 : if (gsi_end_p (gsi) || gsi_stmt (gsi) == cast)
370 : return 0;
371 4630 : if (gsi_stmt (gsi) == stmt)
372 : return 2;
373 : /* If realpart is too far from stmt, punt as well.
374 : Usually it will appear right after it. */
375 3 : if (++cnt == 32)
376 : return 0;
377 : }
378 : while (1);
379 : }
380 : return 1;
381 : }
382 :
383 : /* If STMT is some kind of comparison (GIMPLE_COND, comparison assignment)
384 : comparing large/huge _BitInt types, return the comparison code and if
385 : non-NULL fill in the comparison operands to *POP1 and *POP2. */
386 :
387 : tree_code
388 35026 : comparison_op (gimple *stmt, tree *pop1, tree *pop2)
389 : {
390 35026 : tree op1 = NULL_TREE, op2 = NULL_TREE;
391 35026 : tree_code code = ERROR_MARK;
392 35026 : if (gimple_code (stmt) == GIMPLE_COND)
393 : {
394 6507 : code = gimple_cond_code (stmt);
395 6507 : op1 = gimple_cond_lhs (stmt);
396 6507 : op2 = gimple_cond_rhs (stmt);
397 : }
398 28519 : else if (is_gimple_assign (stmt))
399 : {
400 28504 : code = gimple_assign_rhs_code (stmt);
401 28504 : op1 = gimple_assign_rhs1 (stmt);
402 28504 : if (TREE_CODE_CLASS (code) == tcc_comparison
403 28504 : || TREE_CODE_CLASS (code) == tcc_binary)
404 2112 : op2 = gimple_assign_rhs2 (stmt);
405 : }
406 35026 : if (TREE_CODE_CLASS (code) != tcc_comparison)
407 : return ERROR_MARK;
408 7281 : tree type = TREE_TYPE (op1);
409 7281 : if (TREE_CODE (type) != BITINT_TYPE
410 7281 : || bitint_precision_kind (type) < bitint_prec_large)
411 0 : return ERROR_MARK;
412 7281 : if (pop1)
413 : {
414 7219 : *pop1 = op1;
415 7219 : *pop2 = op2;
416 : }
417 : return code;
418 : }
419 :
420 : /* Class used during large/huge _BitInt lowering containing all the
421 : state for the methods. */
422 :
struct bitint_large_huge
{
  bitint_large_huge ()
    : m_names (NULL), m_loads (NULL), m_preserved (NULL),
      m_single_use_names (NULL), m_map (NULL), m_vars (NULL),
      m_limb_type (NULL_TREE), m_data (vNULL),
      m_returns_twice_calls (vNULL) {}

  ~bitint_large_huge ();

  void insert_before (gimple *);
  tree limb_access_type (tree, tree);
  tree limb_access (tree, tree, tree, bool, bool = false);
  tree build_bit_field_ref (tree, tree, unsigned HOST_WIDE_INT,
			    unsigned HOST_WIDE_INT);
  void if_then (gimple *, profile_probability, edge &, edge &);
  void if_then_else (gimple *, profile_probability, edge &, edge &);
  void if_then_if_then_else (gimple *g, gimple *,
			     profile_probability, profile_probability,
			     edge &, edge &, edge &);
  tree handle_operand (tree, tree);
  tree prepare_data_in_out (tree, tree, tree *, tree = NULL_TREE);
  tree add_cast (tree, tree);
  tree handle_plus_minus (tree_code, tree, tree, tree);
  tree handle_lshift (tree, tree, tree);
  tree handle_cast (tree, tree, tree);
  tree handle_bit_field_ref (tree, tree);
  tree handle_load (gimple *, tree);
  tree handle_stmt (gimple *, tree);
  tree handle_operand_addr (tree, gimple *, int *, int *);
  tree create_loop (tree, tree *);
  tree lower_mergeable_stmt (gimple *, tree_code &, tree, tree);
  tree lower_comparison_stmt (gimple *, tree_code &, tree, tree);
  void lower_shift_stmt (tree, gimple *);
  void lower_muldiv_stmt (tree, gimple *);
  void lower_float_conv_stmt (tree, gimple *);
  tree arith_overflow_extract_bits (unsigned int, unsigned int, tree,
				    unsigned int, bool);
  void finish_arith_overflow (tree, tree, tree, tree, tree, tree, gimple *,
			      unsigned, tree_code);
  void lower_addsub_overflow (tree, gimple *);
  void lower_mul_overflow (tree, gimple *);
  void lower_cplxpart_stmt (tree, gimple *);
  void lower_complexexpr_stmt (gimple *);
  void lower_bit_query (gimple *);
  void lower_call (tree, gimple *);
  void lower_asm (gimple *);
  void lower_stmt (gimple *);

  /* Bitmap of large/huge _BitInt SSA_NAMEs except those can be
     merged with their uses.  */
  bitmap m_names;
  /* Subset of those for lhs of load statements.  These will be
     cleared in m_names if the loads will be mergeable with all
     their uses.  */
  bitmap m_loads;
  /* Bitmap of large/huge _BitInt SSA_NAMEs that should survive
     to later passes (arguments or return values of calls).  */
  bitmap m_preserved;
  /* Subset of m_names which have a single use.  As the lowering
     can replace various original statements with their lowered
     form even before it is done iterating over all basic blocks,
     testing has_single_use for the purpose of emitting clobbers
     doesn't work properly.  */
  bitmap m_single_use_names;
  /* Used for coalescing/partitioning of large/huge _BitInt SSA_NAMEs
     set in m_names.  */
  var_map m_map;
  /* Mapping of the partitions to corresponding decls.  */
  tree *m_vars;
  /* Unsigned integer type with limb precision.  */
  tree m_limb_type;
  /* Its TYPE_SIZE_UNIT.  */
  unsigned HOST_WIDE_INT m_limb_size;
  /* Location of a gimple stmt which is being currently lowered.  */
  location_t m_loc;
  /* Current stmt iterator where code is being lowered currently.  */
  gimple_stmt_iterator m_gsi;
  /* Statement after which any clobbers should be added if non-NULL.  */
  gimple *m_after_stmt;
  /* Set when creating loops to the loop header bb and its preheader.  */
  basic_block m_bb, m_preheader_bb;
  /* Stmt iterator after which initialization statements should be emitted.  */
  gimple_stmt_iterator m_init_gsi;
  /* Decl into which a mergeable statement stores result.  */
  tree m_lhs;
  /* handle_operand/handle_stmt can be invoked in various ways.

     lower_mergeable_stmt for large _BitInt calls those with constant
     idx only, expanding to straight line code, for huge _BitInt
     emits a loop from least significant limb upwards, where each loop
     iteration handles 2 limbs, plus there can be up to one full limb
     and one partial limb processed after the loop, where handle_operand
     and/or handle_stmt are called with constant idx.  m_upwards_2limb
     is set for this case, false otherwise.  m_upwards is true if it
     is either large or huge _BitInt handled by lower_mergeable_stmt,
     i.e. indexes always increase.

     Another way is used by lower_comparison_stmt, which walks limbs
     from most significant to least significant, partial limb if any
     processed first with constant idx and then loop processing a single
     limb per iteration with non-constant idx.

     Another way is used in lower_shift_stmt, where for LSHIFT_EXPR
     destination limbs are processed from most significant to least
     significant or for RSHIFT_EXPR the other way around, in loops or
     straight line code, but idx usually is non-constant (so from
     handle_operand/handle_stmt POV random access).  The LSHIFT_EXPR
     handling there can access even partial limbs using non-constant
     idx (then m_var_msb should be true, for all the other cases
     including lower_mergeable_stmt/lower_comparison_stmt that is
     not the case and so m_var_msb should be false).

     m_first should be set the first time handle_operand/handle_stmt
     is called and clear when it is called for some other limb with
     the same argument.  If the lowering of an operand (e.g. INTEGER_CST)
     or statement (e.g. +/-/<< with < limb_prec constant) needs some
     state between the different calls, when m_first is true it should
     push some trees to m_data vector and also make sure m_data_cnt is
     incremented by how many trees were pushed, and when m_first is
     false, it can use the m_data[m_data_cnt] etc. data or update them,
     just needs to bump m_data_cnt by the same amount as when it was
     called with m_first set.  The toplevel calls to
     handle_operand/handle_stmt should set m_data_cnt to 0 and truncate
     m_data vector when setting m_first to true.

     m_cast_conditional and m_bitfld_load are used when handling a
     bit-field load inside of a widening cast.  handle_cast sometimes
     needs to do runtime comparisons and handle_operand only conditionally
     or even in two separate conditional blocks for one idx (once with
     constant index after comparing the runtime one for equality with the
     constant).  In these cases, m_cast_conditional is set to true and
     the bit-field load then communicates its m_data_cnt to handle_cast
     using m_bitfld_load.  */
  bool m_first;
  bool m_var_msb;
  unsigned m_upwards_2limb;
  bool m_upwards;
  bool m_cast_conditional;
  unsigned m_bitfld_load;
  /* Per-operand scratch state shared between the handle_operand/
     handle_stmt calls for the individual limbs (see comment above).  */
  vec<tree> m_data;
  /* Current index into m_data.  */
  unsigned int m_data_cnt;
  /* Calls to returns_twice functions collected during lowering;
     NOTE(review): presumably fixed up after the main walk — confirm
     against the pass driver, which is outside this chunk.  */
  vec<gimple *> m_returns_twice_calls;
};
567 :
568 7141 : bitint_large_huge::~bitint_large_huge ()
569 : {
570 7141 : BITMAP_FREE (m_names);
571 7141 : BITMAP_FREE (m_loads);
572 7141 : BITMAP_FREE (m_preserved);
573 7141 : BITMAP_FREE (m_single_use_names);
574 7141 : if (m_map)
575 5505 : delete_var_map (m_map);
576 7141 : XDELETEVEC (m_vars);
577 7141 : m_data.release ();
578 7141 : m_returns_twice_calls.release ();
579 7141 : }
580 :
581 : /* Insert gimple statement G before current location
582 : and set its gimple_location. */
583 :
void
bitint_large_huge::insert_before (gimple *g)
{
  /* Stamp G with the location of the statement currently being
     lowered (m_loc) so debug info points at the original stmt.  */
  gimple_set_location (g, m_loc);
  gsi_insert_before (&m_gsi, g, GSI_SAME_STMT);
}
590 :
591 : /* Return type for accessing limb IDX of BITINT_TYPE TYPE.
592 : This is normally m_limb_type, except for a partial most
593 : significant limb if any. */
594 :
595 : tree
596 128739 : bitint_large_huge::limb_access_type (tree type, tree idx)
597 : {
598 128739 : if (type == NULL_TREE)
599 5596 : return m_limb_type;
600 123143 : unsigned HOST_WIDE_INT i = tree_to_uhwi (idx);
601 123143 : unsigned int prec = TYPE_PRECISION (type);
602 123143 : gcc_assert (i * limb_prec < prec);
603 246286 : if (bitint_big_endian
604 123143 : ? (i != 0 || (prec % limb_prec) == 0)
605 123143 : : (i + 1) * limb_prec <= prec)
606 80930 : return m_limb_type;
607 : else
608 84426 : return build_nonstandard_integer_type (prec % limb_prec,
609 42213 : TYPE_UNSIGNED (type));
610 : }
611 :
612 : /* Return a tree how to access limb IDX of VAR corresponding to BITINT_TYPE
613 : TYPE. If WRITE_P is true, it will be a store, otherwise a read. */
614 :
tree
bitint_large_huge::limb_access (tree type, tree var, tree idx, bool write_p,
				bool abi_load_p)
{
  /* Type of the accessed limb; only a constant IDX can name the
     partial most significant limb.  */
  tree atype = (tree_fits_uhwi_p (idx)
		? limb_access_type (type, idx) : m_limb_type);
  /* For loads on targets with extended ABI, access with the partial
     limb type directly, otherwise always use the full limb type.  */
  tree ltype = (bitint_extended && abi_load_p) ? atype : m_limb_type;
  addr_space_t as = TYPE_ADDR_SPACE (TREE_TYPE (var));
  tree ret;
  if (DECL_P (var) && tree_fits_uhwi_p (idx))
    {
      /* Decl with constant index: build a MEM_REF at a constant byte
	 offset from &VAR.  */
      if (as != TYPE_ADDR_SPACE (ltype))
	ltype = build_qualified_type (ltype, TYPE_QUALS (ltype)
				      | ENCODE_QUAL_ADDR_SPACE (as));
      tree ptype = build_pointer_type (strip_array_types (TREE_TYPE (var)));
      unsigned HOST_WIDE_INT off = tree_to_uhwi (idx) * m_limb_size;
      if (bitint_big_endian)
	/* On big-endian, a narrower access sits at the end of the limb.  */
	off += m_limb_size - tree_to_uhwi (TYPE_SIZE_UNIT (ltype));
      ret = build2 (MEM_REF, ltype,
		    build_fold_addr_expr (var),
		    build_int_cst (ptype, off));
      TREE_THIS_VOLATILE (ret) = TREE_THIS_VOLATILE (var);
      TREE_SIDE_EFFECTS (ret) = TREE_SIDE_EFFECTS (var);
    }
  else if (TREE_CODE (var) == MEM_REF && tree_fits_uhwi_p (idx))
    {
      /* Existing MEM_REF with constant index: fold the limb offset
	 into the MEM_REF's offset operand.  */
      if (as != TYPE_ADDR_SPACE (ltype))
	ltype = build_qualified_type (ltype, TYPE_QUALS (ltype)
				      | ENCODE_QUAL_ADDR_SPACE (as));
      unsigned HOST_WIDE_INT off = tree_to_uhwi (idx) * m_limb_size;
      if (bitint_big_endian)
	off += m_limb_size - tree_to_uhwi (TYPE_SIZE_UNIT (ltype));
      ret
	= build2 (MEM_REF, ltype, unshare_expr (TREE_OPERAND (var, 0)),
		  size_binop (PLUS_EXPR, TREE_OPERAND (var, 1),
			      build_int_cst (TREE_TYPE (TREE_OPERAND (var, 1)),
					     off)));
      TREE_THIS_VOLATILE (ret) = TREE_THIS_VOLATILE (var);
      TREE_SIDE_EFFECTS (ret) = TREE_SIDE_EFFECTS (var);
      TREE_THIS_NOTRAP (ret) = TREE_THIS_NOTRAP (var);
    }
  else
    {
      /* Variable index (or other handled_component): view the object
	 as an array of limbs and use an ARRAY_REF.  */
      ltype = m_limb_type;
      if (as != TYPE_ADDR_SPACE (ltype))
	ltype = build_qualified_type (ltype, TYPE_QUALS (ltype)
				      | ENCODE_QUAL_ADDR_SPACE (as));
      var = unshare_expr (var);
      if (TREE_CODE (TREE_TYPE (var)) != ARRAY_TYPE
	  || !useless_type_conversion_p (m_limb_type,
					 TREE_TYPE (TREE_TYPE (var))))
	{
	  unsigned HOST_WIDE_INT nelts
	    = CEIL (tree_to_uhwi (TYPE_SIZE (TREE_TYPE (var))), limb_prec);
	  tree atype = build_array_type_nelts (ltype, nelts);
	  var = build1 (VIEW_CONVERT_EXPR, atype, var);
	}
      ret = build4 (ARRAY_REF, ltype, var, idx, NULL_TREE, NULL_TREE);
    }
  if (!write_p && !useless_type_conversion_p (atype, ltype))
    {
      /* Reads of a partial limb through the full limb type need an
	 explicit load plus truncation to the partial limb type.  */
      gimple *g = gimple_build_assign (make_ssa_name (m_limb_type), ret);
      insert_before (g);
      ret = gimple_assign_lhs (g);
      ret = build1 (NOP_EXPR, atype, ret);
    }
  return ret;
}
685 :
686 : /* Build a BIT_FIELD_REF to access BITSIZE bits with FTYPE type at
687 : offset BITPOS inside of OBJ. */
688 :
tree
bitint_large_huge::build_bit_field_ref (tree ftype, tree obj,
					unsigned HOST_WIDE_INT bitsize,
					unsigned HOST_WIDE_INT bitpos)
{
  /* BIT_FIELD_REF on an integral object whose precision doesn't fill
     its mode is not valid; view it as an array of limbs first.  */
  if (INTEGRAL_TYPE_P (TREE_TYPE (obj))
      && !type_has_mode_precision_p (TREE_TYPE (obj)))
    {
      unsigned HOST_WIDE_INT nelts
	= CEIL (tree_to_uhwi (TYPE_SIZE (TREE_TYPE (obj))), limb_prec);
      tree ltype = m_limb_type;
      addr_space_t as = TYPE_ADDR_SPACE (TREE_TYPE (obj));
      if (as != TYPE_ADDR_SPACE (ltype))
	ltype = build_qualified_type (ltype, TYPE_QUALS (ltype)
				      | ENCODE_QUAL_ADDR_SPACE (as));
      tree atype = build_array_type_nelts (ltype, nelts);
      obj = build1 (VIEW_CONVERT_EXPR, atype, obj);
    }
  return build3 (BIT_FIELD_REF, ftype, obj, bitsize_int (bitsize),
		 bitsize_int (bitpos));
}
710 :
711 : /* Emit a half diamond,
712 : if (COND)
713 : |\
714 : | \
715 : | \
716 : | new_bb1
717 : | /
718 : | /
719 : |/
720 : or if (COND) new_bb1;
721 : PROB is the probability that the condition is true.
722 : Updates m_gsi to start of new_bb1.
723 : Sets EDGE_TRUE to edge from new_bb1 to successor and
724 : EDGE_FALSE to the EDGE_FALSE_VALUE edge from if (COND) bb. */
725 :
726 : void
727 3995 : bitint_large_huge::if_then (gimple *cond, profile_probability prob,
728 : edge &edge_true, edge &edge_false)
729 : {
730 3995 : insert_before (cond);
731 3995 : edge e1 = split_block (gsi_bb (m_gsi), cond);
732 3995 : edge e2 = split_block (e1->dest, (gimple *) NULL);
733 3995 : edge e3 = make_edge (e1->src, e2->dest, EDGE_FALSE_VALUE);
734 3995 : e1->flags = EDGE_TRUE_VALUE;
735 3995 : e1->probability = prob;
736 3995 : e3->probability = prob.invert ();
737 3995 : set_immediate_dominator (CDI_DOMINATORS, e2->dest, e1->src);
738 3995 : edge_true = e2;
739 3995 : edge_false = e3;
740 3995 : m_gsi = gsi_after_labels (e1->dest);
741 3995 : }
742 :
743 : /* Emit a full diamond,
744 : if (COND)
745 : /\
746 : / \
747 : / \
748 : new_bb1 new_bb2
749 : \ /
750 : \ /
751 : \/
752 : or if (COND) new_bb2; else new_bb1;
753 : PROB is the probability that the condition is true.
754 : Updates m_gsi to start of new_bb2.
755 : Sets EDGE_TRUE to edge from new_bb1 to successor and
756 : EDGE_FALSE to the EDGE_FALSE_VALUE edge from if (COND) bb. */
757 :
void
bitint_large_huge::if_then_else (gimple *cond, profile_probability prob,
				 edge &edge_true, edge &edge_false)
{
  insert_before (cond);
  /* Split after COND; e1->dest becomes new_bb1, e2->dest the join bb.  */
  edge e1 = split_block (gsi_bb (m_gsi), cond);
  edge e2 = split_block (e1->dest, (gimple *) NULL);
  /* Create new_bb2 on the true side of the diamond.  */
  basic_block bb = create_empty_bb (e1->dest);
  add_bb_to_loop (bb, e1->dest->loop_father);
  edge e3 = make_edge (e1->src, bb, EDGE_TRUE_VALUE);
  e1->flags = EDGE_FALSE_VALUE;
  e3->probability = prob;
  e1->probability = prob.invert ();
  bb->count = e1->src->count.apply_probability (prob);
  set_immediate_dominator (CDI_DOMINATORS, bb, e1->src);
  set_immediate_dominator (CDI_DOMINATORS, e2->dest, e1->src);
  edge_true = make_single_succ_edge (bb, e2->dest, EDGE_FALLTHRU);
  edge_false = e2;
  /* Subsequent lowering continues in new_bb2.  */
  m_gsi = gsi_after_labels (bb);
}
778 :
779 : /* Emit a half diamond with full diamond in it
780 : if (COND1)
781 : |\
782 : | \
783 : | \
784 : | if (COND2)
785 : | / \
786 : | / \
787 : |new_bb1 new_bb2
788 : | | /
789 : \ | /
790 : \ | /
791 : \ | /
792 : \|/
793 : or if (COND1) { if (COND2) new_bb2; else new_bb1; }
794 : PROB1 is the probability that the condition 1 is true.
795 : PROB2 is the probability that the condition 2 is true.
796 : Updates m_gsi to start of new_bb1.
797 : Sets EDGE_TRUE_TRUE to edge from new_bb2 to successor,
798 : EDGE_TRUE_FALSE to edge from new_bb1 to successor and
799 : EDGE_FALSE to the EDGE_FALSE_VALUE edge from if (COND1) bb.
800 : If COND2 is NULL, this is equivalent to
801 : if_then (COND1, PROB1, EDGE_TRUE_FALSE, EDGE_FALSE);
802 : EDGE_TRUE_TRUE = NULL; */
803 :
void
bitint_large_huge::if_then_if_then_else (gimple *cond1, gimple *cond2,
					 profile_probability prob1,
					 profile_probability prob2,
					 edge &edge_true_true,
					 edge &edge_true_false,
					 edge &edge_false)
{
  edge e2, e3, e4 = NULL;
  /* Outer half diamond for COND1; m_gsi now points into its body.  */
  if_then (cond1, prob1, e2, e3);
  if (cond2 == NULL)
    {
      /* Degenerate to plain if_then when there is no inner condition.  */
      edge_true_true = NULL;
      edge_true_false = e2;
      edge_false = e3;
      return;
    }
  /* Inner full diamond for COND2 inside the COND1 body.  */
  insert_before (cond2);
  e2 = split_block (gsi_bb (m_gsi), cond2);
  basic_block bb = create_empty_bb (e2->dest);
  add_bb_to_loop (bb, e2->dest->loop_father);
  e4 = make_edge (e2->src, bb, EDGE_TRUE_VALUE);
  set_immediate_dominator (CDI_DOMINATORS, bb, e2->src);
  e4->probability = prob2;
  e2->flags = EDGE_FALSE_VALUE;
  e2->probability = prob2.invert ();
  bb->count = e2->src->count.apply_probability (prob2);
  /* new_bb2 falls through to the common successor.  */
  e4 = make_single_succ_edge (bb, e3->dest, EDGE_FALLTHRU);
  /* Recover the new_bb1 -> successor edge after the splits.  */
  e2 = find_edge (e2->dest, e3->dest);
  edge_true_true = e4;
  edge_true_false = e2;
  edge_false = e3;
  /* Subsequent lowering continues in new_bb1.  */
  m_gsi = gsi_after_labels (e2->src);
}
837 : }
838 :
839 : /* Emit code to access limb IDX from OP. */
840 :
tree
bitint_large_huge::handle_operand (tree op, tree idx)
{
  switch (TREE_CODE (op))
    {
    case SSA_NAME:
      /* SSA_NAMEs which aren't tracked by the pass (not in m_names) don't
	 live in a lowered variable; recurse into their defining statement
	 instead (or synthesize an uninitialized value for default defs).  */
      if (m_names == NULL
	  || !bitmap_bit_p (m_names, SSA_NAME_VERSION (op)))
	{
	  if (SSA_NAME_IS_DEFAULT_DEF (op))
	    {
	      if (m_first)
		{
		  /* Create a fresh uninitialized limb-sized default def,
		     preserving the user variable's name/location for
		     diagnostics, and remember it in m_data.  */
		  tree v = create_tmp_reg (m_limb_type);
		  if (SSA_NAME_VAR (op) && VAR_P (SSA_NAME_VAR (op)))
		    {
		      DECL_NAME (v) = DECL_NAME (SSA_NAME_VAR (op));
		      DECL_SOURCE_LOCATION (v)
			= DECL_SOURCE_LOCATION (SSA_NAME_VAR (op));
		    }
		  v = get_or_create_ssa_default_def (cfun, v);
		  m_data.safe_push (v);
		}
	      tree ret = m_data[m_data_cnt];
	      m_data_cnt++;
	      if (tree_fits_uhwi_p (idx))
		{
		  /* Constant index: cast to the type of that limb (the
		     most significant one can be narrower).  */
		  tree type = limb_access_type (TREE_TYPE (op), idx);
		  ret = add_cast (type, ret);
		}
	      return ret;
	    }
	  /* Expand the defining statement inline, using its location
	     for any statements emitted on its behalf.  */
	  location_t loc_save = m_loc;
	  m_loc = gimple_location (SSA_NAME_DEF_STMT (op));
	  tree ret = handle_stmt (SSA_NAME_DEF_STMT (op), idx);
	  m_loc = loc_save;
	  return ret;
	}
      int p;
      gimple *g;
      tree t;
      /* Tracked SSA_NAME: load limb IDX from the variable backing its
	 coalesced partition.  */
      p = var_to_partition (m_map, op);
      gcc_assert (m_vars[p] != NULL_TREE);
      t = limb_access (TREE_TYPE (op), m_vars[p], idx, false);
      g = gimple_build_assign (make_ssa_name (TREE_TYPE (t)), t);
      insert_before (g);
      t = gimple_assign_lhs (g);
      /* For single-use names, clobber the backing storage right after the
	 statement that consumes it, to shorten its lifetime (but never the
	 variable that also holds the current lhs).  */
      if (m_first
	  && m_single_use_names
	  && m_vars[p] != m_lhs
	  && m_after_stmt
	  && bitmap_bit_p (m_single_use_names, SSA_NAME_VERSION (op)))
	{
	  tree clobber = build_clobber (TREE_TYPE (m_vars[p]),
					CLOBBER_STORAGE_END);
	  g = gimple_build_assign (m_vars[p], clobber);
	  gimple_stmt_iterator gsi = gsi_for_stmt (m_after_stmt);
	  gsi_insert_after (&gsi, g, GSI_SAME_STMT);
	}
      return t;
    case INTEGER_CST:
      if (tree_fits_uhwi_p (idx))
	{
	  /* Constant index: the limb value can be computed at compile
	     time directly from the INTEGER_CST.  */
	  tree c, type = limb_access_type (TREE_TYPE (op), idx);
	  unsigned HOST_WIDE_INT i = tree_to_uhwi (idx);
	  if (m_first)
	    {
	      m_data.safe_push (NULL_TREE);
	      m_data.safe_push (NULL_TREE);
	    }
	  if (bitint_big_endian)
	    i = CEIL (TYPE_PRECISION (TREE_TYPE (op)), limb_prec) - 1 - i;
	  if (limb_prec != HOST_BITS_PER_WIDE_INT)
	    {
	      /* Limb smaller than a HOST_WIDE_INT: extract via wide_int
		 shifting.  */
	      wide_int w = wi::rshift (wi::to_wide (op), i * limb_prec,
				       TYPE_SIGN (TREE_TYPE (op)));
	      c = wide_int_to_tree (type,
				    wide_int::from (w, TYPE_PRECISION (type),
						    UNSIGNED));
	    }
	  else if (i >= TREE_INT_CST_EXT_NUNITS (op))
	    /* Past the stored elements: all further limbs are the sign
	       extension (all ones or all zeros).  */
	    c = build_int_cst (type,
			       tree_int_cst_sgn (op) < 0 ? -1 : 0);
	  else
	    c = build_int_cst (type, TREE_INT_CST_ELT (op, i));
	  m_data_cnt += 2;
	  return c;
	}
      /* Variable index: pick an emission strategy the first time and
	 cache it in m_data[m_data_cnt] / m_data[m_data_cnt + 1].  */
      if (m_first
	  || (m_data[m_data_cnt] == NULL_TREE
	      && m_data[m_data_cnt + 1] == NULL_TREE))
	{
	  unsigned int prec = TYPE_PRECISION (TREE_TYPE (op));
	  unsigned int rem = prec % ((m_upwards_2limb ? 2 : 1) * limb_prec);
	  int ext;
	  unsigned min_prec = bitint_min_cst_precision (op, ext);
	  if (m_first)
	    {
	      m_data.safe_push (NULL_TREE);
	      m_data.safe_push (NULL_TREE);
	    }
	  if (integer_zerop (op))
	    {
	      /* Every limb of zero is zero.  */
	      tree c = build_zero_cst (m_limb_type);
	      m_data[m_data_cnt] = c;
	      m_data[m_data_cnt + 1] = c;
	    }
	  else if (integer_all_onesp (op))
	    {
	      /* Every limb of -1 is all ones.  */
	      tree c = build_all_ones_cst (m_limb_type);
	      m_data[m_data_cnt] = c;
	      m_data[m_data_cnt + 1] = c;
	    }
	  else if (m_upwards_2limb && min_prec <= (unsigned) limb_prec)
	    {
	      /* Single limb constant.  Use a phi with that limb from
		 the preheader edge and 0 or -1 constant from the other edge
		 and for the second limb in the loop.  */
	      tree out;
	      gcc_assert (m_first);
	      m_data.pop ();
	      m_data.pop ();
	      prepare_data_in_out (fold_convert (m_limb_type, op), idx, &out,
				   build_int_cst (m_limb_type, ext));
	    }
	  else if (min_prec > prec - rem - 2 * limb_prec)
	    {
	      /* Constant which has enough significant bits that it isn't
		 worth trying to save .rodata space by extending from smaller
		 number.  */
	      tree type;
	      if (m_var_msb)
		type = TREE_TYPE (op);
	      else
		/* If we have a guarantee the most significant partial limb
		   (if any) will be only accessed through handle_operand
		   with INTEGER_CST idx, we don't need to include the partial
		   limb in .rodata.  */
		type = build_bitint_type (prec - rem, 1);
	      tree c = tree_output_constant_def (fold_convert (type, op));
	      m_data[m_data_cnt] = c;
	      /* NULL second slot marks the "full .rodata constant"
		 strategy for the access code below.  */
	      m_data[m_data_cnt + 1] = NULL_TREE;
	    }
	  else if (m_upwards_2limb)
	    {
	      /* Constant with smaller number of bits.  Trade conditional
		 code for .rodata space by extending from smaller number.  */
	      min_prec = CEIL (min_prec, 2 * limb_prec) * (2 * limb_prec);
	      tree type = build_bitint_type (min_prec, 1);
	      tree c = tree_output_constant_def (fold_convert (type, op));
	      tree ridx = idx;
	      if (bitint_big_endian)
		{
		  /* Big-endian limb order: rebase the index into the
		     narrower .rodata constant.  */
		  ridx = make_ssa_name (sizetype);
		  g = gimple_build_assign (ridx, PLUS_EXPR, idx,
					   size_int (min_prec / limb_prec
						     - ((HOST_WIDE_INT)
							CEIL (prec,
							      limb_prec))));
		  insert_before (g);
		}
	      tree ridx2 = make_ssa_name (sizetype);
	      g = gimple_build_assign (ridx2, PLUS_EXPR, ridx,
				       bitint_big_endian
				       ? size_int (-1) : size_one_node);
	      insert_before (g);
	      /* Guard: inside the stored constant load both limbs of the
		 pair, otherwise both are the extension value.  */
	      if (bitint_big_endian)
		g = gimple_build_cond (GE_EXPR, idx,
				       size_int (CEIL (prec, limb_prec)
						 - min_prec / limb_prec),
				       NULL_TREE, NULL_TREE);
	      else
		g = gimple_build_cond (LT_EXPR, idx,
				       size_int (min_prec / limb_prec),
				       NULL_TREE, NULL_TREE);
	      edge edge_true, edge_false;
	      if_then (g, (min_prec >= (prec - rem) / 2
			   ? profile_probability::likely ()
			   : profile_probability::unlikely ()),
		       edge_true, edge_false);
	      tree c1 = limb_access (TREE_TYPE (op), c, ridx, false);
	      g = gimple_build_assign (make_ssa_name (TREE_TYPE (c1)), c1);
	      insert_before (g);
	      c1 = gimple_assign_lhs (g);
	      tree c2 = limb_access (TREE_TYPE (op), c, ridx2, false);
	      g = gimple_build_assign (make_ssa_name (TREE_TYPE (c2)), c2);
	      insert_before (g);
	      c2 = gimple_assign_lhs (g);
	      tree c3 = build_int_cst (m_limb_type, ext);
	      m_gsi = gsi_after_labels (edge_true->dest);
	      /* Merge loaded limbs with the extension value via PHIs.  */
	      m_data[m_data_cnt] = make_ssa_name (m_limb_type);
	      m_data[m_data_cnt + 1] = make_ssa_name (m_limb_type);
	      gphi *phi = create_phi_node (m_data[m_data_cnt],
					   edge_true->dest);
	      add_phi_arg (phi, c1, edge_true, UNKNOWN_LOCATION);
	      add_phi_arg (phi, c3, edge_false, UNKNOWN_LOCATION);
	      phi = create_phi_node (m_data[m_data_cnt + 1], edge_true->dest);
	      add_phi_arg (phi, c2, edge_true, UNKNOWN_LOCATION);
	      add_phi_arg (phi, c3, edge_false, UNKNOWN_LOCATION);
	    }
	  else
	    {
	      /* Constant with smaller number of bits.  Trade conditional
		 code for .rodata space by extending from smaller number.
		 Version for loops with random access to the limbs or
		 downwards loops.  */
	      min_prec = CEIL (min_prec, limb_prec) * limb_prec;
	      tree c;
	      if (min_prec <= (unsigned) limb_prec)
		c = fold_convert (m_limb_type, op);
	      else
		{
		  tree type = build_bitint_type (min_prec, 1);
		  c = tree_output_constant_def (fold_convert (type, op));
		}
	      m_data[m_data_cnt] = c;
	      /* integer_type_node in the second slot marks the "narrow
		 constant + runtime extension check" strategy below.  */
	      m_data[m_data_cnt + 1] = integer_type_node;
	    }
	  t = m_data[m_data_cnt];
	}
      else
	t = m_data[m_data_cnt + 1];
      /* Now emit the per-iteration access according to the cached
	 strategy marker in m_data[m_data_cnt + 1].  */
      if (m_data[m_data_cnt + 1] == NULL_TREE)
	{
	  /* Full-width .rodata constant: just load limb IDX from it.  */
	  tree ridx = idx;
	  unsigned int prec = TYPE_PRECISION (TREE_TYPE (op));
	  tree c = m_data[m_data_cnt];
	  unsigned int min_prec = TYPE_PRECISION (TREE_TYPE (c));
	  if (bitint_big_endian
	      && CEIL (min_prec, limb_prec) != CEIL (prec, limb_prec))
	    {
	      ridx = make_ssa_name (sizetype);
	      g = gimple_build_assign (ridx, PLUS_EXPR, idx,
				       size_int (CEIL (min_prec, limb_prec)
						 - ((HOST_WIDE_INT)
						    CEIL (prec, limb_prec))));
	      insert_before (g);
	    }
	  t = limb_access (TREE_TYPE (op), c, ridx, false);
	  g = gimple_build_assign (make_ssa_name (TREE_TYPE (t)), t);
	  insert_before (g);
	  t = gimple_assign_lhs (g);
	}
      else if (m_data[m_data_cnt + 1] == integer_type_node)
	{
	  /* Narrow constant: load from it while IDX is in range,
	     otherwise use the 0/-1 extension value, merged by a PHI.  */
	  unsigned int prec = TYPE_PRECISION (TREE_TYPE (op));
	  unsigned rem = prec % ((m_upwards_2limb ? 2 : 1) * limb_prec);
	  int ext = wi::neg_p (wi::to_wide (op)) ? -1 : 0;
	  tree c = m_data[m_data_cnt];
	  unsigned min_prec = TYPE_PRECISION (TREE_TYPE (c));
	  if (bitint_big_endian)
	    g = gimple_build_cond (GE_EXPR, idx,
				   size_int (CEIL (prec, limb_prec)
					     - min_prec / limb_prec),
				   NULL_TREE, NULL_TREE);
	  else
	    g = gimple_build_cond (LT_EXPR, idx,
				   size_int (min_prec / limb_prec),
				   NULL_TREE, NULL_TREE);
	  edge edge_true, edge_false;
	  if_then (g, (min_prec >= (prec - rem) / 2
		       ? profile_probability::likely ()
		       : profile_probability::unlikely ()),
		   edge_true, edge_false);
	  if (min_prec > (unsigned) limb_prec)
	    {
	      tree ridx = idx;
	      if (bitint_big_endian)
		{
		  ridx = make_ssa_name (sizetype);
		  g = gimple_build_assign (ridx, PLUS_EXPR, idx,
					   size_int (min_prec / limb_prec
						     - ((HOST_WIDE_INT)
						        CEIL (prec,
							      limb_prec))));
		  insert_before (g);
		}
	      c = limb_access (TREE_TYPE (op), c, ridx, false);
	      g = gimple_build_assign (make_ssa_name (TREE_TYPE (c)), c);
	      insert_before (g);
	      c = gimple_assign_lhs (g);
	    }
	  tree c2 = build_int_cst (m_limb_type, ext);
	  m_gsi = gsi_after_labels (edge_true->dest);
	  t = make_ssa_name (m_limb_type);
	  gphi *phi = create_phi_node (t, edge_true->dest);
	  add_phi_arg (phi, c, edge_true, UNKNOWN_LOCATION);
	  add_phi_arg (phi, c2, edge_false, UNKNOWN_LOCATION);
	}
      m_data_cnt += 2;
      return t;
    default:
      gcc_unreachable ();
    }
}
1136 :
1137 : /* Helper method, add a PHI node with VAL from preheader edge if
1138 : inside of a loop and m_first. Keep state in a pair of m_data
1139 : elements. If VAL_OUT is non-NULL, use that as PHI argument from
1140 : the latch edge, otherwise create a new SSA_NAME for it and let
1141 : caller initialize it. */
1142 :
1143 : tree
1144 15111 : bitint_large_huge::prepare_data_in_out (tree val, tree idx, tree *data_out,
1145 : tree val_out)
1146 : {
1147 15111 : if (!m_first)
1148 : {
1149 9009 : *data_out = tree_fits_uhwi_p (idx) ? NULL_TREE : m_data[m_data_cnt + 1];
1150 9009 : return m_data[m_data_cnt];
1151 : }
1152 :
1153 6102 : *data_out = NULL_TREE;
1154 6102 : if (tree_fits_uhwi_p (idx))
1155 : {
1156 1986 : m_data.safe_push (val);
1157 1986 : m_data.safe_push (NULL_TREE);
1158 1986 : return val;
1159 : }
1160 :
1161 4116 : tree in = make_ssa_name (TREE_TYPE (val));
1162 4116 : gphi *phi = create_phi_node (in, m_bb);
1163 4116 : edge e1 = find_edge (m_preheader_bb, m_bb);
1164 4116 : edge e2 = EDGE_PRED (m_bb, 0);
1165 4116 : if (e1 == e2)
1166 4116 : e2 = EDGE_PRED (m_bb, 1);
1167 4116 : add_phi_arg (phi, val, e1, UNKNOWN_LOCATION);
1168 4116 : tree out = val_out ? val_out : make_ssa_name (TREE_TYPE (val));
1169 4116 : add_phi_arg (phi, out, e2, UNKNOWN_LOCATION);
1170 4116 : m_data.safe_push (in);
1171 4116 : m_data.safe_push (out);
1172 4116 : return in;
1173 : }
1174 :
1175 : /* Return VAL cast to TYPE. If VAL is INTEGER_CST, just
1176 : convert it without emitting any code, otherwise emit
1177 : the conversion statement before the current location. */
1178 :
1179 : tree
1180 36918 : bitint_large_huge::add_cast (tree type, tree val)
1181 : {
1182 36918 : if (TREE_CODE (val) == INTEGER_CST)
1183 4474 : return fold_convert (type, val);
1184 :
1185 32444 : tree lhs = make_ssa_name (type);
1186 32444 : gimple *g = gimple_build_assign (lhs, NOP_EXPR, val);
1187 32444 : insert_before (g);
1188 32444 : return lhs;
1189 : }
1190 :
1191 : /* Helper of handle_stmt method, handle PLUS_EXPR or MINUS_EXPR. */
1192 :
tree
bitint_large_huge::handle_plus_minus (tree_code code, tree rhs1, tree rhs2,
				      tree idx)
{
  tree lhs, data_out, ctype;
  tree rhs1_type = TREE_TYPE (rhs1);
  gimple *g;
  /* DATA_IN is the carry/borrow from the previous limb (zero for the
     first limb); DATA_OUT will receive this limb's carry/borrow.  */
  tree data_in = prepare_data_in_out (build_zero_cst (m_limb_type), idx,
				      &data_out);

  /* Preferred strategy: the target has an add/sub-with-carry pattern
     (uaddc5/usubc5), use the IFN_UADDC/IFN_USUBC internal functions.  */
  if (optab_handler (code == PLUS_EXPR ? uaddc5_optab : usubc5_optab,
		     TYPE_MODE (m_limb_type)) != CODE_FOR_nothing)
    {
      ctype = build_complex_type (m_limb_type);
      if (!types_compatible_p (rhs1_type, m_limb_type))
	{
	  /* Cast the operands to the (unsigned) limb type; signed types
	     go through their unsigned counterpart first.  */
	  if (!TYPE_UNSIGNED (rhs1_type))
	    {
	      tree type = unsigned_type_for (rhs1_type);
	      rhs1 = add_cast (type, rhs1);
	      rhs2 = add_cast (type, rhs2);
	    }
	  rhs1 = add_cast (m_limb_type, rhs1);
	  rhs2 = add_cast (m_limb_type, rhs2);
	}
      /* The internal fn returns a complex value: REALPART is the limb
	 result, IMAGPART is the outgoing carry/borrow.  */
      lhs = make_ssa_name (ctype);
      g = gimple_build_call_internal (code == PLUS_EXPR
				      ? IFN_UADDC : IFN_USUBC,
				      3, rhs1, rhs2, data_in);
      gimple_call_set_lhs (g, lhs);
      insert_before (g);
      if (data_out == NULL_TREE)
	data_out = make_ssa_name (m_limb_type);
      g = gimple_build_assign (data_out, IMAGPART_EXPR,
			       build1 (IMAGPART_EXPR, m_limb_type, lhs));
      insert_before (g);
    }
  /* Fallback: compose the carry handling from IFN_ADD_OVERFLOW /
     IFN_SUB_OVERFLOW calls.  */
  else if (types_compatible_p (rhs1_type, m_limb_type))
    {
      ctype = build_complex_type (m_limb_type);
      lhs = make_ssa_name (ctype);
      g = gimple_build_call_internal (code == PLUS_EXPR
				      ? IFN_ADD_OVERFLOW : IFN_SUB_OVERFLOW,
				      2, rhs1, rhs2);
      gimple_call_set_lhs (g, lhs);
      insert_before (g);
      if (data_out == NULL_TREE)
	data_out = make_ssa_name (m_limb_type);
      if (!integer_zerop (data_in))
	{
	  /* Incoming carry/borrow may be nonzero: fold it in with a
	     second overflow operation and sum the two overflow bits
	     (they can't both be set at once, so PLUS is safe here —
	     NOTE(review): assumed from standard multi-word arithmetic;
	     confirm).  */
	  rhs1 = make_ssa_name (m_limb_type);
	  g = gimple_build_assign (rhs1, REALPART_EXPR,
				   build1 (REALPART_EXPR, m_limb_type, lhs));
	  insert_before (g);
	  rhs2 = make_ssa_name (m_limb_type);
	  g = gimple_build_assign (rhs2, IMAGPART_EXPR,
				   build1 (IMAGPART_EXPR, m_limb_type, lhs));
	  insert_before (g);
	  lhs = make_ssa_name (ctype);
	  g = gimple_build_call_internal (code == PLUS_EXPR
					  ? IFN_ADD_OVERFLOW
					  : IFN_SUB_OVERFLOW,
					  2, rhs1, data_in);
	  gimple_call_set_lhs (g, lhs);
	  insert_before (g);
	  data_in = make_ssa_name (m_limb_type);
	  g = gimple_build_assign (data_in, IMAGPART_EXPR,
				   build1 (IMAGPART_EXPR, m_limb_type, lhs));
	  insert_before (g);
	  g = gimple_build_assign (data_out, PLUS_EXPR, rhs2, data_in);
	  insert_before (g);
	}
      else
	{
	  /* Known-zero incoming carry: the overflow bit alone is the
	     outgoing carry.  */
	  g = gimple_build_assign (data_out, IMAGPART_EXPR,
				   build1 (IMAGPART_EXPR, m_limb_type, lhs));
	  insert_before (g);
	}
    }
  else
    {
      /* Partial (most significant) limb with no carry pattern: plain
	 arithmetic, no outgoing carry is tracked.  */
      tree in = add_cast (rhs1_type, data_in);
      lhs = make_ssa_name (rhs1_type);
      g = gimple_build_assign (lhs, code, rhs1, rhs2);
      insert_before (g);
      rhs1 = make_ssa_name (rhs1_type);
      g = gimple_build_assign (rhs1, code, lhs, in);
      insert_before (g);
      m_data[m_data_cnt] = NULL_TREE;
      m_data_cnt += 2;
      return rhs1;
    }
  /* Extract the limb result and cast it back if needed.  */
  rhs1 = make_ssa_name (m_limb_type);
  g = gimple_build_assign (rhs1, REALPART_EXPR,
			   build1 (REALPART_EXPR, m_limb_type, lhs));
  insert_before (g);
  if (!types_compatible_p (rhs1_type, m_limb_type))
    rhs1 = add_cast (rhs1_type, rhs1);
  m_data[m_data_cnt] = data_out;
  m_data_cnt += 2;
  return rhs1;
}
1295 :
1296 : /* Helper function for handle_stmt method, handle LSHIFT_EXPR by
1297 : count in [0, limb_prec - 1] range. */
1298 :
tree
bitint_large_huge::handle_lshift (tree rhs1, tree rhs2, tree idx)
{
  unsigned HOST_WIDE_INT cnt = tree_to_uhwi (rhs2);
  gcc_checking_assert (cnt < (unsigned) limb_prec);
  if (cnt == 0)
    /* Shift by zero is a no-op.  */
    return rhs1;

  tree lhs, data_out, rhs1_type = TREE_TYPE (rhs1);
  gimple *g;
  /* DATA_IN carries the previous limb's value (shifted-out bits come
     from it); DATA_OUT saves this limb for the next iteration.  */
  tree data_in = prepare_data_in_out (build_zero_cst (m_limb_type), idx,
				      &data_out);

  if (!integer_zerop (data_in))
    {
      /* Bits shifted out of the previous limb become the low bits of
	 this limb: data_in >> (limb_prec - cnt).  */
      lhs = make_ssa_name (m_limb_type);
      g = gimple_build_assign (lhs, RSHIFT_EXPR, data_in,
			       build_int_cst (unsigned_type_node,
					      limb_prec - cnt));
      insert_before (g);
      if (!types_compatible_p (rhs1_type, m_limb_type))
	lhs = add_cast (rhs1_type, lhs);
      data_in = lhs;
    }
  if (types_compatible_p (rhs1_type, m_limb_type))
    {
      /* Remember the unshifted limb for the next iteration's carry.  */
      if (data_out == NULL_TREE)
	data_out = make_ssa_name (m_limb_type);
      g = gimple_build_assign (data_out, rhs1);
      insert_before (g);
    }
  if (cnt < (unsigned) TYPE_PRECISION (rhs1_type))
    {
      /* result = (rhs1 << cnt) | carried-in low bits.  */
      lhs = make_ssa_name (rhs1_type);
      g = gimple_build_assign (lhs, LSHIFT_EXPR, rhs1, rhs2);
      insert_before (g);
      if (!integer_zerop (data_in))
	{
	  rhs1 = lhs;
	  lhs = make_ssa_name (rhs1_type);
	  g = gimple_build_assign (lhs, BIT_IOR_EXPR, rhs1, data_in);
	  insert_before (g);
	}
    }
  else
    /* Shift count >= this (partial) limb's precision: the result is
       made up entirely of carried-in bits.  */
    lhs = data_in;
  m_data[m_data_cnt] = data_out;
  m_data_cnt += 2;
  return lhs;
}
1349 :
1350 : /* Helper function for handle_stmt method, handle an integral
1351 : to integral conversion. */
1352 :
1353 : tree
1354 7359 : bitint_large_huge::handle_cast (tree lhs_type, tree rhs1, tree idx)
1355 : {
1356 7359 : tree rhs_type = TREE_TYPE (rhs1);
1357 7359 : gimple *g;
1358 7359 : if ((TREE_CODE (rhs1) == SSA_NAME || TREE_CODE (rhs1) == INTEGER_CST)
1359 7359 : && TREE_CODE (lhs_type) == BITINT_TYPE
1360 7359 : && TREE_CODE (rhs_type) == BITINT_TYPE
1361 6370 : && bitint_precision_kind (lhs_type) >= bitint_prec_large
1362 13729 : && bitint_precision_kind (rhs_type) >= bitint_prec_large)
1363 : {
1364 5781 : if (TYPE_PRECISION (rhs_type) >= TYPE_PRECISION (lhs_type)
1365 : /* If lhs has bigger precision than rhs, we can use
1366 : the simple case only if there is a guarantee that
1367 : the most significant limb is handled in straight
1368 : line code. If m_var_msb (on left shifts) or
1369 : if m_upwards_2limb * limb_prec is equal to
1370 : lhs precision or if not m_upwards_2limb and lhs_type
1371 : has precision which is multiple of limb_prec that is
1372 : not the case. */
1373 5781 : || (!m_var_msb
1374 1433 : && (CEIL (TYPE_PRECISION (lhs_type), limb_prec)
1375 1433 : == CEIL (TYPE_PRECISION (rhs_type), limb_prec))
1376 346 : && ((!m_upwards_2limb
1377 182 : && (TYPE_PRECISION (lhs_type) % limb_prec != 0))
1378 243 : || (m_upwards_2limb
1379 328 : && (m_upwards_2limb * limb_prec
1380 164 : < TYPE_PRECISION (lhs_type))))))
1381 : {
1382 4589 : tree ridx = idx;
1383 4589 : if (bitint_big_endian
1384 4589 : && (CEIL (TYPE_PRECISION (lhs_type), limb_prec)
1385 0 : != CEIL (TYPE_PRECISION (rhs_type), limb_prec)))
1386 : {
1387 0 : HOST_WIDE_INT diff = CEIL (TYPE_PRECISION (rhs_type), limb_prec);
1388 0 : diff -= CEIL (TYPE_PRECISION (lhs_type), limb_prec);
1389 0 : if (tree_fits_uhwi_p (idx))
1390 0 : ridx = size_int (tree_to_uhwi (idx) + diff);
1391 : else
1392 : {
1393 0 : tree t = make_ssa_name (sizetype);
1394 0 : g = gimple_build_assign (t, PLUS_EXPR, idx, size_int (diff));
1395 0 : insert_before (g);
1396 0 : ridx = t;
1397 : }
1398 : }
1399 4589 : rhs1 = handle_operand (rhs1, ridx);
1400 4589 : if (tree_fits_uhwi_p (idx))
1401 : {
1402 2356 : tree type = limb_access_type (lhs_type, idx);
1403 2356 : if (!types_compatible_p (type, TREE_TYPE (rhs1)))
1404 1225 : rhs1 = add_cast (type, rhs1);
1405 : }
1406 4589 : return rhs1;
1407 : }
1408 1192 : tree t;
1409 : /* Indexes lower than this don't need any special processing. */
1410 1192 : unsigned low = ((unsigned) TYPE_PRECISION (rhs_type)
1411 1192 : - !TYPE_UNSIGNED (rhs_type)) / limb_prec;
1412 : /* Indexes >= than this always contain an extension. */
1413 1192 : unsigned high = CEIL ((unsigned) TYPE_PRECISION (rhs_type), limb_prec);
1414 1192 : unsigned lcnt = CEIL ((unsigned) TYPE_PRECISION (lhs_type), limb_prec);
1415 1192 : unsigned lowe = bitint_big_endian ? lcnt - 1 - low : low;
1416 1192 : bool save_first = m_first;
1417 1192 : if (m_first)
1418 : {
1419 389 : m_data.safe_push (NULL_TREE);
1420 389 : m_data.safe_push (NULL_TREE);
1421 389 : m_data.safe_push (NULL_TREE);
1422 389 : if (TYPE_UNSIGNED (rhs_type))
1423 : /* No need to keep state between iterations. */
1424 : ;
1425 184 : else if (m_upwards && !m_upwards_2limb)
1426 : /* We need to keep state between iterations, but
1427 : not within any loop, everything is straight line
1428 : code with only increasing indexes. */
1429 : ;
1430 144 : else if (!m_upwards_2limb)
1431 : {
1432 3 : unsigned save_data_cnt = m_data_cnt;
1433 3 : gimple_stmt_iterator save_gsi = m_gsi;
1434 3 : m_gsi = m_init_gsi;
1435 3 : if (gsi_end_p (m_gsi))
1436 0 : m_gsi = gsi_after_labels (gsi_bb (m_gsi));
1437 : else
1438 3 : gsi_next (&m_gsi);
1439 3 : m_data_cnt = save_data_cnt + 3;
1440 3 : t = handle_operand (rhs1, size_int (bitint_big_endian
1441 : ? high - 1 - low : low));
1442 3 : m_first = false;
1443 3 : m_data[save_data_cnt + 2]
1444 3 : = build_int_cst (NULL_TREE, m_data_cnt);
1445 3 : m_data_cnt = save_data_cnt;
1446 3 : t = add_cast (signed_type_for (m_limb_type), t);
1447 3 : tree lpm1 = build_int_cst (unsigned_type_node, limb_prec - 1);
1448 3 : tree n = make_ssa_name (TREE_TYPE (t));
1449 3 : g = gimple_build_assign (n, RSHIFT_EXPR, t, lpm1);
1450 3 : insert_before (g);
1451 3 : m_data[save_data_cnt + 1] = add_cast (m_limb_type, n);
1452 3 : m_init_gsi = m_gsi;
1453 3 : if (gsi_end_p (m_init_gsi))
1454 0 : m_init_gsi = gsi_last_bb (gsi_bb (m_init_gsi));
1455 : else
1456 3 : gsi_prev (&m_init_gsi);
1457 3 : m_gsi = save_gsi;
1458 : }
1459 141 : else if (m_upwards_2limb * limb_prec < TYPE_PRECISION (rhs_type))
1460 : /* We need to keep state between iterations, but
1461 : fortunately not within the loop, only afterwards. */
1462 : ;
1463 : else
1464 : {
1465 137 : tree out;
1466 137 : m_data.truncate (m_data_cnt);
1467 137 : prepare_data_in_out (build_zero_cst (m_limb_type), idx, &out);
1468 137 : m_data.safe_push (NULL_TREE);
1469 : }
1470 : }
1471 :
1472 1192 : unsigned save_data_cnt = m_data_cnt;
1473 1192 : m_data_cnt += 3;
1474 1192 : if (!tree_fits_uhwi_p (idx))
1475 : {
1476 638 : if (m_upwards_2limb
1477 618 : && low >= m_upwards_2limb - m_first)
1478 : {
1479 158 : if (bitint_big_endian
1480 158 : && (CEIL (TYPE_PRECISION (lhs_type), limb_prec)
1481 0 : != CEIL (TYPE_PRECISION (rhs_type), limb_prec)))
1482 : {
1483 0 : HOST_WIDE_INT diff
1484 0 : = CEIL (TYPE_PRECISION (rhs_type), limb_prec);
1485 0 : diff -= CEIL (TYPE_PRECISION (lhs_type), limb_prec);
1486 0 : tree t = make_ssa_name (sizetype);
1487 0 : g = gimple_build_assign (t, PLUS_EXPR, idx, size_int (diff));
1488 0 : insert_before (g);
1489 0 : idx = t;
1490 : }
1491 158 : rhs1 = handle_operand (rhs1, idx);
1492 158 : if (m_first)
1493 131 : m_data[save_data_cnt + 2]
1494 262 : = build_int_cst (NULL_TREE, m_data_cnt);
1495 158 : m_first = save_first;
1496 158 : return rhs1;
1497 : }
1498 1209 : bool single_comparison
1499 480 : = low == high || (m_upwards_2limb && (low & 1) == m_first);
1500 249 : tree idxc = idx;
1501 249 : if (!single_comparison
1502 249 : && m_upwards_2limb
1503 229 : && !m_first
1504 112 : && low + 1 == m_upwards_2limb)
1505 : /* In this case we know that idx <= low always,
1506 : so effectively we just needs a single comparison,
1507 : idx < low or idx == low, but we'd need to emit different
1508 : code for the 2 branches than single_comparison normally
1509 : emits. So, instead of special-casing that, emit a
1510 : low <= low comparison which cfg cleanup will clean up
1511 : at the end of the pass. */
1512 89 : idxc = size_int (lowe);
1513 480 : if (bitint_big_endian)
1514 0 : g = gimple_build_cond (single_comparison ? GT_EXPR : GE_EXPR,
1515 0 : idxc, size_int (lowe),
1516 : NULL_TREE, NULL_TREE);
1517 : else
1518 729 : g = gimple_build_cond (single_comparison ? LT_EXPR : LE_EXPR,
1519 480 : idxc, size_int (low), NULL_TREE, NULL_TREE);
1520 480 : edge edge_true_true, edge_true_false, edge_false;
1521 729 : if_then_if_then_else (g, (single_comparison ? NULL
1522 249 : : gimple_build_cond (EQ_EXPR, idx,
1523 249 : size_int (lowe),
1524 : NULL_TREE,
1525 : NULL_TREE)),
1526 : profile_probability::likely (),
1527 : profile_probability::unlikely (),
1528 : edge_true_true, edge_true_false, edge_false);
1529 480 : bool save_cast_conditional = m_cast_conditional;
1530 480 : m_cast_conditional = true;
1531 480 : m_bitfld_load = 0;
1532 480 : tree t1 = idx, t2 = NULL_TREE;
1533 480 : if (bitint_big_endian
1534 480 : && (CEIL (TYPE_PRECISION (lhs_type), limb_prec)
1535 0 : != CEIL (TYPE_PRECISION (rhs_type), limb_prec)))
1536 : {
1537 0 : HOST_WIDE_INT diff = CEIL (TYPE_PRECISION (rhs_type), limb_prec);
1538 0 : diff -= CEIL (TYPE_PRECISION (lhs_type), limb_prec);
1539 0 : t1 = make_ssa_name (sizetype);
1540 0 : g = gimple_build_assign (t1, PLUS_EXPR, idx, size_int (diff));
1541 0 : insert_before (g);
1542 : }
1543 480 : t1 = handle_operand (rhs1, t1);
1544 480 : if (m_first)
1545 183 : m_data[save_data_cnt + 2]
1546 366 : = build_int_cst (NULL_TREE, m_data_cnt);
1547 480 : tree ext = NULL_TREE;
1548 480 : tree bitfld = NULL_TREE;
1549 480 : if (!single_comparison)
1550 : {
1551 249 : m_gsi = gsi_after_labels (edge_true_true->src);
1552 249 : m_first = false;
1553 249 : m_data_cnt = save_data_cnt + 3;
1554 249 : if (m_bitfld_load)
1555 : {
1556 4 : bitfld = m_data[m_bitfld_load];
1557 4 : m_data[m_bitfld_load] = m_data[m_bitfld_load + 2];
1558 4 : m_bitfld_load = 0;
1559 : }
1560 249 : t2 = handle_operand (rhs1, size_int (bitint_big_endian
1561 : ? high - 1 - low : low));
1562 249 : if (!useless_type_conversion_p (m_limb_type, TREE_TYPE (t2)))
1563 204 : t2 = add_cast (m_limb_type, t2);
1564 249 : if (!TYPE_UNSIGNED (rhs_type) && m_upwards_2limb)
1565 : {
1566 137 : ext = add_cast (signed_type_for (m_limb_type), t2);
1567 274 : tree lpm1 = build_int_cst (unsigned_type_node,
1568 137 : limb_prec - 1);
1569 137 : tree n = make_ssa_name (TREE_TYPE (ext));
1570 137 : g = gimple_build_assign (n, RSHIFT_EXPR, ext, lpm1);
1571 137 : insert_before (g);
1572 137 : ext = add_cast (m_limb_type, n);
1573 : }
1574 : }
1575 480 : tree t3;
1576 480 : if (TYPE_UNSIGNED (rhs_type))
1577 246 : t3 = build_zero_cst (m_limb_type);
1578 234 : else if (m_upwards_2limb && (save_first || ext != NULL_TREE))
1579 159 : t3 = m_data[save_data_cnt];
1580 : else
1581 75 : t3 = m_data[save_data_cnt + 1];
1582 480 : m_gsi = gsi_after_labels (edge_true_false->dest);
1583 480 : t = make_ssa_name (m_limb_type);
1584 480 : gphi *phi = create_phi_node (t, edge_true_false->dest);
1585 480 : add_phi_arg (phi, t1, edge_true_false, UNKNOWN_LOCATION);
1586 480 : add_phi_arg (phi, t3, edge_false, UNKNOWN_LOCATION);
1587 480 : if (edge_true_true)
1588 249 : add_phi_arg (phi, t2, edge_true_true, UNKNOWN_LOCATION);
1589 480 : if (ext)
1590 : {
1591 137 : tree t4 = make_ssa_name (m_limb_type);
1592 137 : phi = create_phi_node (t4, edge_true_false->dest);
1593 137 : add_phi_arg (phi, build_zero_cst (m_limb_type), edge_true_false,
1594 : UNKNOWN_LOCATION);
1595 137 : add_phi_arg (phi, m_data[save_data_cnt], edge_false,
1596 : UNKNOWN_LOCATION);
1597 137 : add_phi_arg (phi, ext, edge_true_true, UNKNOWN_LOCATION);
1598 137 : if (!save_cast_conditional)
1599 : {
1600 127 : g = gimple_build_assign (m_data[save_data_cnt + 1], t4);
1601 127 : insert_before (g);
1602 : }
1603 : else
1604 10 : for (basic_block bb = gsi_bb (m_gsi);;)
1605 : {
1606 10 : edge e1 = single_succ_edge (bb);
1607 10 : edge e2 = find_edge (e1->dest, m_bb), e3;
1608 10 : tree t5 = (e2 ? m_data[save_data_cnt + 1]
1609 10 : : make_ssa_name (m_limb_type));
1610 10 : phi = create_phi_node (t5, e1->dest);
1611 10 : edge_iterator ei;
1612 30 : FOR_EACH_EDGE (e3, ei, e1->dest->preds)
1613 30 : add_phi_arg (phi, (e3 == e1 ? t4
1614 10 : : build_zero_cst (m_limb_type)),
1615 : e3, UNKNOWN_LOCATION);
1616 10 : if (e2)
1617 : break;
1618 0 : t4 = t5;
1619 0 : bb = e1->dest;
1620 0 : }
1621 : }
1622 480 : if (m_bitfld_load)
1623 : {
1624 8 : tree t4;
1625 8 : if (!save_first && !save_cast_conditional)
1626 2 : t4 = m_data[m_bitfld_load + 1];
1627 : else
1628 6 : t4 = make_ssa_name (m_limb_type);
1629 8 : phi = create_phi_node (t4, edge_true_false->dest);
1630 12 : add_phi_arg (phi,
1631 4 : edge_true_true ? bitfld : m_data[m_bitfld_load],
1632 : edge_true_false, UNKNOWN_LOCATION);
1633 8 : add_phi_arg (phi, m_data[m_bitfld_load + 2],
1634 : edge_false, UNKNOWN_LOCATION);
1635 8 : if (edge_true_true)
1636 4 : add_phi_arg (phi, m_data[m_bitfld_load], edge_true_true,
1637 : UNKNOWN_LOCATION);
1638 8 : if (save_cast_conditional)
1639 4 : for (basic_block bb = gsi_bb (m_gsi);;)
1640 : {
1641 4 : edge e1 = single_succ_edge (bb);
1642 4 : edge e2 = find_edge (e1->dest, m_bb), e3;
1643 4 : tree t5 = ((e2 && !save_first) ? m_data[m_bitfld_load + 1]
1644 4 : : make_ssa_name (m_limb_type));
1645 4 : phi = create_phi_node (t5, e1->dest);
1646 4 : edge_iterator ei;
1647 14 : FOR_EACH_EDGE (e3, ei, e1->dest->preds)
1648 16 : add_phi_arg (phi, (e3 == e1 ? t4
1649 6 : : build_zero_cst (m_limb_type)),
1650 : e3, UNKNOWN_LOCATION);
1651 4 : t4 = t5;
1652 4 : if (e2)
1653 : break;
1654 0 : bb = e1->dest;
1655 0 : }
1656 8 : m_data[m_bitfld_load] = t4;
1657 8 : m_data[m_bitfld_load + 2] = t4;
1658 8 : m_bitfld_load = 0;
1659 : }
1660 480 : m_cast_conditional = save_cast_conditional;
1661 480 : m_first = save_first;
1662 480 : return t;
1663 : }
1664 : else
1665 : {
1666 554 : unsigned tidx = tree_to_uhwi (idx);
1667 554 : if (bitint_big_endian)
1668 0 : tidx = lcnt - 1 - tidx;
1669 554 : if (tidx < low)
1670 : {
1671 152 : t = handle_operand (rhs1, (bitint_big_endian
1672 0 : ? size_int (high - 1 - tidx) : idx));
1673 152 : if (m_first)
1674 71 : m_data[save_data_cnt + 2]
1675 142 : = build_int_cst (NULL_TREE, m_data_cnt);
1676 : }
1677 402 : else if (tidx < high)
1678 : {
1679 68 : t = handle_operand (rhs1, size_int (bitint_big_endian
1680 : ? high - 1 - low : low));
1681 68 : if (m_first)
1682 1 : m_data[save_data_cnt + 2]
1683 2 : = build_int_cst (NULL_TREE, m_data_cnt);
1684 68 : if (!useless_type_conversion_p (m_limb_type, TREE_TYPE (t)))
1685 60 : t = add_cast (m_limb_type, t);
1686 68 : tree ext = NULL_TREE;
1687 68 : if (!TYPE_UNSIGNED (rhs_type) && m_upwards)
1688 : {
1689 44 : ext = add_cast (signed_type_for (m_limb_type), t);
1690 88 : tree lpm1 = build_int_cst (unsigned_type_node,
1691 44 : limb_prec - 1);
1692 44 : tree n = make_ssa_name (TREE_TYPE (ext));
1693 44 : g = gimple_build_assign (n, RSHIFT_EXPR, ext, lpm1);
1694 44 : insert_before (g);
1695 44 : ext = add_cast (m_limb_type, n);
1696 44 : m_data[save_data_cnt + 1] = ext;
1697 : }
1698 : }
1699 : else
1700 : {
1701 334 : if (TYPE_UNSIGNED (rhs_type) && m_first)
1702 : {
1703 0 : handle_operand (rhs1, (bitint_big_endian
1704 0 : ? size_int (high - 1)
1705 : : size_zero_node));
1706 0 : m_data[save_data_cnt + 2]
1707 0 : = build_int_cst (NULL_TREE, m_data_cnt);
1708 : }
1709 : else
1710 334 : m_data_cnt = tree_to_uhwi (m_data[save_data_cnt + 2]);
1711 334 : if (TYPE_UNSIGNED (rhs_type))
1712 176 : t = build_zero_cst (m_limb_type);
1713 158 : else if (m_bb
1714 16 : && m_data[save_data_cnt]
1715 171 : && ((tidx & 1) == 0 || tidx != low + 1))
1716 : t = m_data[save_data_cnt];
1717 : else
1718 152 : t = m_data[save_data_cnt + 1];
1719 : }
1720 554 : tree type = limb_access_type (lhs_type, idx);
1721 554 : if (!useless_type_conversion_p (type, m_limb_type))
1722 275 : t = add_cast (type, t);
1723 554 : m_first = save_first;
1724 554 : return t;
1725 : }
1726 : }
1727 1578 : else if (TREE_CODE (lhs_type) == BITINT_TYPE
1728 1578 : && bitint_precision_kind (lhs_type) >= bitint_prec_large
1729 3156 : && INTEGRAL_TYPE_P (rhs_type))
1730 : {
1731 : /* Add support for 3 or more limbs filled in from normal integral
1732 : type if this assert fails. If no target chooses limb mode smaller
1733 : than half of largest supported normal integral type, this will not
1734 : be needed. */
1735 1578 : gcc_assert (TYPE_PRECISION (rhs_type) <= 2 * limb_prec);
1736 1578 : tree r1 = NULL_TREE, r2 = NULL_TREE, rext = NULL_TREE;
1737 1578 : if (m_first)
1738 : {
1739 576 : gimple_stmt_iterator save_gsi = m_gsi;
1740 576 : m_gsi = m_init_gsi;
1741 576 : if (gsi_end_p (m_gsi))
1742 56 : m_gsi = gsi_after_labels (gsi_bb (m_gsi));
1743 : else
1744 520 : gsi_next (&m_gsi);
1745 576 : if (TREE_CODE (rhs_type) == BITINT_TYPE
1746 576 : && bitint_precision_kind (rhs_type) == bitint_prec_middle)
1747 : {
1748 63 : tree type = NULL_TREE;
1749 63 : rhs1 = maybe_cast_middle_bitint (&m_gsi, rhs1, type);
1750 63 : rhs_type = TREE_TYPE (rhs1);
1751 : }
1752 576 : r1 = rhs1;
1753 576 : if (!useless_type_conversion_p (m_limb_type, TREE_TYPE (rhs1)))
1754 508 : r1 = add_cast (m_limb_type, rhs1);
1755 576 : if (TYPE_PRECISION (rhs_type) > limb_prec)
1756 : {
1757 109 : g = gimple_build_assign (make_ssa_name (rhs_type),
1758 : RSHIFT_EXPR, rhs1,
1759 : build_int_cst (unsigned_type_node,
1760 109 : limb_prec));
1761 109 : insert_before (g);
1762 109 : r2 = add_cast (m_limb_type, gimple_assign_lhs (g));
1763 : }
1764 576 : if (TYPE_UNSIGNED (rhs_type))
1765 279 : rext = build_zero_cst (m_limb_type);
1766 : else
1767 : {
1768 297 : rext = add_cast (signed_type_for (m_limb_type), r2 ? r2 : r1);
1769 297 : g = gimple_build_assign (make_ssa_name (TREE_TYPE (rext)),
1770 : RSHIFT_EXPR, rext,
1771 : build_int_cst (unsigned_type_node,
1772 297 : limb_prec - 1));
1773 297 : insert_before (g);
1774 297 : rext = add_cast (m_limb_type, gimple_assign_lhs (g));
1775 : }
1776 576 : m_init_gsi = m_gsi;
1777 576 : if (gsi_end_p (m_init_gsi))
1778 562 : m_init_gsi = gsi_last_bb (gsi_bb (m_init_gsi));
1779 : else
1780 295 : gsi_prev (&m_init_gsi);
1781 576 : m_gsi = save_gsi;
1782 : }
1783 1578 : tree t;
1784 1578 : if (m_upwards_2limb)
1785 : {
1786 722 : if (m_first)
1787 : {
1788 279 : tree out1, out2;
1789 279 : prepare_data_in_out (r1, idx, &out1, rext);
1790 279 : if (TYPE_PRECISION (rhs_type) > limb_prec)
1791 : {
1792 70 : prepare_data_in_out (r2, idx, &out2, rext);
1793 70 : m_data.pop ();
1794 70 : t = m_data.pop ();
1795 70 : m_data[m_data_cnt + 1] = t;
1796 : }
1797 : else
1798 209 : m_data[m_data_cnt + 1] = rext;
1799 279 : m_data.safe_push (rext);
1800 279 : t = m_data[m_data_cnt];
1801 : }
1802 443 : else if (!tree_fits_uhwi_p (idx))
1803 279 : t = m_data[m_data_cnt + 1];
1804 : else
1805 : {
1806 164 : tree type = limb_access_type (lhs_type, idx);
1807 164 : t = m_data[m_data_cnt + 2];
1808 164 : if (!useless_type_conversion_p (type, m_limb_type))
1809 136 : t = add_cast (type, t);
1810 : }
1811 722 : m_data_cnt += 3;
1812 722 : return t;
1813 : }
1814 856 : else if (m_first)
1815 : {
1816 297 : m_data.safe_push (r1);
1817 297 : m_data.safe_push (r2);
1818 297 : m_data.safe_push (rext);
1819 : }
1820 856 : unsigned lcnt = CEIL ((unsigned) TYPE_PRECISION (lhs_type), limb_prec);
1821 856 : if (tree_fits_uhwi_p (idx))
1822 : {
1823 812 : tree type = limb_access_type (lhs_type, idx);
1824 812 : if (bitint_big_endian
1825 812 : ? tree_to_uhwi (idx) == lcnt - 1 : integer_zerop (idx))
1826 269 : t = m_data[m_data_cnt];
1827 543 : else if (TYPE_PRECISION (rhs_type) > limb_prec
1828 543 : && (bitint_big_endian
1829 72 : ? tree_to_uhwi (idx) == lcnt - 2
1830 72 : : integer_onep (idx)))
1831 33 : t = m_data[m_data_cnt + 1];
1832 : else
1833 510 : t = m_data[m_data_cnt + 2];
1834 812 : if (!useless_type_conversion_p (type, m_limb_type))
1835 250 : t = add_cast (type, t);
1836 812 : m_data_cnt += 3;
1837 812 : return t;
1838 : }
1839 44 : g = gimple_build_cond (NE_EXPR, idx,
1840 : bitint_big_endian
1841 0 : ? size_int (lcnt - 1) : size_zero_node,
1842 : NULL_TREE, NULL_TREE);
1843 44 : edge e2, e3, e4 = NULL;
1844 44 : if_then (g, profile_probability::likely (), e2, e3);
1845 44 : if (m_data[m_data_cnt + 1])
1846 : {
1847 14 : g = gimple_build_cond (EQ_EXPR, idx,
1848 : bitint_big_endian
1849 0 : ? size_int (lcnt - 2) : size_one_node,
1850 : NULL_TREE, NULL_TREE);
1851 14 : insert_before (g);
1852 14 : edge e5 = split_block (gsi_bb (m_gsi), g);
1853 14 : e4 = make_edge (e5->src, e2->dest, EDGE_TRUE_VALUE);
1854 14 : e2 = find_edge (e5->dest, e2->dest);
1855 14 : e4->probability = profile_probability::unlikely ();
1856 14 : e5->flags = EDGE_FALSE_VALUE;
1857 14 : e5->probability = e4->probability.invert ();
1858 : }
1859 44 : m_gsi = gsi_after_labels (e2->dest);
1860 44 : t = make_ssa_name (m_limb_type);
1861 44 : gphi *phi = create_phi_node (t, e2->dest);
1862 44 : add_phi_arg (phi, m_data[m_data_cnt + 2], e2, UNKNOWN_LOCATION);
1863 44 : add_phi_arg (phi, m_data[m_data_cnt], e3, UNKNOWN_LOCATION);
1864 44 : if (e4)
1865 14 : add_phi_arg (phi, m_data[m_data_cnt + 1], e4, UNKNOWN_LOCATION);
1866 44 : m_data_cnt += 3;
1867 44 : return t;
1868 : }
1869 : return NULL_TREE;
1870 : }
1871 :
1872 : /* Helper function for handle_stmt method, handle a BIT_FIELD_REF. */
1873 :
1874 : tree
 : /* Lower one limb (number IDX) of a BIT_FIELD_REF OP of a large/huge
 : _BitInt. Constant IDX is handled with a direct per-limb BIT_FIELD_REF;
 : variable IDX forces the referenced object into an addressable
 : temporary first. Uses the m_data cache so the temporary is created
 : only on the first call (m_first). */
1875 31 : bitint_large_huge::handle_bit_field_ref (tree op, tree idx)
1876 : {
1877 31 : if (tree_fits_uhwi_p (idx))
1878 : {
 : /* Constant index: no cached state is needed, but push a dummy
 : m_data slot so m_data_cnt stays in sync across limbs. */
1879 21 : if (m_first)
1880 6 : m_data.safe_push (NULL);
1881 21 : ++m_data_cnt;
1882 21 : unsigned HOST_WIDE_INT sz = tree_to_uhwi (TYPE_SIZE (m_limb_type));
1883 21 : unsigned i = tree_to_uhwi (idx);
 : /* For big-endian limb order the limb at logical index I lives at the
 : mirrored position counted from the most significant end. */
1884 21 : if (bitint_big_endian)
1885 0 : i = CEIL (TYPE_PRECISION (TREE_TYPE (op)), limb_prec) - 1 - i;
 : /* Extract limb I directly: same base object, bit position advanced
 : by I limbs past the original BIT_FIELD_REF's bit offset. */
1886 42 : tree bfr = build3 (BIT_FIELD_REF, m_limb_type,
1887 21 : TREE_OPERAND (op, 0),
1888 21 : TYPE_SIZE (m_limb_type),
1889 21 : size_binop (PLUS_EXPR, TREE_OPERAND (op, 2),
1890 : bitsize_int (i * sz)));
1891 21 : tree r = make_ssa_name (m_limb_type);
1892 21 : gimple *g = gimple_build_assign (r, bfr);
1893 21 : insert_before (g);
 : /* A partial most-significant limb may need a narrower type. */
1894 21 : tree type = limb_access_type (TREE_TYPE (op), idx);
1895 21 : if (!useless_type_conversion_p (type, m_limb_type))
1896 0 : r = add_cast (type, r);
1897 21 : return r;
1898 : }
 : /* Variable index: BIT_FIELD_REF cannot take a non-constant position,
 : so on the first call copy the whole referenced value into an
 : addressable temporary VAR and cache it in m_data; later limbs
 : reuse the cached temporary. */
1899 10 : tree var;
1900 10 : if (m_first)
1901 : {
1902 5 : unsigned HOST_WIDE_INT sz = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (op)));
1903 5 : machine_mode mode;
1904 5 : tree type, bfr;
 : /* If some bitwise mode exactly covers the referenced size, read the
 : value as one scalar of that mode; otherwise fall back to copying
 : the whole containing object. */
1905 5 : if (bitwise_mode_for_size (sz).exists (&mode)
1906 2 : && known_eq (GET_MODE_BITSIZE (mode), sz))
1907 1 : type = bitwise_type_for_mode (mode);
1908 : else
1909 : {
1910 4 : mode = VOIDmode;
1911 4 : type = TYPE_MAIN_VARIANT (TREE_TYPE (TREE_OPERAND (op, 0)));
1912 : }
1913 5 : if (TYPE_ALIGN (type) < TYPE_ALIGN (TREE_TYPE (op)))
1914 0 : type = build_aligned_type (type, TYPE_ALIGN (TREE_TYPE (op)));
1915 5 : var = create_tmp_var (type);
1916 5 : TREE_ADDRESSABLE (var) = 1;
1917 5 : gimple *g;
1918 5 : if (mode != VOIDmode)
1919 : {
1920 1 : bfr = build3 (BIT_FIELD_REF, type, TREE_OPERAND (op, 0),
1921 1 : TYPE_SIZE (type), TREE_OPERAND (op, 2));
1922 1 : g = gimple_build_assign (make_ssa_name (type),
1923 : BIT_FIELD_REF, bfr);
1924 1 : gimple_set_location (g, m_loc);
 : /* Initialization statements go after m_init_gsi so they are
 : emitted once, outside the per-limb processing. */
1925 1 : gsi_insert_after (&m_init_gsi, g, GSI_NEW_STMT);
1926 1 : bfr = gimple_assign_lhs (g);
1927 : }
1928 : else
1929 4 : bfr = TREE_OPERAND (op, 0);
1930 5 : g = gimple_build_assign (var, bfr);
1931 5 : gimple_set_location (g, m_loc);
1932 5 : gsi_insert_after (&m_init_gsi, g, GSI_NEW_STMT);
1933 5 : if (mode == VOIDmode)
1934 : {
 : /* Whole-object copy: re-view the temporary as an array of limbs
 : starting at the byte offset of the bit-field. */
1935 4 : unsigned HOST_WIDE_INT nelts
1936 4 : = CEIL (tree_to_uhwi (TYPE_SIZE (TREE_TYPE (op))), limb_prec);
1937 4 : tree atype = build_array_type_nelts (m_limb_type, nelts);
1938 4 : var = build2 (MEM_REF, atype, build_fold_addr_expr (var),
1939 : build_int_cst (build_pointer_type (type),
1940 4 : tree_to_uhwi (TREE_OPERAND (op, 2))
1941 4 : / BITS_PER_UNIT));
1942 : }
1943 5 : m_data.safe_push (var);
1944 : }
1945 : else
 : /* Reuse the temporary created on the first call; unshare because the
 : tree will be embedded in another statement. */
1946 5 : var = unshare_expr (m_data[m_data_cnt]);
1947 10 : ++m_data_cnt;
1948 10 : var = limb_access (TREE_TYPE (op), var, idx, false);
1949 10 : tree r = make_ssa_name (m_limb_type);
1950 10 : gimple *g = gimple_build_assign (r, var);
1951 10 : insert_before (g);
1952 10 : return r;
1953 : }
1954 :
1955 : /* Add a new EH edge from SRC to EH_EDGE->dest, where EH_EDGE
1956 : is an older EH edge, and except for virtual PHIs duplicate the
1957 : PHI argument from the EH_EDGE to the new EH edge. */
1958 :
1959 : static void
1960 20 : add_eh_edge (basic_block src, edge eh_edge)
1961 : {
 : /* Mirror EH_EDGE with a new EH edge from SRC to the same landing pad,
 : given the same very-unlikely probability. */
1962 20 : edge e = make_edge (src, eh_edge->dest, EDGE_EH);
1963 20 : e->probability = profile_probability::very_unlikely ();
1964 20 : for (gphi_iterator gsi = gsi_start_phis (eh_edge->dest);
1965 27 : !gsi_end_p (gsi); gsi_next (&gsi))
1966 : {
1967 7 : gphi *phi = gsi.phi ();
1968 7 : tree lhs = gimple_phi_result (phi);
 : /* Virtual PHIs are left for SSA update to fix up. */
1969 14 : if (virtual_operand_p (lhs))
1970 4 : continue;
 : /* The new edge carries the same value (and location) the old EH
 : edge supplied to each PHI in the destination. */
1971 3 : const phi_arg_d *arg = gimple_phi_arg (phi, eh_edge->dest_idx);
1972 3 : add_phi_arg (phi, arg->def, e, arg->locus);
1973 : }
1974 20 : }
1975 :
1976 : /* Helper function for handle_stmt method, handle a load from memory. */
1977 :
1978 : tree
 : /* Lower one limb (number IDX) of a load STMT from memory. Plain loads
 : and limb-aligned bit-fields take the normal_load path; other
 : bit-field loads are read through DECL_BIT_FIELD_REPRESENTATIVE and
 : stitched together from two adjacent limbs with shifts. If STMT can
 : throw, each emitted limb load gets its own EH edge. */
1979 21261 : bitint_large_huge::handle_load (gimple *stmt, tree idx)
1980 : {
1981 21261 : tree rhs1 = gimple_assign_rhs1 (stmt);
1982 21261 : tree rhs_type = TREE_TYPE (rhs1);
1983 21261 : bool eh = stmt_ends_bb_p (stmt);
1984 21261 : bool load_bitfield_p = false;
1985 21261 : edge eh_edge = NULL;
1986 21261 : gimple *g;
1987 :
 : /* Find the EH edge of the original statement so the per-limb loads
 : can be re-attached to the same landing pad. */
1988 21261 : if (eh)
1989 : {
1990 10 : edge_iterator ei;
1991 10 : basic_block bb = gimple_bb (stmt);
1992 :
1993 10 : FOR_EACH_EDGE (eh_edge, ei, bb->succs)
1994 10 : if (eh_edge->flags & EDGE_EH)
1995 : break;
1996 : }
1997 :
1998 21261 : if (TREE_CODE (rhs1) == COMPONENT_REF
1999 21261 : && DECL_BIT_FIELD_TYPE (TREE_OPERAND (rhs1, 1)))
2000 : {
2001 1213 : tree fld = TREE_OPERAND (rhs1, 1);
2002 : /* For little-endian, we can allow as inputs bit-fields
2003 : which start at a limb boundary. */
2004 1213 : gcc_assert (tree_fits_uhwi_p (DECL_FIELD_BIT_OFFSET (fld)));
2005 1213 : if (!bitint_big_endian
2006 1213 : && DECL_OFFSET_ALIGN (fld) >= TYPE_ALIGN (TREE_TYPE (rhs1))
2007 2426 : && (tree_to_uhwi (DECL_FIELD_BIT_OFFSET (fld)) % limb_prec) == 0)
2008 : {
2009 739 : load_bitfield_p = true;
2010 751 : goto normal_load;
2011 : }
2012 : /* Even if DECL_FIELD_BIT_OFFSET (fld) is a multiple of BITS_PER_UNIT,
2013 : handle it normally for now. */
2014 474 : if (!bitint_big_endian
2015 474 : && (tree_to_uhwi (DECL_FIELD_BIT_OFFSET (fld)) % BITS_PER_UNIT) == 0)
2016 : {
2017 12 : load_bitfield_p = true;
2018 12 : goto normal_load;
2019 : }
 : /* Unaligned bit-field: read it through the representative field.
 : Compute the field's bit offset BO within the representative and
 : split it into a starting limb index (bo_idx) and a bit position
 : within that limb (bo_bit). */
2020 462 : tree repr = DECL_BIT_FIELD_REPRESENTATIVE (fld);
2021 462 : poly_int64 bitoffset;
2022 462 : poly_uint64 field_offset, repr_offset;
2023 462 : bool var_field_off = false;
2024 462 : if (poly_int_tree_p (DECL_FIELD_OFFSET (fld), &field_offset)
2025 924 : && poly_int_tree_p (DECL_FIELD_OFFSET (repr), &repr_offset))
2026 462 : bitoffset = (field_offset - repr_offset) * BITS_PER_UNIT;
2027 : else
2028 : {
2029 : bitoffset = 0;
2030 : var_field_off = true;
2031 : }
2032 462 : bitoffset += (tree_to_uhwi (DECL_FIELD_BIT_OFFSET (fld))
2033 462 : - tree_to_uhwi (DECL_FIELD_BIT_OFFSET (repr)));
2034 924 : tree nrhs1 = build3 (COMPONENT_REF, TREE_TYPE (repr),
2035 462 : TREE_OPERAND (rhs1, 0), repr,
2036 0 : var_field_off ? TREE_OPERAND (rhs1, 2) : NULL_TREE);
2037 462 : HOST_WIDE_INT bo = bitoffset.to_constant ();
2038 462 : unsigned bo_idx = (unsigned HOST_WIDE_INT) bo / limb_prec;
2039 462 : unsigned bo_bit = (unsigned HOST_WIDE_INT) bo % limb_prec;
2040 462 : unsigned bo_last = 0;
2041 462 : unsigned bo_shift = bo_bit;
2042 462 : unsigned nelts = CEIL (TYPE_PRECISION (rhs_type), limb_prec);
 : /* For big-endian limb order the shift amount and last-limb index
 : are counted from the most significant end instead. */
2043 462 : if (bitint_big_endian)
2044 : {
2045 0 : bo_last = CEIL (TYPE_PRECISION (rhs_type) + bo_bit, limb_prec) - 1;
2046 0 : bo_shift = (TYPE_PRECISION (rhs_type) + bo_bit) % limb_prec;
2047 0 : if (bo_shift)
2048 0 : bo_shift = limb_prec - bo_shift;
2049 : }
2050 462 : if (m_first)
2051 : {
 : /* For an upwards walk, preload the first representative limb in
 : the init block and cache it in a 3-slot m_data group, so the
 : loop body can combine it with the following limb. */
2052 137 : if (m_upwards && bo_shift)
2053 : {
2054 134 : gimple_stmt_iterator save_gsi = m_gsi;
2055 134 : m_gsi = m_init_gsi;
2056 134 : if (gsi_end_p (m_gsi))
2057 53 : m_gsi = gsi_after_labels (gsi_bb (m_gsi));
2058 : else
2059 81 : gsi_next (&m_gsi);
2060 134 : tree t = limb_access (NULL_TREE, nrhs1,
2061 134 : size_int (bo_idx + bo_last), true);
2062 134 : tree iv = make_ssa_name (m_limb_type);
2063 134 : g = gimple_build_assign (iv, t);
2064 134 : insert_before (g);
2065 134 : if (eh)
2066 : {
2067 2 : maybe_duplicate_eh_stmt (g, stmt);
2068 2 : if (eh_edge)
2069 : {
 : /* Splitting the block invalidates iterators/pointers
 : into it; re-derive save_gsi and m_preheader_bb if
 : they referred to the split block. */
2070 2 : edge e = split_block (gsi_bb (m_gsi), g);
2071 2 : add_eh_edge (e->src, eh_edge);
2072 2 : m_gsi = gsi_after_labels (e->dest);
2073 2 : if (gsi_bb (save_gsi) == e->src)
2074 : {
2075 1 : if (gsi_end_p (save_gsi))
2076 0 : save_gsi = gsi_end_bb (e->dest);
2077 : else
2078 1 : save_gsi = gsi_for_stmt (gsi_stmt (save_gsi));
2079 : }
2080 2 : if (m_preheader_bb == e->src)
2081 1 : m_preheader_bb = e->dest;
2082 : }
2083 : }
2084 134 : m_init_gsi = m_gsi;
2085 134 : if (gsi_end_p (m_init_gsi))
2086 218 : m_init_gsi = gsi_last_bb (gsi_bb (m_init_gsi));
2087 : else
2088 25 : gsi_prev (&m_init_gsi);
2089 134 : m_gsi = save_gsi;
2090 134 : tree out;
2091 134 : prepare_data_in_out (iv, idx, &out);
2092 134 : out = m_data[m_data_cnt];
2093 134 : m_data.safe_push (out);
2094 : }
2095 : else
2096 : {
 : /* No preloaded limb; reserve the 3 m_data slots regardless so
 : indexing stays consistent. */
2097 3 : m_data.safe_push (NULL_TREE);
2098 3 : m_data.safe_push (NULL_TREE);
2099 3 : m_data.safe_push (NULL_TREE);
2100 : }
2101 : }
2102 :
2103 462 : tree nidx0 = NULL_TREE, nidx1 = NULL_TREE;
2104 462 : tree iv = m_data[m_data_cnt];
 : /* Remember this load's m_data position for the conditional-cast
 : bookkeeping in handle_cast (m_bitfld_load). */
2105 462 : if (m_cast_conditional && iv)
2106 : {
2107 12 : gcc_assert (!m_bitfld_load);
2108 12 : m_bitfld_load = m_data_cnt;
2109 : }
 : /* Compute which representative limb(s) to read: nidx0 is the limb
 : holding the low bits (only needed when no cached IV exists),
 : nidx1 the following limb supplying the high bits. nidx1 is
 : dropped when the final partial limb fits entirely in one
 : representative limb. */
2110 462 : if (tree_fits_uhwi_p (idx))
2111 : {
2112 264 : unsigned prec = TYPE_PRECISION (rhs_type);
2113 264 : unsigned HOST_WIDE_INT i = tree_to_uhwi (idx);
2114 264 : if (bitint_big_endian)
2115 0 : i = nelts - 1 - i;
2116 264 : gcc_assert (i * limb_prec < prec);
2117 264 : if (bo_shift)
2118 264 : nidx1 = size_int (bo_idx + (bitint_big_endian
2119 : ? bo_last - i - 1 : i + 1));
2120 264 : if ((i + 1) * limb_prec > prec)
2121 : {
2122 96 : prec %= limb_prec;
2123 96 : if (prec + bo_bit <= (unsigned) limb_prec)
2124 264 : nidx1 = NULL_TREE;
2125 : }
2126 264 : if (!iv)
2127 4 : nidx0 = size_int (bo_idx + (bitint_big_endian ? bo_last - i : i));
2128 : }
2129 : else
2130 : {
2131 198 : HOST_WIDE_INT adj = bo_idx;
2132 198 : if (bitint_big_endian)
2133 0 : adj += (HOST_WIDE_INT) bo_last + 1 - nelts;
2134 198 : if (!iv)
2135 : {
2136 4 : if (adj == 0)
2137 : nidx0 = idx;
2138 : else
2139 : {
2140 0 : nidx0 = make_ssa_name (sizetype);
2141 0 : g = gimple_build_assign (nidx0, PLUS_EXPR, idx,
2142 0 : size_int (adj));
2143 0 : insert_before (g);
2144 : }
2145 : }
2146 198 : if (bo_shift)
2147 : {
2148 198 : if (bitint_big_endian && adj == 1)
2149 : nidx1 = idx;
2150 : else
2151 : {
2152 198 : nidx1 = make_ssa_name (sizetype);
2153 198 : g = gimple_build_assign (nidx1, PLUS_EXPR, idx,
2154 396 : size_int (adj + (bitint_big_endian
2155 : ? -1 : 1)));
2156 198 : insert_before (g);
2157 : }
2158 : }
2159 : }
2160 :
2161 664 : tree iv2 = NULL_TREE;
 : /* Load the low-part limb when it was not already cached in IV. */
2162 202 : if (nidx0)
2163 : {
2164 8 : tree t = limb_access (NULL_TREE, nrhs1, nidx0, true);
2165 8 : iv = make_ssa_name (m_limb_type);
2166 8 : g = gimple_build_assign (iv, t);
2167 8 : insert_before (g);
2168 8 : if (eh)
2169 : {
2170 0 : maybe_duplicate_eh_stmt (g, stmt);
2171 0 : if (eh_edge)
2172 : {
2173 0 : edge e = split_block (gsi_bb (m_gsi), g);
2174 0 : m_gsi = gsi_after_labels (e->dest);
2175 0 : add_eh_edge (e->src, eh_edge);
2176 : }
2177 : }
2178 : }
 : /* Load the high-part limb. With a variable IDX and m_var_msb it may
 : be guarded so the limb past the end is not read; the guarded value
 : is then merged in via a PHI with zero on the skipped path. */
2179 462 : if (nidx1)
2180 : {
2181 377 : bool conditional = m_var_msb && !tree_fits_uhwi_p (idx);
2182 377 : unsigned prec = TYPE_PRECISION (rhs_type);
2183 377 : if (conditional)
2184 : {
2185 3 : if ((prec % limb_prec) == 0
2186 3 : || ((prec % limb_prec) + bo_bit > (unsigned) limb_prec))
2187 374 : conditional = false;
2188 : }
2189 377 : edge edge_true = NULL, edge_false = NULL;
2190 377 : if (conditional)
2191 : {
2192 6 : g = gimple_build_cond (NE_EXPR, idx,
2193 : bitint_big_endian
2194 : ? size_zero_node
2195 3 : : size_int (prec / limb_prec),
2196 : NULL_TREE, NULL_TREE);
2197 3 : if_then (g, profile_probability::likely (),
2198 : edge_true, edge_false);
2199 : }
2200 377 : tree t = limb_access (NULL_TREE, nrhs1, nidx1, true);
 : /* In an upwards 2-limb loop the slot for the next iteration's low
 : part already exists; store into it instead of a fresh name. */
2201 377 : if (m_upwards_2limb
2202 279 : && !m_first
2203 182 : && !m_bitfld_load
2204 176 : && !tree_fits_uhwi_p (idx))
2205 93 : iv2 = m_data[m_data_cnt + 1];
2206 : else
2207 284 : iv2 = make_ssa_name (m_limb_type);
2208 377 : g = gimple_build_assign (iv2, t);
2209 377 : insert_before (g);
2210 377 : if (eh)
2211 : {
2212 5 : maybe_duplicate_eh_stmt (g, stmt);
2213 5 : if (eh_edge)
2214 : {
2215 5 : edge e = split_block (gsi_bb (m_gsi), g);
2216 5 : m_gsi = gsi_after_labels (e->dest);
2217 5 : add_eh_edge (e->src, eh_edge);
2218 : }
2219 : }
2220 377 : if (conditional)
2221 : {
2222 3 : tree iv3 = make_ssa_name (m_limb_type);
2223 3 : if (eh)
2224 0 : edge_true = find_edge (gsi_bb (m_gsi), edge_false->dest);
2225 3 : gphi *phi = create_phi_node (iv3, edge_true->dest);
2226 3 : add_phi_arg (phi, iv2, edge_true, UNKNOWN_LOCATION);
2227 3 : add_phi_arg (phi, build_zero_cst (m_limb_type),
2228 : edge_false, UNKNOWN_LOCATION);
2229 3 : m_gsi = gsi_after_labels (edge_true->dest);
2230 3 : iv2 = iv3;
2231 : }
2232 : }
 : /* Stitch the limb together:
 : iv = (iv >> bo_shift) | (iv2 << (limb_prec - bo_shift)). */
2233 462 : if (bo_shift)
2234 : {
2235 462 : g = gimple_build_assign (make_ssa_name (m_limb_type), RSHIFT_EXPR,
2236 : iv, build_int_cst (unsigned_type_node,
2237 462 : bo_shift));
2238 462 : insert_before (g);
2239 462 : iv = gimple_assign_lhs (g);
2240 : }
2241 462 : if (iv2)
2242 : {
2243 377 : g = gimple_build_assign (make_ssa_name (m_limb_type), LSHIFT_EXPR,
2244 : iv2, build_int_cst (unsigned_type_node,
2245 377 : limb_prec - bo_shift));
2246 377 : insert_before (g);
2247 377 : g = gimple_build_assign (make_ssa_name (m_limb_type), BIT_IOR_EXPR,
2248 : gimple_assign_lhs (g), iv);
2249 377 : insert_before (g);
2250 377 : iv = gimple_assign_lhs (g);
 : /* The just-loaded high limb becomes the low limb of the next
 : iteration/index. */
2251 377 : if (m_data[m_data_cnt])
2252 371 : m_data[m_data_cnt] = iv2;
2253 : }
2254 462 : if (tree_fits_uhwi_p (idx))
2255 : {
2256 264 : tree atype = limb_access_type (rhs_type, idx);
2257 264 : if (!useless_type_conversion_p (atype, TREE_TYPE (iv)))
2258 96 : iv = add_cast (atype, iv);
2259 : }
2260 462 : m_data_cnt += 3;
2261 462 : return iv;
2262 : }
2263 :
2264 20799 : normal_load:
2265 : /* Use write_p = true for loads with EH edges to make
2266 : sure limb_access doesn't add a cast as separate
2267 : statement after it. */
2268 20799 : rhs1 = limb_access (rhs_type, rhs1, idx, eh, !load_bitfield_p);
2269 20799 : tree ret = make_ssa_name (TREE_TYPE (rhs1));
2270 20799 : g = gimple_build_assign (ret, rhs1);
2271 20799 : insert_before (g);
2272 20799 : if (eh)
2273 : {
2274 3 : maybe_duplicate_eh_stmt (g, stmt);
2275 3 : if (eh_edge)
2276 : {
2277 3 : edge e = split_block (gsi_bb (m_gsi), g);
2278 3 : m_gsi = gsi_after_labels (e->dest);
2279 3 : add_eh_edge (e->src, eh_edge);
2280 : }
 : /* write_p above suppressed the cast inside limb_access; add it
 : here, after the EH split, when needed. */
2281 3 : if (tree_fits_uhwi_p (idx))
2282 : {
2283 1 : tree atype = limb_access_type (rhs_type, idx);
2284 1 : if (!useless_type_conversion_p (atype, TREE_TYPE (rhs1)))
2285 1 : ret = add_cast (atype, ret);
2286 : }
2287 : }
2288 : return ret;
2289 : }
2290 :
2291 : /* Return a limb IDX from a mergeable statement STMT. */
2292 :
2293 : tree
 : /* Produce limb number IDX of mergeable statement STMT by dispatching
 : on its gimple/rhs code; only GIMPLE_ASSIGNs of the codes listed
 : below are expected here (anything else is unreachable). */
2294 38427 : bitint_large_huge::handle_stmt (gimple *stmt, tree idx)
2295 : {
2296 38427 : tree lhs, rhs1, rhs2 = NULL_TREE;
2297 38427 : gimple *g;
2298 38427 : switch (gimple_code (stmt))
2299 : {
2300 38427 : case GIMPLE_ASSIGN:
2301 38427 : if (gimple_assign_load_p (stmt))
2302 21261 : return handle_load (stmt, idx);
2303 17166 : switch (gimple_assign_rhs_code (stmt))
2304 : {
 : /* Bitwise ops work limb-by-limb with no carries. */
2305 916 : case BIT_AND_EXPR:
2306 916 : case BIT_IOR_EXPR:
2307 916 : case BIT_XOR_EXPR:
2308 916 : rhs2 = handle_operand (gimple_assign_rhs2 (stmt), idx);
2309 : /* FALLTHRU */
2310 1312 : case BIT_NOT_EXPR:
2311 1312 : rhs1 = handle_operand (gimple_assign_rhs1 (stmt), idx);
2312 1312 : lhs = make_ssa_name (TREE_TYPE (rhs1));
2313 1312 : g = gimple_build_assign (lhs, gimple_assign_rhs_code (stmt),
2314 : rhs1, rhs2);
2315 1312 : insert_before (g);
2316 1312 : return lhs;
2317 3752 : case PLUS_EXPR:
2318 3752 : case MINUS_EXPR:
2319 3752 : rhs1 = handle_operand (gimple_assign_rhs1 (stmt), idx);
2320 3752 : rhs2 = handle_operand (gimple_assign_rhs2 (stmt), idx);
2321 3752 : return handle_plus_minus (gimple_assign_rhs_code (stmt),
2322 3752 : rhs1, rhs2, idx);
 : /* Negation is lowered as 0 - operand. */
2323 123 : case NEGATE_EXPR:
2324 123 : rhs2 = handle_operand (gimple_assign_rhs1 (stmt), idx);
2325 123 : rhs1 = build_zero_cst (TREE_TYPE (rhs2));
2326 123 : return handle_plus_minus (MINUS_EXPR, rhs1, rhs2, idx);
2327 140 : case LSHIFT_EXPR:
2328 140 : return handle_lshift (handle_operand (gimple_assign_rhs1 (stmt),
2329 : idx),
2330 140 : gimple_assign_rhs2 (stmt), idx);
 : /* Plain copies: just forward the operand's limb. */
2331 5033 : case SSA_NAME:
2332 5033 : case PAREN_EXPR:
2333 5033 : case INTEGER_CST:
2334 5033 : return handle_operand (gimple_assign_rhs1 (stmt), idx);
2335 6767 : CASE_CONVERT:
2336 6767 : return handle_cast (TREE_TYPE (gimple_assign_lhs (stmt)),
2337 6767 : gimple_assign_rhs1 (stmt), idx);
 : /* VIEW_CONVERT_EXPR is handled like a cast of its operand. */
2338 8 : case VIEW_CONVERT_EXPR:
2339 8 : return handle_cast (TREE_TYPE (gimple_assign_lhs (stmt)),
2340 8 : TREE_OPERAND (gimple_assign_rhs1 (stmt), 0),
2341 8 : idx);
2342 31 : case BIT_FIELD_REF:
2343 31 : return handle_bit_field_ref (gimple_assign_rhs1 (stmt), idx);
2344 : default:
2345 : break;
2346 : }
2347 : break;
2348 : default:
2349 : break;
2350 : }
2351 0 : gcc_unreachable ();
2352 : }
2353 :
2354 : /* Return minimum precision of OP at STMT.
2355 : Positive value is minimum precision above which all bits
2356 : are zero, negative means all bits above negation of the
2357 : value are copies of the sign bit. */
2358 :
2359 : static int
2360 8026 : range_to_prec (tree op, gimple *stmt)
2361 : {
2362 8026 : int_range_max r;
2363 8026 : wide_int w;
2364 8026 : tree type = TREE_TYPE (op);
2365 8026 : unsigned int prec = TYPE_PRECISION (type);
2366 :
 : /* Without usable range information fall back to the type's full
 : precision; negated for signed types, and at most -2 so there is
 : always room for a sign bit. */
2367 8026 : if (!optimize
2368 7026 : || !get_range_query (cfun)->range_of_expr (r, op, stmt)
2369 11539 : || r.undefined_p ())
2370 : {
2371 4514 : if (TYPE_UNSIGNED (type))
2372 1824 : return prec;
2373 : else
2374 2690 : return MIN ((int) -prec, -2);
2375 : }
2376 :
 : /* A signed operand whose range includes negative values needs the
 : larger of the signed precisions of both range bounds. */
2377 3512 : if (!TYPE_UNSIGNED (TREE_TYPE (op)))
2378 : {
2379 2192 : w = r.lower_bound ();
2380 2192 : if (wi::neg_p (w))
2381 : {
2382 2382 1792 : int min_prec1 = wi::min_precision (w, SIGNED);
2383 1792 : w = r.upper_bound ();
2384 1792 : int min_prec2 = wi::min_precision (w, SIGNED);
2385 1792 : int min_prec = MAX (min_prec1, min_prec2);
2386 1792 : return MIN (-min_prec, -2);
2387 : }
2388 : }
2389 :
 : /* Known non-negative: the unsigned precision of the upper bound
 : suffices (at least 1). */
2390 1720 : w = r.upper_bound ();
2391 1720 : int min_prec = wi::min_precision (w, UNSIGNED);
2392 1720 : return MAX (min_prec, 1);
2393 8026 : }
2394 :
2395 : /* Return address of the first limb of OP and write into *PREC
2396 : its precision. If positive, the operand is zero extended
2397 : from that precision, if it is negative, the operand is sign-extended
2398 : from -*PREC. If PREC_STORED is NULL, it is the toplevel call,
2399 : otherwise *PREC_STORED is prec from the innermost call without
2400 : range optimizations (0 for uninitialized SSA_NAME). */
2401 :
2402 : tree
 : /* Return the address of the limb array backing OP, spilling small
 : operands and constants to temporaries/static storage as needed.
 : *PREC receives the (possibly range-narrowed) extension precision;
 : see the comment above for the PREC/PREC_STORED contract. */
2403 3562 : bitint_large_huge::handle_operand_addr (tree op, gimple *stmt,
2404 : int *prec_stored, int *prec)
2405 : {
2406 3562 : wide_int w;
2407 3562 : location_t loc_save = m_loc;
2408 3562 : tree ret = NULL_TREE;
2409 3562 : int precs = 0;
 : /* Small/middle _BitInt or other integral operand (not a constant):
 : spill it into a fresh 1- or 2-limb array and return its address. */
2410 3562 : if ((TREE_CODE (TREE_TYPE (op)) != BITINT_TYPE
2411 3555 : || bitint_precision_kind (TREE_TYPE (op)) < bitint_prec_large)
2412 3657 : && TREE_CODE (op) != INTEGER_CST)
2413 : {
2414 108 : do_int:
2415 108 : *prec = range_to_prec (op, stmt);
2416 108 : bitint_prec_kind kind = bitint_prec_small;
2417 108 : gcc_assert (INTEGRAL_TYPE_P (TREE_TYPE (op)));
2418 108 : if (TREE_CODE (TREE_TYPE (op)) == BITINT_TYPE)
2419 98 : kind = bitint_precision_kind (TREE_TYPE (op));
2420 98 : if (kind == bitint_prec_middle)
2421 : {
2422 12 : tree type = NULL_TREE;
2423 12 : op = maybe_cast_middle_bitint (&m_gsi, op, type);
2424 : }
2425 108 : tree op_type = TREE_TYPE (op);
2426 108 : unsigned HOST_WIDE_INT nelts
2427 108 : = CEIL (TYPE_PRECISION (op_type), limb_prec);
2428 : /* Add support for 3 or more limbs filled in from normal
2429 : integral type if this assert fails. If no target chooses
2430 : limb mode smaller than half of largest supported normal
2431 : integral type, this will not be needed. */
2432 108 : gcc_assert (nelts <= 2);
2433 108 : precs = (TYPE_UNSIGNED (op_type)
2434 108 : ? TYPE_PRECISION (op_type) : -TYPE_PRECISION (op_type));
 : /* If the range fits in a single limb, store just one limb and
 : clamp the stored precision accordingly. */
2435 108 : if (*prec <= limb_prec && *prec >= -limb_prec)
2436 : {
2437 95 : nelts = 1;
2438 95 : if (TYPE_UNSIGNED (op_type))
2439 : {
2440 26 : if (precs > limb_prec)
2441 108 : precs = limb_prec;
2442 : }
2443 69 : else if (precs < -limb_prec)
2444 108 : precs = -limb_prec;
2445 : }
2446 108 : if (prec_stored)
2447 0 : *prec_stored = precs;
2448 108 : tree atype = build_array_type_nelts (m_limb_type, nelts);
2449 108 : tree var = create_tmp_var (atype);
2450 108 : tree t1 = op;
2451 108 : if (!useless_type_conversion_p (m_limb_type, op_type))
2452 108 : t1 = add_cast (m_limb_type, t1);
 : /* Least significant limb: element 0, or element 1 for big-endian
 : two-limb layout. */
2453 108 : tree v = build4 (ARRAY_REF, m_limb_type, var,
2454 0 : bitint_big_endian && nelts > 1
2455 : ? size_one_node : size_zero_node,
2456 : NULL_TREE, NULL_TREE);
2457 108 : gimple *g = gimple_build_assign (v, t1);
2458 108 : insert_before (g);
2459 108 : if (nelts > 1)
2460 : {
 : /* Second limb holds the bits above limb_prec. */
2461 13 : tree lp = build_int_cst (unsigned_type_node, limb_prec);
2462 13 : g = gimple_build_assign (make_ssa_name (op_type),
2463 : RSHIFT_EXPR, op, lp);
2464 13 : insert_before (g);
2465 13 : tree t2 = gimple_assign_lhs (g);
2466 13 : t2 = add_cast (m_limb_type, t2);
2467 13 : v = build4 (ARRAY_REF, m_limb_type, var,
2468 : bitint_big_endian ? size_zero_node : size_one_node,
2469 : NULL_TREE, NULL_TREE);
2470 13 : g = gimple_build_assign (v, t2);
2471 13 : insert_before (g);
2472 : }
2473 108 : ret = build_fold_addr_expr (var);
 : /* Clobber the temporary right after the consuming statement so
 : its stack slot can be shared. */
2474 108 : if (!stmt_ends_bb_p (gsi_stmt (m_gsi)))
2475 : {
2476 107 : tree clobber = build_clobber (atype, CLOBBER_STORAGE_END);
2477 107 : g = gimple_build_assign (var, clobber);
2478 107 : gsi_insert_after (&m_gsi, g, GSI_SAME_STMT);
2479 : }
2480 108 : m_loc = loc_save;
2481 108 : goto do_ret;
2482 : }
2483 3489 : switch (TREE_CODE (op))
2484 : {
2485 2665 : case SSA_NAME:
 : /* SSA names not tracked in m_names have no backing variable of
 : their own; look through their defining statement. */
2486 2665 : if (m_names == NULL
2487 2665 : || !bitmap_bit_p (m_names, SSA_NAME_VERSION (op)))
2488 : {
2489 89 : gimple *g = SSA_NAME_DEF_STMT (op);
2490 89 : m_loc = gimple_location (g);
2491 89 : if (gimple_assign_load_p (g))
2492 : {
 : /* Loaded value: take the address of the memory it was
 : loaded from. */
2493 36 : *prec = range_to_prec (op, NULL);
2494 36 : precs = (TYPE_UNSIGNED (TREE_TYPE (op))
2495 36 : ? TYPE_PRECISION (TREE_TYPE (op))
2496 25 : : -TYPE_PRECISION (TREE_TYPE (op)));
2497 36 : if (prec_stored)
2498 6 : *prec_stored = precs;
2499 36 : ret = build_fold_addr_expr (gimple_assign_rhs1 (g));
2500 36 : ret = force_gimple_operand_gsi (&m_gsi, ret, true,
2501 : NULL_TREE, true, GSI_SAME_STMT);
2502 : }
2503 53 : else if (gimple_code (g) == GIMPLE_NOP)
2504 : {
 : /* Uninitialized SSA name: provide a scratch limb; record
 : prec_stored 0 so callers know the value is undefined. */
2505 2 : *prec = TYPE_UNSIGNED (TREE_TYPE (op)) ? limb_prec : -limb_prec;
2506 2 : precs = *prec;
2507 2 : if (prec_stored)
2508 1 : *prec_stored = 0;
2509 2 : tree var = create_tmp_var (m_limb_type);
2510 2 : TREE_ADDRESSABLE (var) = 1;
2511 2 : ret = build_fold_addr_expr (var);
2512 2 : if (!stmt_ends_bb_p (gsi_stmt (m_gsi)))
2513 : {
2514 2 : tree clobber = build_clobber (m_limb_type,
2515 : CLOBBER_STORAGE_END);
2516 2 : g = gimple_build_assign (var, clobber);
2517 2 : gsi_insert_after (&m_gsi, g, GSI_SAME_STMT);
2518 : }
2519 : }
2520 : else
2521 : {
 : /* Otherwise OP must be defined by a cast; recurse on the
 : cast source and adjust *prec for the cast's semantics. */
2522 51 : gcc_assert (gimple_assign_cast_p (g));
2523 51 : tree rhs1 = gimple_assign_rhs1 (g);
2524 51 : bitint_prec_kind kind = bitint_prec_small;
2525 51 : if (TREE_CODE (rhs1) == VIEW_CONVERT_EXPR)
2526 1 : rhs1 = TREE_OPERAND (rhs1, 0);
2527 51 : gcc_assert (INTEGRAL_TYPE_P (TREE_TYPE (rhs1)));
2528 51 : if (TREE_CODE (TREE_TYPE (rhs1)) == BITINT_TYPE)
2529 43 : kind = bitint_precision_kind (TREE_TYPE (rhs1));
2530 43 : if (kind >= bitint_prec_large)
2531 : {
2532 16 : tree lhs_type = TREE_TYPE (op);
2533 16 : tree rhs_type = TREE_TYPE (rhs1);
2534 16 : int prec_stored_val = 0;
2535 16 : ret = handle_operand_addr (rhs1, g, &prec_stored_val, prec);
2536 16 : precs = prec_stored_val;
2537 16 : if (prec_stored)
2538 0 : *prec_stored = prec_stored_val;
2539 16 : if (precs == 0)
2540 : {
2541 1 : gcc_assert (*prec == limb_prec || *prec == -limb_prec);
2542 : precs = *prec;
2543 : }
2544 16 : if (TYPE_PRECISION (lhs_type) > TYPE_PRECISION (rhs_type))
2545 : {
 : /* Widening cast; a signed->unsigned widening is only
 : valid here when the value is known non-negative. */
2546 4 : if (TYPE_UNSIGNED (lhs_type)
2547 4 : && !TYPE_UNSIGNED (rhs_type))
2548 1 : gcc_assert (*prec >= 0 || prec_stored == NULL);
2549 : }
2550 : else
2551 : {
 : /* Narrowing or same-size cast: cap *prec at the
 : destination type's precision unless the known range
 : is already narrower. */
2552 12 : if (prec_stored_val == 0)
2553 : /* Non-widening cast of uninitialized value. */;
2554 11 : else if (*prec > 0 && *prec < TYPE_PRECISION (lhs_type))
2555 : ;
2556 11 : else if (TYPE_UNSIGNED (lhs_type))
2557 : {
2558 8 : gcc_assert (*prec > 0
2559 : || prec_stored_val > 0
2560 : || (-prec_stored_val
2561 : >= TYPE_PRECISION (lhs_type)));
2562 8 : *prec = TYPE_PRECISION (lhs_type);
2563 : }
2564 3 : else if (*prec < 0 && -*prec < TYPE_PRECISION (lhs_type))
2565 : ;
2566 : else
2567 3 : *prec = -TYPE_PRECISION (lhs_type);
2568 : }
2569 : }
2570 : else
2571 : {
 : /* Cast from a small/middle type: handle the cast source
 : through the spill-to-array path above. */
2572 35 : op = rhs1;
2573 35 : stmt = g;
2574 35 : goto do_int;
2575 : }
2576 : }
2577 54 : m_loc = loc_save;
2578 54 : goto do_ret;
2579 : }
2580 : else
2581 : {
 : /* Tracked SSA name: its limbs live in the coalesced backing
 : variable of its partition. */
2582 2576 : int p = var_to_partition (m_map, op);
2583 2576 : gcc_assert (m_vars[p] != NULL_TREE);
2584 2576 : *prec = range_to_prec (op, stmt);
2585 2576 : precs = (TYPE_UNSIGNED (TREE_TYPE (op))
2586 2576 : ? TYPE_PRECISION (TREE_TYPE (op))
2587 1534 : : -TYPE_PRECISION (TREE_TYPE (op)));
2588 2576 : if (prec_stored)
2589 9 : *prec_stored = precs;
2590 2576 : ret = build_fold_addr_expr (m_vars[p]);
2591 2590 2576 : goto do_ret;
2592 : }
2593 824 : case INTEGER_CST:
 : /* Constant: emit it into static storage in the smallest type
 : whose precision is a limb multiple covering its value, and
 : return the address of that constant pool entry. */
2594 824 : unsigned int min_prec, mp;
2595 824 : tree type;
2596 824 : w = wi::to_wide (op);
2597 824 : if (tree_int_cst_sgn (op) >= 0)
2598 : {
2599 609 : min_prec = wi::min_precision (w, UNSIGNED);
2600 609 : *prec = MAX (min_prec, 1);
2601 : }
2602 : else
2603 : {
2604 215 : min_prec = wi::min_precision (w, SIGNED);
2605 215 : *prec = MIN ((int) -min_prec, -2);
2606 : }
2607 824 : mp = CEIL (min_prec, limb_prec) * limb_prec;
2608 824 : if (mp == 0)
2609 : mp = 1;
2610 824 : if (mp >= (unsigned) TYPE_PRECISION (TREE_TYPE (op))
2611 824 : && (TREE_CODE (TREE_TYPE (op)) == BITINT_TYPE
2612 5 : || TYPE_PRECISION (TREE_TYPE (op)) <= limb_prec))
2613 288 : type = TREE_TYPE (op);
2614 : else
2615 536 : type = build_bitint_type (mp, 1);
2616 824 : if (TREE_CODE (type) != BITINT_TYPE
2617 824 : || bitint_precision_kind (type) == bitint_prec_small)
2618 : {
2619 554 : if (TYPE_PRECISION (type) <= limb_prec)
2620 554 : type = m_limb_type;
2621 : else
2622 : {
2623 0 : while (bitint_precision_kind (mp) == bitint_prec_small)
2624 0 : mp += limb_prec;
2625 : /* This case is for targets which e.g. have 64-bit
2626 : limb but categorize up to 128-bits _BitInts as
2627 : small. We could use type of m_limb_type[2] and
2628 : similar instead to save space. */
2629 0 : type = build_bitint_type (mp, 1);
2630 : }
2631 : }
2632 824 : if (tree_int_cst_sgn (op) >= 0)
2633 609 : precs = MAX (TYPE_PRECISION (type), 1);
2634 : else
2635 215 : precs = MIN ((int) -TYPE_PRECISION (type), -2);
2636 824 : if (prec_stored)
2637 0 : *prec_stored = precs;
2638 824 : op = tree_output_constant_def (fold_convert (type, op));
2639 824 : ret = build_fold_addr_expr (op);
2640 824 : goto do_ret;
2641 0 : default:
2642 0 : gcc_unreachable ();
2643 : }
2644 3562 : do_ret:
 : /* For big-endian limb order (toplevel calls only), when more limbs
 : are stored than *prec covers, advance the pointer past the extra
 : most-significant limbs so it points at the first limb covered. */
2645 3562 : if (bitint_big_endian && prec_stored == NULL)
2646 : {
2647 0 : int p1 = *prec < 0 ? -*prec : *prec;
2648 0 : int p2 = precs < 0 ? -precs : precs;
2649 0 : int c1 = CEIL (p1, limb_prec);
2650 0 : int c2 = CEIL (p2, limb_prec);
2651 0 : gcc_assert (c1 <= c2);
2652 0 : if (c1 != c2)
2653 : {
2654 0 : gimple *g
2655 0 : = gimple_build_assign (make_ssa_name (TREE_TYPE (ret)),
2656 : POINTER_PLUS_EXPR, ret,
2657 0 : size_int ((c2 - c1) * m_limb_size));
2658 0 : insert_before (g);
2659 0 : ret = gimple_assign_lhs (g);
2660 : }
2661 : }
2662 3562 : return ret;
2663 3562 : }
2664 :
2665 : /* Helper function, create a loop before the current location,
2666 : start with sizetype INIT value from the preheader edge. Return
2667 : a PHI result and set *IDX_NEXT to SSA_NAME it creates and uses
2668 : from the latch edge. */
2669 :
2670 : tree
2671 14044 : bitint_large_huge::create_loop (tree init, tree *idx_next)
2672 : {
2673 14044 : if (!gsi_end_p (m_gsi))
2674 12022 : gsi_prev (&m_gsi);
2675 : else
2676 4044 : m_gsi = gsi_last_bb (gsi_bb (m_gsi));
2677 14044 : edge e1 = split_block (gsi_bb (m_gsi), gsi_stmt (m_gsi));
2678 14044 : edge e2 = split_block (e1->dest, (gimple *) NULL);
2679 14044 : edge e3 = make_edge (e1->dest, e1->dest, EDGE_TRUE_VALUE);
2680 14044 : e3->probability = profile_probability::very_unlikely ();
2681 14044 : e2->flags = EDGE_FALSE_VALUE;
2682 14044 : e2->probability = e3->probability.invert ();
2683 14044 : tree idx = make_ssa_name (sizetype);
2684 14044 : gphi *phi = create_phi_node (idx, e1->dest);
2685 14044 : add_phi_arg (phi, init, e1, UNKNOWN_LOCATION);
2686 14044 : *idx_next = make_ssa_name (sizetype);
2687 14044 : add_phi_arg (phi, *idx_next, e3, UNKNOWN_LOCATION);
2688 14044 : m_gsi = gsi_after_labels (e1->dest);
2689 14044 : m_bb = e1->dest;
2690 14044 : m_preheader_bb = e1->src;
2691 14044 : class loop *loop = alloc_loop ();
2692 14044 : loop->header = e1->dest;
2693 14044 : add_loop (loop, e1->src->loop_father);
2694 14044 : return idx;
2695 : }
2696 :
2697 : /* Lower large/huge _BitInt statement mergeable or similar STMT which can be
2698 : lowered using iteration from the least significant limb up to the most
2699 : significant limb. For large _BitInt it is emitted as straight line code
2700 : before current location, for huge _BitInt as a loop handling two limbs
   at once, followed by handling up to 2 limbs in straight line code (at most
   one full and one partial limb).  It can also handle EQ_EXPR/NE_EXPR
2703 : comparisons, in that case CMP_CODE should be the comparison code and
2704 : CMP_OP1/CMP_OP2 the comparison operands. */
2705 :
tree
bitint_large_huge::lower_mergeable_stmt (gimple *stmt, tree_code &cmp_code,
					 tree cmp_op1, tree cmp_op2)
{
  /* When CMP_CODE is not ERROR_MARK, we are lowering an EQ_EXPR/NE_EXPR
     comparison of CMP_OP1 and CMP_OP2 rather than a plain assignment.  */
  bool eq_p = cmp_code != ERROR_MARK;
  tree type;
  if (eq_p)
    type = TREE_TYPE (cmp_op1);
  else
    type = TREE_TYPE (gimple_assign_lhs (stmt));
  gcc_assert (TREE_CODE (type) == BITINT_TYPE);
  bitint_prec_kind kind = bitint_precision_kind (type);
  gcc_assert (kind >= bitint_prec_large);
  gimple *g;
  tree lhs = gimple_get_lhs (stmt);
  tree rhs1, lhs_type = lhs ? TREE_TYPE (lhs) : NULL_TREE;
  /* A large/huge _BitInt SSA_NAME lhs is replaced by the VAR_DECL its
     coalescing partition was mapped to in m_vars.  */
  if (lhs
      && TREE_CODE (lhs) == SSA_NAME
      && TREE_CODE (TREE_TYPE (lhs)) == BITINT_TYPE
      && bitint_precision_kind (TREE_TYPE (lhs)) >= bitint_prec_large)
    {
      int p = var_to_partition (m_map, lhs);
      gcc_assert (m_vars[p] != NULL_TREE);
      m_lhs = lhs = m_vars[p];
    }
  unsigned cnt, rem = 0, end = 0, prec = TYPE_PRECISION (type);
  bool sext = false;
  tree ext = NULL_TREE, store_operand = NULL_TREE;
  bool eh = false;
  basic_block eh_pad = NULL;
  /* nlhs is the bit-field representative access when storing into a
     bit-field; bo_idx/bo_bit/bo_shift describe the limb index, bit
     offset and shift of the store within that representative, bo_last
     and bo_be_p are only used for big-endian limb ordering.  bf_cur and
     bf_next carry the partial limb state between loop iterations.  */
  tree nlhs = NULL_TREE;
  unsigned HOST_WIDE_INT bo_idx = 0;
  unsigned HOST_WIDE_INT bo_bit = 0;
  unsigned bo_shift = 0;
  unsigned bo_last = 0;
  bool bo_be_p = false;
  tree bf_cur = NULL_TREE, bf_next = NULL_TREE;
  if (gimple_store_p (stmt))
    {
      store_operand = gimple_assign_rhs1 (stmt);
      /* If the store can throw, find the EH landing pad so each emitted
	 per-limb store can get a matching EH edge.  */
      eh = stmt_ends_bb_p (stmt);
      if (eh)
	{
	  edge e;
	  edge_iterator ei;
	  basic_block bb = gimple_bb (stmt);

	  FOR_EACH_EDGE (e, ei, bb->succs)
	    if (e->flags & EDGE_EH)
	      {
		eh_pad = e->dest;
		break;
	      }
	}
      if (TREE_CODE (lhs) == COMPONENT_REF
	  && DECL_BIT_FIELD_TYPE (TREE_OPERAND (lhs, 1)))
	{
	  tree fld = TREE_OPERAND (lhs, 1);
	  gcc_assert (tree_fits_uhwi_p (DECL_FIELD_BIT_OFFSET (fld)));
	  tree repr = DECL_BIT_FIELD_REPRESENTATIVE (fld);
	  poly_int64 bitoffset;
	  poly_uint64 field_offset, repr_offset;
	  /* A little-endian byte-aligned bit-field can be stored through
	     the original lhs directly.  */
	  if (!bitint_big_endian
	      && (tree_to_uhwi (DECL_FIELD_BIT_OFFSET (fld))
		  % BITS_PER_UNIT) == 0)
	    nlhs = lhs;
	  else
	    {
	      /* Otherwise access it through the representative field and
		 compute the bit offset of the field inside it.  */
	      bool var_field_off = false;
	      if (poly_int_tree_p (DECL_FIELD_OFFSET (fld), &field_offset)
		  && poly_int_tree_p (DECL_FIELD_OFFSET (repr), &repr_offset))
		bitoffset = (field_offset - repr_offset) * BITS_PER_UNIT;
	      else
		{
		  bitoffset = 0;
		  var_field_off = true;
		}
	      bitoffset += (tree_to_uhwi (DECL_FIELD_BIT_OFFSET (fld))
			    - tree_to_uhwi (DECL_FIELD_BIT_OFFSET (repr)));
	      nlhs = build3 (COMPONENT_REF, TREE_TYPE (repr),
			     TREE_OPERAND (lhs, 0), repr,
			     var_field_off
			     ? TREE_OPERAND (lhs, 2) : NULL_TREE);
	      HOST_WIDE_INT bo = bitoffset.to_constant ();
	      bo_idx = (unsigned HOST_WIDE_INT) bo / limb_prec;
	      bo_bit = (unsigned HOST_WIDE_INT) bo % limb_prec;
	      bo_shift = bo_bit;
	      if (bitint_big_endian)
		{
		  bo_last = CEIL (prec + bo_bit, limb_prec) - 1;
		  bo_shift = (prec + bo_bit) % limb_prec;
		  bo_be_p = true;
		  if (bo_shift)
		    bo_shift = limb_prec - bo_shift;
		}
	    }
	}
    }
  if ((store_operand
       && TREE_CODE (store_operand) == SSA_NAME
       && (m_names == NULL
	   || !bitmap_bit_p (m_names, SSA_NAME_VERSION (store_operand)))
       && gimple_assign_cast_p (SSA_NAME_DEF_STMT (store_operand)))
      || gimple_assign_cast_p (stmt))
    {
      rhs1 = gimple_assign_rhs1 (store_operand
				 ? SSA_NAME_DEF_STMT (store_operand)
				 : stmt);
      if (TREE_CODE (rhs1) == VIEW_CONVERT_EXPR)
	rhs1 = TREE_OPERAND (rhs1, 0);
      /* Optimize mergeable ops ending with widening cast to _BitInt
	 (or followed by store).  We can lower just the limbs of the
	 cast operand and widen afterwards.  */
      if (TREE_CODE (rhs1) == SSA_NAME
	  && (m_names == NULL
	      || !bitmap_bit_p (m_names, SSA_NAME_VERSION (rhs1)))
	  && TREE_CODE (TREE_TYPE (rhs1)) == BITINT_TYPE
	  && bitint_precision_kind (TREE_TYPE (rhs1)) >= bitint_prec_large
	  && (CEIL ((unsigned) TYPE_PRECISION (TREE_TYPE (rhs1)),
		    limb_prec) < CEIL (prec, limb_prec)
	      || (kind == bitint_prec_huge
		  && TYPE_PRECISION (TREE_TYPE (rhs1)) < prec)))
	{
	  /* From here on, iterate only over the narrower operand's limbs;
	     sext records whether the extension limbs are sign or zero.  */
	  store_operand = rhs1;
	  prec = TYPE_PRECISION (TREE_TYPE (rhs1));
	  kind = bitint_precision_kind (TREE_TYPE (rhs1));
	  if (!TYPE_UNSIGNED (TREE_TYPE (rhs1)))
	    sext = true;
	}
    }
  tree idx = NULL_TREE, idx_first = NULL_TREE, idx_next = NULL_TREE;
  /* Large: cnt limbs in straight line code.  Huge: a loop over pairs of
     limbs (end limbs total) followed by up to cnt - 2 leftover limbs.  */
  if (kind == bitint_prec_large)
    cnt = CEIL (prec, limb_prec);
  else
    {
      rem = (prec % (2 * limb_prec));
      end = (prec - rem) / limb_prec;
      cnt = 2 + CEIL (rem, limb_prec);
      idx = idx_first = create_loop (bitint_big_endian
				     ? size_int (cnt - 2 + end - 1)
				     : size_zero_node, &idx_next);
    }

  /* For EQ_EXPR/NE_EXPR, split before STMT; edge_bb is where the per-limb
     inequality checks branch to the final block from.  */
  basic_block edge_bb = NULL;
  if (eq_p)
    {
      gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
      gsi_prev (&gsi);
      edge e = split_block (gsi_bb (gsi), gsi_stmt (gsi));
      edge_bb = e->src;
      if (kind == bitint_prec_large)
	m_gsi = gsi_end_bb (edge_bb);
    }
  else
    m_after_stmt = stmt;
  if (kind != bitint_prec_large)
    m_upwards_2limb = end;
  m_upwards = true;

  /* separate_ext is true when extension limbs (beyond the narrowed prec)
     must be emitted in a separate pass after the main loop.  */
  bool separate_ext
    = (prec != (unsigned) TYPE_PRECISION (type)
       && (CEIL ((unsigned) TYPE_PRECISION (type), limb_prec)
	   > CEIL (prec, limb_prec)));
  unsigned dst_idx_off = 0;
  if (separate_ext && bitint_big_endian)
    dst_idx_off = (CEIL ((unsigned) TYPE_PRECISION (type), limb_prec)
		   - CEIL (prec, limb_prec));

  for (unsigned i = 0; i < cnt; i++)
    {
      m_data_cnt = 0;
      if (kind == bitint_prec_large)
	idx = size_int (bitint_big_endian ? cnt - 1 - i : i);
      else if (i >= 2)
	idx = size_int (bitint_big_endian ? cnt - 1 - i : end + (i > 2));
      if (eq_p)
	{
	  /* Compare one limb of each operand; on inequality jump straight
	     to the final block.  */
	  rhs1 = handle_operand (cmp_op1, idx);
	  tree rhs2 = handle_operand (cmp_op2, idx);
	  g = gimple_build_cond (NE_EXPR, rhs1, rhs2, NULL_TREE, NULL_TREE);
	  insert_before (g);
	  edge e1 = split_block (gsi_bb (m_gsi), g);
	  e1->flags = EDGE_FALSE_VALUE;
	  edge e2 = make_edge (e1->src, gimple_bb (stmt), EDGE_TRUE_VALUE);
	  e1->probability = profile_probability::unlikely ();
	  e2->probability = e1->probability.invert ();
	  if (i == 0)
	    set_immediate_dominator (CDI_DOMINATORS, e2->dest, e2->src);
	  m_gsi = gsi_after_labels (e1->dest);
	}
      else
	{
	  /* Compute this limb of the result and store it.  */
	  if (store_operand)
	    rhs1 = handle_operand (store_operand, idx);
	  else
	    rhs1 = handle_stmt (stmt, idx);
	  if (!useless_type_conversion_p (m_limb_type, TREE_TYPE (rhs1)))
	    rhs1 = add_cast (m_limb_type, rhs1);
	  /* Remember the most significant computed limb for later sign
	     extension.  */
	  if (sext && i == cnt - 1)
	    ext = rhs1;
	  /* nidx is the destination limb index adjusted for bit-field
	     representative offset and big-endian extension offset.  */
	  tree nidx = idx;
	  HOST_WIDE_INT adj = bo_idx;
	  if (bo_be_p)
	    adj += bo_last - (CEIL (prec, limb_prec) - 1);
	  else
	    adj += dst_idx_off;
	  if (adj)
	    {
	      if (tree_fits_uhwi_p (idx))
		nidx = size_int (tree_to_uhwi (idx) + adj);
	      else
		{
		  nidx = make_ssa_name (sizetype);
		  g = gimple_build_assign (nidx, PLUS_EXPR, idx,
					   size_int (adj));
		  insert_before (g);
		}
	    }
	  bool done = false;
	  basic_block new_bb = NULL;
	  /* Handle stores into bit-fields.  */
	  if (bo_shift)
	    {
	      if (i == 0)
		{
		  /* The first store only fills the partial first limb of
		     the representative; in the huge case this is guarded
		     by a runtime check for the first iteration.  */
		  edge e2 = NULL;
		  if (kind != bitint_prec_large)
		    {
		      prepare_data_in_out (build_zero_cst (m_limb_type),
					   idx, &bf_next);
		      bf_next = m_data.pop ();
		      bf_cur = m_data.pop ();
		      g = gimple_build_cond (EQ_EXPR, idx,
					     bitint_big_endian
					     ? size_int (CEIL (prec,
							       limb_prec) - 1)
					     : size_zero_node,
					     NULL_TREE, NULL_TREE);
		      edge edge_true;
		      if_then_else (g, profile_probability::unlikely (),
				    edge_true, e2);
		      new_bb = e2->dest;
		    }
		  tree ftype
		    = build_nonstandard_integer_type (limb_prec - bo_shift, 1);
		  tree bfr = build_bit_field_ref (ftype, unshare_expr (nlhs),
						  limb_prec - bo_shift,
						  bitint_big_endian
						  ? (bo_idx + bo_last)
						    * limb_prec
						  : bo_idx * limb_prec
						    + bo_bit);
		  tree t = add_cast (ftype, rhs1);
		  g = gimple_build_assign (bfr, t);
		  insert_before (g);
		  if (eh)
		    {
		      maybe_duplicate_eh_stmt (g, stmt);
		      if (eh_pad)
			{
			  edge e = split_block (gsi_bb (m_gsi), g);
			  m_gsi = gsi_after_labels (e->dest);
			  add_eh_edge (e->src,
				       find_edge (gimple_bb (stmt), eh_pad));
			}
		    }
		  if (kind == bitint_prec_large)
		    {
		      bf_cur = rhs1;
		      done = true;
		    }
		  else if (e2)
		    m_gsi = gsi_after_labels (e2->src);
		}
	      if (!done)
		{
		  /* Combine the high bits of the previous limb (bf_cur)
		     with the low bits of the current one so full limbs of
		     the representative can be stored.  */
		  tree t1 = make_ssa_name (m_limb_type);
		  tree t2 = make_ssa_name (m_limb_type);
		  tree t3 = make_ssa_name (m_limb_type);
		  g = gimple_build_assign (t1, RSHIFT_EXPR, bf_cur,
					   build_int_cst (unsigned_type_node,
							  limb_prec
							  - bo_shift));
		  insert_before (g);
		  g = gimple_build_assign (t2, LSHIFT_EXPR, rhs1,
					   build_int_cst (unsigned_type_node,
							  bo_shift));
		  insert_before (g);
		  bf_cur = rhs1;
		  g = gimple_build_assign (t3, BIT_IOR_EXPR, t1, t2);
		  insert_before (g);
		  rhs1 = t3;
		  if (bf_next && i == 1)
		    {
		      g = gimple_build_assign (bf_next, bf_cur);
		      insert_before (g);
		    }
		}
	    }
	  if (!done)
	    {
	      /* Handle bit-field access to partial last limb if needed.  */
	      if (nlhs
		  && i == cnt - 1
		  && !separate_ext
		  && tree_fits_uhwi_p (idx))
		{
		  unsigned int tprec = TYPE_PRECISION (type);
		  unsigned int rprec = (tprec - 1) % limb_prec + 1;
		  if (rprec + bo_shift < (unsigned) limb_prec)
		    {
		      tree ftype
			= build_nonstandard_integer_type (rprec + bo_shift, 1);
		      tree bfr
			= build_bit_field_ref (ftype, unshare_expr (nlhs),
					       rprec + bo_shift,
					       bitint_big_endian
					       ? bo_idx * limb_prec + bo_bit
					       : (bo_idx + tprec / limb_prec)
						 * limb_prec);
		      tree t = add_cast (ftype, rhs1);
		      g = gimple_build_assign (bfr, t);
		      done = true;
		      bf_cur = NULL_TREE;
		    }
		  else if (rprec + bo_shift == (unsigned) limb_prec)
		    bf_cur = NULL_TREE;
		}
	      /* Otherwise, stores to any other lhs.  */
	      if (!done)
		{
		  tree l = limb_access (nlhs ? NULL_TREE : lhs_type,
					nlhs ? nlhs : lhs, nidx, true);
		  g = gimple_build_assign (l, rhs1);
		}
	      insert_before (g);
	      if (eh)
		{
		  maybe_duplicate_eh_stmt (g, stmt);
		  if (eh_pad)
		    {
		      edge e = split_block (gsi_bb (m_gsi), g);
		      m_gsi = gsi_after_labels (e->dest);
		      add_eh_edge (e->src,
				   find_edge (gimple_bb (stmt), eh_pad));
		    }
		}
	      if (new_bb)
		m_gsi = gsi_after_labels (new_bb);
	    }
	}
      m_first = false;
      /* In the huge case, the first two iterations emit the loop body;
	 i == 0 advances to the second limb of the pair, i == 1 emits the
	 index increment, the latch condition and leaves the loop.  */
      if (kind == bitint_prec_huge && i <= 1)
	{
	  if (i == 0)
	    {
	      idx = make_ssa_name (sizetype);
	      g = gimple_build_assign (idx, PLUS_EXPR, idx_first,
				       bitint_big_endian
				       ? size_int (-1) : size_one_node);
	      insert_before (g);
	    }
	  else
	    {
	      g = gimple_build_assign (idx_next, PLUS_EXPR, idx_first,
				       size_int (bitint_big_endian ? -2 : 2));
	      insert_before (g);
	      if (bitint_big_endian)
		g = gimple_build_cond (NE_EXPR, idx_first, size_int (cnt - 1),
				       NULL_TREE, NULL_TREE);
	      else
		g = gimple_build_cond (NE_EXPR, idx_next, size_int (end),
				       NULL_TREE, NULL_TREE);
	      insert_before (g);
	      if (eq_p)
		m_gsi = gsi_after_labels (edge_bb);
	      else
		m_gsi = gsi_for_stmt (stmt);
	      m_bb = NULL;
	    }
	}
    }

  if (separate_ext)
    {
      /* Emit the extension limbs: broadcast the sign bit of the most
	 significant computed limb, or zero for unsigned extension.  */
      if (sext)
	{
	  ext = add_cast (signed_type_for (m_limb_type), ext);
	  tree lpm1 = build_int_cst (unsigned_type_node,
				     limb_prec - 1);
	  tree n = make_ssa_name (TREE_TYPE (ext));
	  g = gimple_build_assign (n, RSHIFT_EXPR, ext, lpm1);
	  insert_before (g);
	  ext = add_cast (m_limb_type, n);
	}
      else
	ext = build_zero_cst (m_limb_type);
      kind = bitint_precision_kind (type);
      unsigned start = CEIL (prec, limb_prec);
      prec = TYPE_PRECISION (type);
      unsigned total = CEIL (prec, limb_prec);
      idx = idx_first = idx_next = NULL_TREE;
      /* Few enough extension limbs are emitted straight line, otherwise
	 in a loop.  */
      if (prec <= (start + 2 + (bo_shift != 0)) * limb_prec)
	kind = bitint_prec_large;
      if (kind == bitint_prec_large)
	cnt = total - start;
      else
	{
	  rem = prec % limb_prec;
	  end = (prec - rem) / limb_prec;
	  cnt = (bo_shift != 0) + 1 + (rem != 0);
	}
      if (bitint_big_endian && bo_shift != 0 && (prec % limb_prec) == 0)
	++total;
      for (unsigned i = 0; i < cnt; i++)
	{
	  if (kind == bitint_prec_large || (i == 0 && bo_shift != 0))
	    idx = size_int (bo_idx
			    + (bitint_big_endian
			       ? total - 1 - start - i : start + i));
	  else if (i == cnt - 1 && rem != 0)
	    idx = size_int (bo_idx + (bitint_big_endian ? 0 : end));
	  else if (i == (bo_shift != 0))
	    idx = create_loop (size_int (bo_idx
					 + (bitint_big_endian
					    ? total - 1 - start - i
					    : start + i)), &idx_next);
	  rhs1 = ext;
	  /* For bit-field stores, merge the pending partial limb with the
	     extension value once, then store plain extension limbs.  */
	  if (bf_cur != NULL_TREE && bf_cur != ext)
	    {
	      tree t1 = make_ssa_name (m_limb_type);
	      g = gimple_build_assign (t1, RSHIFT_EXPR, bf_cur,
				       build_int_cst (unsigned_type_node,
						      limb_prec - bo_shift));
	      insert_before (g);
	      if (integer_zerop (ext))
		rhs1 = t1;
	      else
		{
		  tree t2 = make_ssa_name (m_limb_type);
		  rhs1 = make_ssa_name (m_limb_type);
		  g = gimple_build_assign (t2, LSHIFT_EXPR, ext,
					   build_int_cst (unsigned_type_node,
							  bo_shift));
		  insert_before (g);
		  g = gimple_build_assign (rhs1, BIT_IOR_EXPR, t1, t2);
		  insert_before (g);
		}
	      bf_cur = ext;
	    }
	  bool done = false;
	  /* Handle bit-field access to partial last limb if needed.  */
	  if (nlhs && i == cnt - 1)
	    {
	      unsigned int tprec = TYPE_PRECISION (type);
	      unsigned int rprec = (tprec - 1) % limb_prec + 1;
	      if (rprec + bo_shift < (unsigned) limb_prec)
		{
		  tree ftype
		    = build_nonstandard_integer_type (rprec + bo_shift, 1);
		  tree bfr
		    = build_bit_field_ref (ftype, unshare_expr (nlhs),
					   rprec + bo_shift,
					   bitint_big_endian
					   ? bo_idx * limb_prec + bo_bit
					   : (bo_idx + tprec / limb_prec)
					     * limb_prec);
		  tree t = add_cast (ftype, rhs1);
		  g = gimple_build_assign (bfr, t);
		  done = true;
		  bf_cur = NULL_TREE;
		}
	      else if (rprec + bo_shift == (unsigned) limb_prec)
		bf_cur = NULL_TREE;
	    }
	  /* Otherwise, stores to any other lhs.  */
	  if (!done)
	    {
	      tree l = limb_access (nlhs ? NULL_TREE : lhs_type,
				    nlhs ? nlhs : lhs, idx, true);

	      if (bitint_extended
		  && sext
		  && TYPE_UNSIGNED (lhs_type)
		  && tree_fits_uhwi_p (idx)
		  && !nlhs)
		{
		  rhs1 = add_cast (limb_access_type (lhs_type, idx), rhs1);
		  rhs1 = add_cast (TREE_TYPE (l), rhs1);
		}

	      g = gimple_build_assign (l, rhs1);
	    }
	  insert_before (g);
	  if (eh)
	    {
	      maybe_duplicate_eh_stmt (g, stmt);
	      if (eh_pad)
		{
		  edge e = split_block (gsi_bb (m_gsi), g);
		  m_gsi = gsi_after_labels (e->dest);
		  add_eh_edge (e->src, find_edge (gimple_bb (stmt), eh_pad));
		}
	    }
	  /* Close the extension loop if one was created.  */
	  if (kind == bitint_prec_huge && i == (bo_shift != 0))
	    {
	      g = gimple_build_assign (idx_next, PLUS_EXPR, idx,
				       bitint_big_endian
				       ? size_int (-1) : size_one_node);
	      insert_before (g);
	      if (bitint_big_endian && rem != 0)
		g = gimple_build_cond (NE_EXPR, idx,
				       size_int (bo_idx + 1),
				       NULL_TREE, NULL_TREE);
	      else
		g = gimple_build_cond (NE_EXPR, idx_next,
				       size_int (bo_idx
						 + (bitint_big_endian
						    ? 0 : end)),
				       NULL_TREE, NULL_TREE);
	      insert_before (g);
	      m_gsi = gsi_for_stmt (stmt);
	      m_bb = NULL;
	    }
	}
    }
  /* Flush any still pending partial limb of a bit-field store into the
     final partial limb of the representative.  */
  if (bf_cur != NULL_TREE)
    {
      unsigned int tprec = TYPE_PRECISION (type);
      unsigned int rprec = (tprec + bo_shift) % limb_prec;
      tree ftype = build_nonstandard_integer_type (rprec, 1);
      tree bfr = build_bit_field_ref (ftype, unshare_expr (nlhs),
				      rprec,
				      bitint_big_endian
				      ? bo_idx * limb_prec + bo_bit
				      : (bo_idx + (tprec + bo_bit) / limb_prec)
					* limb_prec);
      rhs1 = bf_cur;
      if (bf_cur != ext)
	{
	  rhs1 = make_ssa_name (TREE_TYPE (rhs1));
	  g = gimple_build_assign (rhs1, RSHIFT_EXPR, bf_cur,
				   build_int_cst (unsigned_type_node,
						  limb_prec - bo_shift));
	  insert_before (g);
	}
      rhs1 = add_cast (ftype, rhs1);
      g = gimple_build_assign (bfr, rhs1);
      insert_before (g);
      if (eh)
	{
	  maybe_duplicate_eh_stmt (g, stmt);
	  if (eh_pad)
	    {
	      edge e = split_block (gsi_bb (m_gsi), g);
	      m_gsi = gsi_after_labels (e->dest);
	      add_eh_edge (e->src, find_edge (gimple_bb (stmt), eh_pad));
	    }
	}
    }

  /* The original store has been fully replaced by per-limb stores;
     remove it.  */
  if (gimple_store_p (stmt))
    {
      unlink_stmt_vdef (stmt);
      release_ssa_name (gimple_vdef (stmt));
      gsi_remove (&m_gsi, true);
    }
  /* For comparisons, merge the per-limb inequality jumps and the
     fall-through (all limbs equal) edge in a boolean PHI; the edge from
     the current block means equality, so invert CMP_CODE for the
     caller.  */
  if (eq_p)
    {
      lhs = make_ssa_name (boolean_type_node);
      basic_block bb = gimple_bb (stmt);
      gphi *phi = create_phi_node (lhs, bb);
      edge e = find_edge (gsi_bb (m_gsi), bb);
      unsigned int n = EDGE_COUNT (bb->preds);
      for (unsigned int i = 0; i < n; i++)
	{
	  edge e2 = EDGE_PRED (bb, i);
	  add_phi_arg (phi, e == e2 ? boolean_true_node : boolean_false_node,
		       e2, UNKNOWN_LOCATION);
	}
      cmp_code = cmp_code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
      return lhs;
    }
  else
    return NULL_TREE;
}
3292 :
3293 : /* Handle a large/huge _BitInt comparison statement STMT other than
3294 : EQ_EXPR/NE_EXPR. CMP_CODE, CMP_OP1 and CMP_OP2 meaning is like in
3295 : lower_mergeable_stmt. The {GT,GE,LT,LE}_EXPR comparisons are
3296 : lowered by iteration from the most significant limb downwards to
3297 : the least significant one, for large _BitInt in straight line code,
3298 : otherwise with most significant limb handled in
3299 : straight line code followed by a loop handling one limb at a time.
3300 : Comparisons with unsigned huge _BitInt with precisions which are
3301 : multiples of limb precision can use just the loop and don't need to
3302 : handle most significant limb before the loop. The loop or straight
3303 : line code jumps to final basic block if a particular pair of limbs
3304 : is not equal. */
3305 :
tree
bitint_large_huge::lower_comparison_stmt (gimple *stmt, tree_code &cmp_code,
					  tree cmp_op1, tree cmp_op2)
{
  tree type = TREE_TYPE (cmp_op1);
  gcc_assert (TREE_CODE (type) == BITINT_TYPE);
  bitint_prec_kind kind = bitint_precision_kind (type);
  gcc_assert (kind >= bitint_prec_large);
  gimple *g;
  /* Fast path: a signed >= 0 or < 0 comparison only needs to test the
     sign bit, i.e. compare the most significant limb as signed against
     zero.  */
  if (!TYPE_UNSIGNED (type)
      && integer_zerop (cmp_op2)
      && (cmp_code == GE_EXPR || cmp_code == LT_EXPR))
    {
      unsigned end = CEIL ((unsigned) TYPE_PRECISION (type), limb_prec) - 1;
      tree idx = size_int (bitint_big_endian ? 0 : end);
      m_data_cnt = 0;
      tree rhs1 = handle_operand (cmp_op1, idx);
      if (TYPE_UNSIGNED (TREE_TYPE (rhs1)))
	{
	  tree stype = signed_type_for (TREE_TYPE (rhs1));
	  rhs1 = add_cast (stype, rhs1);
	}
      tree lhs = make_ssa_name (boolean_type_node);
      g = gimple_build_assign (lhs, cmp_code, rhs1,
			       build_zero_cst (TREE_TYPE (rhs1)));
      insert_before (g);
      cmp_code = NE_EXPR;
      return lhs;
    }

  /* Large: compare all cnt limbs in straight line code.  Huge: handle
     the most significant partial limb (if any) separately, then loop
     downwards over the remaining end limbs.  */
  unsigned cnt, rem = 0, end = 0;
  tree idx = NULL_TREE, idx_next = NULL_TREE;
  if (kind == bitint_prec_large)
    cnt = CEIL ((unsigned) TYPE_PRECISION (type), limb_prec);
  else
    {
      rem = ((unsigned) TYPE_PRECISION (type) % limb_prec);
      /* For signed types the most significant limb is compared as signed
	 even when full, so it can't be part of the loop.  */
      if (rem == 0 && !TYPE_UNSIGNED (type))
	rem = limb_prec;
      end = ((unsigned) TYPE_PRECISION (type) - rem) / limb_prec;
      cnt = 1 + (rem != 0);
    }

  basic_block edge_bb = NULL;
  gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
  gsi_prev (&gsi);
  edge e = split_block (gsi_bb (gsi), gsi_stmt (gsi));
  edge_bb = e->src;
  m_gsi = gsi_end_bb (edge_bb);

  /* edges[2 * i] is taken when limb pair i compares greater,
     edges[2 * i + 1] when it compares smaller; both jump to the final
     block where a PHI selects the result.  */
  edge *edges = XALLOCAVEC (edge, cnt * 2);
  for (unsigned i = 0; i < cnt; i++)
    {
      m_data_cnt = 0;
      if (kind == bitint_prec_large)
	idx = size_int (bitint_big_endian ? i : cnt - i - 1);
      else if (i == cnt - 1)
	idx = create_loop (size_int (bitint_big_endian ? cnt - 1 : end - 1),
			   &idx_next);
      else
	idx = size_int (bitint_big_endian ? 0 : end);
      tree rhs1 = handle_operand (cmp_op1, idx);
      tree rhs2 = handle_operand (cmp_op2, idx);
      /* Only the most significant limb of a signed type is compared as
	 signed; all others as unsigned.  */
      if (i == 0
	  && !TYPE_UNSIGNED (type)
	  && TYPE_UNSIGNED (TREE_TYPE (rhs1)))
	{
	  tree stype = signed_type_for (TREE_TYPE (rhs1));
	  rhs1 = add_cast (stype, rhs1);
	  rhs2 = add_cast (stype, rhs2);
	}
      g = gimple_build_cond (GT_EXPR, rhs1, rhs2, NULL_TREE, NULL_TREE);
      insert_before (g);
      edge e1 = split_block (gsi_bb (m_gsi), g);
      e1->flags = EDGE_FALSE_VALUE;
      edge e2 = make_edge (e1->src, gimple_bb (stmt), EDGE_TRUE_VALUE);
      e1->probability = profile_probability::likely ();
      e2->probability = e1->probability.invert ();
      if (i == 0)
	set_immediate_dominator (CDI_DOMINATORS, e2->dest, e2->src);
      m_gsi = gsi_after_labels (e1->dest);
      edges[2 * i] = e2;
      g = gimple_build_cond (LT_EXPR, rhs1, rhs2, NULL_TREE, NULL_TREE);
      insert_before (g);
      e1 = split_block (gsi_bb (m_gsi), g);
      e1->flags = EDGE_FALSE_VALUE;
      e2 = make_edge (e1->src, gimple_bb (stmt), EDGE_TRUE_VALUE);
      e1->probability = profile_probability::unlikely ();
      e2->probability = e1->probability.invert ();
      m_gsi = gsi_after_labels (e1->dest);
      edges[2 * i + 1] = e2;
      m_first = false;
      /* Close the downward loop in the huge case.  */
      if (kind == bitint_prec_huge && i == cnt - 1)
	{
	  g = gimple_build_assign (idx_next, PLUS_EXPR, idx,
				   bitint_big_endian ? size_one_node
				   : size_int (-1));
	  insert_before (g);
	  g = gimple_build_cond (NE_EXPR, idx,
				 bitint_big_endian
				 ? size_int (end - 1 + (cnt != 1))
				 : size_zero_node,
				 NULL_TREE, NULL_TREE);
	  insert_before (g);
	  edge true_edge, false_edge;
	  extract_true_false_edges_from_block (gsi_bb (m_gsi),
					       &true_edge, &false_edge);
	  m_gsi = gsi_after_labels (false_edge->dest);
	  m_bb = NULL;
	}
    }

  /* Merge the results: even-indexed edges mean op1 > op2, odd-indexed
     mean op1 < op2, the fall-through edge means equality; translate the
     original CMP_CODE accordingly and return NE_EXPR semantics.  */
  tree lhs = make_ssa_name (boolean_type_node);
  basic_block bb = gimple_bb (stmt);
  gphi *phi = create_phi_node (lhs, bb);
  for (unsigned int i = 0; i < cnt * 2; i++)
    {
      tree val = ((cmp_code == GT_EXPR || cmp_code == GE_EXPR)
		  ^ (i & 1)) ? boolean_true_node : boolean_false_node;
      add_phi_arg (phi, val, edges[i], UNKNOWN_LOCATION);
    }
  add_phi_arg (phi, (cmp_code == GE_EXPR || cmp_code == LE_EXPR)
		    ? boolean_true_node : boolean_false_node,
	       find_edge (gsi_bb (m_gsi), bb), UNKNOWN_LOCATION);
  cmp_code = NE_EXPR;
  return lhs;
}
3433 :
3434 : /* Lower large/huge _BitInt left and right shift except for left
3435 : shift by < limb_prec constant. */
3436 :
3437 : void
3438 547 : bitint_large_huge::lower_shift_stmt (tree obj, gimple *stmt)
3439 : {
3440 547 : tree rhs1 = gimple_assign_rhs1 (stmt);
3441 547 : tree lhs = gimple_assign_lhs (stmt);
3442 547 : tree_code rhs_code = gimple_assign_rhs_code (stmt);
3443 547 : tree type = TREE_TYPE (rhs1);
3444 547 : gimple *final_stmt = gsi_stmt (m_gsi);
3445 547 : gcc_assert (TREE_CODE (type) == BITINT_TYPE
3446 : && bitint_precision_kind (type) >= bitint_prec_large);
3447 547 : int prec = TYPE_PRECISION (type);
3448 547 : tree n = gimple_assign_rhs2 (stmt), n1, n2, n3, n4;
3449 547 : gimple *g;
 : /* If the caller did not supply a destination, use the variable backing
 : LHS's coalesced partition. */
3450 547 : if (obj == NULL_TREE)
3451 : {
3452 443 : int part = var_to_partition (m_map, lhs);
3453 443 : gcc_assert (m_vars[part] != NULL_TREE);
3454 : obj = m_vars[part];
3455 : }
3456 : /* Preparation code common for both left and right shifts.
3457 : unsigned n1 = n % limb_prec;
3458 : size_t n2 = n / limb_prec;
3459 : size_t n3 = n1 != 0;
3460 : unsigned n4 = (limb_prec - n1) % limb_prec;
3461 : (for power of 2 limb_prec n4 can be -n1 & limb_prec). */
 : /* Constant shift counts are folded at compile time; variable counts
 : emit the corresponding statements. */
3462 547 : if (TREE_CODE (n) == INTEGER_CST)
3463 : {
3464 230 : tree lp = build_int_cst (TREE_TYPE (n), limb_prec);
3465 230 : n1 = int_const_binop (TRUNC_MOD_EXPR, n, lp);
3466 230 : n2 = fold_convert (sizetype, int_const_binop (TRUNC_DIV_EXPR, n, lp));
3467 230 : n3 = size_int (!integer_zerop (n1));
3468 230 : n4 = int_const_binop (TRUNC_MOD_EXPR,
3469 230 : int_const_binop (MINUS_EXPR, lp, n1), lp);
3470 : }
3471 : else
3472 : {
3473 317 : n1 = make_ssa_name (TREE_TYPE (n));
3474 317 : n2 = make_ssa_name (sizetype);
3475 317 : n3 = make_ssa_name (sizetype);
3476 317 : n4 = make_ssa_name (TREE_TYPE (n));
 : /* For power-of-two limb_prec use cheaper mask/shift forms of
 : the % and / computations above. */
3477 317 : if (pow2p_hwi (limb_prec))
3478 : {
3479 317 : tree lpm1 = build_int_cst (TREE_TYPE (n), limb_prec - 1);
3480 317 : g = gimple_build_assign (n1, BIT_AND_EXPR, n, lpm1);
3481 317 : insert_before (g);
3482 935 : g = gimple_build_assign (useless_type_conversion_p (sizetype,
3483 317 : TREE_TYPE (n))
3484 301 : ? n2 : make_ssa_name (TREE_TYPE (n)),
3485 : RSHIFT_EXPR, n,
3486 317 : build_int_cst (TREE_TYPE (n),
3487 634 : exact_log2 (limb_prec)));
3488 317 : insert_before (g);
3489 317 : if (gimple_assign_lhs (g) != n2)
3490 : {
3491 301 : g = gimple_build_assign (n2, NOP_EXPR, gimple_assign_lhs (g));
3492 301 : insert_before (g);
3493 : }
3494 317 : g = gimple_build_assign (make_ssa_name (TREE_TYPE (n)),
3495 : NEGATE_EXPR, n1);
3496 317 : insert_before (g);
3497 317 : g = gimple_build_assign (n4, BIT_AND_EXPR, gimple_assign_lhs (g),
3498 : lpm1);
3499 317 : insert_before (g);
3500 : }
3501 : else
3502 : {
3503 0 : tree lp = build_int_cst (TREE_TYPE (n), limb_prec);
3504 0 : g = gimple_build_assign (n1, TRUNC_MOD_EXPR, n, lp);
3505 0 : insert_before (g);
3506 0 : g = gimple_build_assign (useless_type_conversion_p (sizetype,
3507 0 : TREE_TYPE (n))
3508 0 : ? n2 : make_ssa_name (TREE_TYPE (n)),
3509 : TRUNC_DIV_EXPR, n, lp);
3510 0 : insert_before (g);
3511 0 : if (gimple_assign_lhs (g) != n2)
3512 : {
3513 0 : g = gimple_build_assign (n2, NOP_EXPR, gimple_assign_lhs (g));
3514 0 : insert_before (g);
3515 : }
3516 0 : g = gimple_build_assign (make_ssa_name (TREE_TYPE (n)),
3517 : MINUS_EXPR, lp, n1);
3518 0 : insert_before (g);
3519 0 : g = gimple_build_assign (n4, TRUNC_MOD_EXPR, gimple_assign_lhs (g),
3520 : lp);
3521 0 : insert_before (g);
3522 : }
 : /* n3 = (n1 != 0) as a sizetype value. */
3523 317 : g = gimple_build_assign (make_ssa_name (boolean_type_node), NE_EXPR, n1,
3524 317 : build_zero_cst (TREE_TYPE (n)));
3525 317 : insert_before (g);
3526 317 : g = gimple_build_assign (n3, NOP_EXPR, gimple_assign_lhs (g));
3527 317 : insert_before (g);
3528 : }
 : /* P is the index of the most significant limb, see the pseudo-code
 : comments below. */
3529 1094 : tree p = build_int_cst (sizetype,
3530 547 : prec / limb_prec - (prec % limb_prec == 0));
3531 547 : if (rhs_code == RSHIFT_EXPR)
3532 : {
3533 : /* Lower
3534 : dst = src >> n;
3535 : as
3536 : unsigned n1 = n % limb_prec;
3537 : size_t n2 = n / limb_prec;
3538 : size_t n3 = n1 != 0;
3539 : unsigned n4 = (limb_prec - n1) % limb_prec;
3540 : size_t idx;
3541 : size_t p = prec / limb_prec - (prec % limb_prec == 0);
3542 : int signed_p = (typeof (src) -1) < 0;
3543 : for (idx = n2; idx < ((!signed_p && (prec % limb_prec == 0))
3544 : ? p : p - n3); ++idx)
3545 : dst[idx - n2] = (src[idx] >> n1) | (src[idx + n3] << n4);
3546 : limb_type ext;
3547 : if (prec % limb_prec == 0)
3548 : ext = src[p];
3549 : else if (signed_p)
3550 : ext = ((signed limb_type) (src[p] << (limb_prec
3551 : - (prec % limb_prec))))
3552 : >> (limb_prec - (prec % limb_prec));
3553 : else
3554 : ext = src[p] & (((limb_type) 1 << (prec % limb_prec)) - 1);
3555 : if (!signed_p && (prec % limb_prec == 0))
3556 : ;
3557 : else if (idx < prec / 64)
3558 : {
3559 : dst[idx - n2] = (src[idx] >> n1) | (ext << n4);
3560 : ++idx;
3561 : }
3562 : idx -= n2;
3563 : if (signed_p)
3564 : {
3565 : dst[idx] = ((signed limb_type) ext) >> n1;
3566 : ext = ((signed limb_type) ext) >> (limb_prec - 1);
3567 : }
3568 : else
3569 : {
3570 : dst[idx] = ext >> n1;
3571 : ext = 0;
3572 : }
3573 : for (++idx; idx <= p; ++idx)
3574 : dst[idx] = ext; */
 : /* PMN3 is the bound of the main copy loop (p - n3 for
 : little-endian limb ordering, see the pseudo-code above). */
3575 364 : tree pmn3;
3576 364 : if (TYPE_UNSIGNED (type) && prec % limb_prec == 0)
3577 100 : pmn3 = bitint_big_endian ? size_zero_node : p;
3578 264 : else if (bitint_big_endian)
3579 : pmn3 = n3;
3580 264 : else if (TREE_CODE (n3) == INTEGER_CST)
3581 99 : pmn3 = int_const_binop (MINUS_EXPR, p, n3);
3582 : else
3583 : {
3584 165 : pmn3 = make_ssa_name (sizetype);
3585 165 : g = gimple_build_assign (pmn3, MINUS_EXPR, p, n3);
3586 165 : insert_before (g);
3587 : }
3588 364 : tree pmn2 = NULL_TREE;
 : /* Guard the main loop so it is skipped entirely when it would
 : iterate zero times. */
3589 364 : if (bitint_big_endian)
3590 : {
3591 0 : if (TREE_CODE (n2) == INTEGER_CST)
3592 0 : pmn2 = int_const_binop (MINUS_EXPR, p, n2);
3593 : else
3594 : {
3595 0 : pmn2 = make_ssa_name (sizetype);
3596 0 : g = gimple_build_assign (pmn2, MINUS_EXPR, p, n2);
3597 0 : insert_before (g);
3598 : }
3599 0 : g = gimple_build_cond (GT_EXPR, pmn2, pmn3, NULL_TREE, NULL_TREE);
3600 : }
3601 : else
3602 364 : g = gimple_build_cond (LT_EXPR, n2, pmn3, NULL_TREE, NULL_TREE);
3603 364 : edge edge_true, edge_false;
3604 364 : if_then (g, profile_probability::likely (), edge_true, edge_false);
 : /* Main loop: dst[idx - n2] = (src[idx] >> n1) | (src[idx + n3] << n4). */
3605 364 : tree idx_next;
3606 728 : tree idx = create_loop (bitint_big_endian ? pmn2 : n2, &idx_next);
3607 364 : tree idxmn2 = make_ssa_name (sizetype);
3608 364 : tree idxpn3 = make_ssa_name (sizetype);
3609 728 : g = gimple_build_assign (idxmn2,
3610 : bitint_big_endian ? PLUS_EXPR : MINUS_EXPR,
3611 : idx, n2);
3612 364 : insert_before (g);
3613 728 : g = gimple_build_assign (idxpn3,
3614 : bitint_big_endian ? MINUS_EXPR : PLUS_EXPR,
3615 : idx, n3);
3616 364 : insert_before (g);
3617 364 : m_data_cnt = 0;
3618 364 : tree t1 = handle_operand (rhs1, idx);
3619 364 : m_first = false;
3620 364 : g = gimple_build_assign (make_ssa_name (m_limb_type),
3621 : RSHIFT_EXPR, t1, n1);
3622 364 : insert_before (g);
3623 364 : t1 = gimple_assign_lhs (g);
3624 364 : if (!integer_zerop (n3))
3625 : {
3626 276 : m_data_cnt = 0;
3627 276 : tree t2 = handle_operand (rhs1, idxpn3);
3628 276 : g = gimple_build_assign (make_ssa_name (m_limb_type),
3629 : LSHIFT_EXPR, t2, n4);
3630 276 : insert_before (g);
3631 276 : t2 = gimple_assign_lhs (g);
3632 276 : g = gimple_build_assign (make_ssa_name (m_limb_type),
3633 : BIT_IOR_EXPR, t1, t2);
3634 276 : insert_before (g);
3635 276 : t1 = gimple_assign_lhs (g);
3636 : }
3637 364 : tree l = limb_access (TREE_TYPE (lhs), obj, idxmn2, true);
3638 364 : g = gimple_build_assign (l, t1);
3639 364 : insert_before (g);
3640 364 : g = gimple_build_assign (idx_next, PLUS_EXPR, idx,
3641 0 : bitint_big_endian ? size_int (-1)
3642 : : size_one_node);
3643 364 : insert_before (g);
3644 728 : g = gimple_build_cond (bitint_big_endian ? GT_EXPR : LT_EXPR,
3645 : idx_next, pmn3, NULL_TREE, NULL_TREE);
3646 364 : insert_before (g);
 : /* Merge the loop-exit index with the skipped-loop case through
 : a PHI in the join block. */
3647 364 : idx = make_ssa_name (sizetype);
3648 364 : m_gsi = gsi_for_stmt (final_stmt);
3649 364 : gphi *phi = create_phi_node (idx, gsi_bb (m_gsi));
3650 364 : edge_false = find_edge (edge_false->src, gsi_bb (m_gsi));
3651 364 : edge_true = EDGE_PRED (gsi_bb (m_gsi),
3652 : EDGE_PRED (gsi_bb (m_gsi), 0) == edge_false);
3653 728 : add_phi_arg (phi, bitint_big_endian ? pmn2 : n2, edge_false,
3654 : UNKNOWN_LOCATION);
3655 364 : add_phi_arg (phi, idx_next, edge_true, UNKNOWN_LOCATION);
 : /* Compute EXT, the sign/zero extension limb, from the most
 : significant limb of the source. */
3656 364 : m_data_cnt = 0;
3657 364 : tree ms = handle_operand (rhs1, bitint_big_endian ? size_zero_node : p);
3658 364 : tree ext = ms;
3659 364 : if (!types_compatible_p (TREE_TYPE (ms), m_limb_type))
3660 207 : ext = add_cast (m_limb_type, ms);
 : /* The "else if (idx < prec / 64)" case of the pseudo-code:
 : one more destination limb combines src[idx] with EXT. */
3661 549 : if (!(TYPE_UNSIGNED (type) && prec % limb_prec == 0)
3662 449 : && !integer_zerop (n3))
3663 : {
3664 226 : if (bitint_big_endian)
3665 0 : g = gimple_build_cond (GT_EXPR, idx, size_zero_node,
3666 : NULL_TREE, NULL_TREE);
3667 : else
3668 226 : g = gimple_build_cond (LT_EXPR, idx, p, NULL_TREE, NULL_TREE);
3669 226 : if_then (g, profile_probability::likely (), edge_true, edge_false);
3670 226 : m_data_cnt = 0;
3671 226 : t1 = handle_operand (rhs1, idx);
3672 226 : g = gimple_build_assign (make_ssa_name (m_limb_type),
3673 : RSHIFT_EXPR, t1, n1);
3674 226 : insert_before (g);
3675 226 : t1 = gimple_assign_lhs (g);
3676 226 : g = gimple_build_assign (make_ssa_name (m_limb_type),
3677 : LSHIFT_EXPR, ext, n4);
3678 226 : insert_before (g);
3679 226 : tree t2 = gimple_assign_lhs (g);
3680 226 : g = gimple_build_assign (make_ssa_name (m_limb_type),
3681 : BIT_IOR_EXPR, t1, t2);
3682 226 : insert_before (g);
3683 226 : t1 = gimple_assign_lhs (g);
3684 226 : idxmn2 = make_ssa_name (sizetype);
3685 452 : g = gimple_build_assign (idxmn2, bitint_big_endian
3686 : ? PLUS_EXPR : MINUS_EXPR, idx, n2);
3687 226 : insert_before (g);
3688 226 : l = limb_access (TREE_TYPE (lhs), obj, idxmn2, true);
3689 226 : g = gimple_build_assign (l, t1);
3690 226 : insert_before (g);
3691 226 : idx_next = make_ssa_name (sizetype);
3692 226 : g = gimple_build_assign (idx_next, PLUS_EXPR, idx,
3693 : bitint_big_endian
3694 0 : ? size_int (-1) : size_one_node);
3695 226 : insert_before (g);
3696 226 : m_gsi = gsi_for_stmt (final_stmt);
3697 226 : tree nidx = make_ssa_name (sizetype);
3698 226 : phi = create_phi_node (nidx, gsi_bb (m_gsi));
3699 226 : edge_false = find_edge (edge_false->src, gsi_bb (m_gsi));
3700 226 : edge_true = EDGE_PRED (gsi_bb (m_gsi),
3701 : EDGE_PRED (gsi_bb (m_gsi), 0) == edge_false);
3702 226 : add_phi_arg (phi, idx, edge_false, UNKNOWN_LOCATION);
3703 226 : add_phi_arg (phi, idx_next, edge_true, UNKNOWN_LOCATION);
3704 226 : idx = nidx;
3705 : }
 : /* Store dst[idx] = ext >> n1 (with arithmetic shift for signed
 : types) and turn EXT into the pure sign-fill value. */
3706 728 : g = gimple_build_assign (make_ssa_name (sizetype),
3707 : bitint_big_endian ? PLUS_EXPR : MINUS_EXPR,
3708 : idx, n2);
3709 364 : insert_before (g);
3710 364 : idx = gimple_assign_lhs (g);
3711 364 : tree sext = ext;
3712 364 : if (!TYPE_UNSIGNED (type))
3713 179 : sext = add_cast (signed_type_for (m_limb_type), ext);
3714 364 : g = gimple_build_assign (make_ssa_name (TREE_TYPE (sext)),
3715 : RSHIFT_EXPR, sext, n1);
3716 364 : insert_before (g);
3717 364 : t1 = gimple_assign_lhs (g);
3718 364 : if (!TYPE_UNSIGNED (type))
3719 : {
3720 179 : t1 = add_cast (m_limb_type, t1);
3721 179 : g = gimple_build_assign (make_ssa_name (TREE_TYPE (sext)),
3722 : RSHIFT_EXPR, sext,
3723 179 : build_int_cst (TREE_TYPE (n),
3724 179 : limb_prec - 1));
3725 179 : insert_before (g);
3726 179 : ext = add_cast (m_limb_type, gimple_assign_lhs (g));
3727 : }
3728 : else
3729 185 : ext = build_zero_cst (m_limb_type);
3730 364 : l = limb_access (TREE_TYPE (lhs), obj, idx, true);
3731 364 : g = gimple_build_assign (l, t1);
3732 364 : insert_before (g);
 : /* Final loop: fill the remaining most significant limbs with EXT. */
3733 364 : g = gimple_build_assign (make_ssa_name (sizetype), PLUS_EXPR, idx,
3734 : bitint_big_endian
3735 0 : ? size_int (-1) : size_one_node);
3736 364 : insert_before (g);
3737 364 : if (bitint_big_endian)
3738 : {
3739 0 : tree new_idx = gimple_assign_lhs (g);
3740 0 : g = gimple_build_cond (NE_EXPR, idx, size_zero_node,
3741 : NULL_TREE, NULL_TREE);
3742 0 : idx = new_idx;
3743 : }
3744 : else
3745 : {
3746 364 : idx = gimple_assign_lhs (g);
3747 364 : g = gimple_build_cond (LE_EXPR, idx, p, NULL_TREE, NULL_TREE);
3748 : }
3749 364 : if_then (g, profile_probability::likely (), edge_true, edge_false);
3750 364 : idx = create_loop (idx, &idx_next);
3751 364 : l = limb_access (TREE_TYPE (lhs), obj, idx, true);
3752 364 : g = gimple_build_assign (l, ext);
3753 364 : insert_before (g);
3754 364 : g = gimple_build_assign (idx_next, PLUS_EXPR, idx,
3755 : bitint_big_endian
3756 0 : ? size_int (-1) : size_one_node);
3757 364 : insert_before (g);
3758 364 : if (bitint_big_endian)
3759 0 : g = gimple_build_cond (NE_EXPR, idx, size_zero_node,
3760 : NULL_TREE, NULL_TREE);
3761 : else
3762 364 : g = gimple_build_cond (LE_EXPR, idx_next, p, NULL_TREE, NULL_TREE);
3763 364 : insert_before (g);
3764 : }
3765 : else
3766 : {
3767 : /* Lower
3768 : dst = src << n;
3769 : as
3770 : unsigned n1 = n % limb_prec;
3771 : size_t n2 = n / limb_prec;
3772 : size_t n3 = n1 != 0;
3773 : unsigned n4 = (limb_prec - n1) % limb_prec;
3774 : size_t idx;
3775 : size_t p = prec / limb_prec - (prec % limb_prec == 0);
3776 : for (idx = p; (ssize_t) idx >= (ssize_t) (n2 + n3); --idx)
3777 : dst[idx] = (src[idx - n2] << n1) | (src[idx - n2 - n3] >> n4);
3778 : if (n1)
3779 : {
3780 : dst[idx] = src[idx - n2] << n1;
3781 : --idx;
3782 : }
3783 : for (; (ssize_t) idx >= 0; --idx)
3784 : dst[idx] = 0; */
 : /* n2 + n3 is the lowest destination limb index written by the
 : main loop, see the pseudo-code above. */
3785 183 : tree n2pn3;
3786 183 : if (TREE_CODE (n2) == INTEGER_CST && TREE_CODE (n3) == INTEGER_CST)
3787 67 : n2pn3 = int_const_binop (PLUS_EXPR, n2, n3);
3788 : else
3789 : {
3790 116 : n2pn3 = make_ssa_name (sizetype);
3791 116 : g = gimple_build_assign (n2pn3, PLUS_EXPR, n2, n3);
3792 116 : insert_before (g);
3793 : }
3794 183 : if (bitint_big_endian)
3795 : {
3796 0 : if (TREE_CODE (n2pn3) == INTEGER_CST)
3797 0 : n2pn3 = int_const_binop (MINUS_EXPR, p, n2pn3);
3798 : else
3799 : {
3800 0 : g = gimple_build_assign (make_ssa_name (sizetype),
3801 : MINUS_EXPR, p, n2pn3);
3802 0 : insert_before (g);
3803 0 : n2pn3 = gimple_assign_lhs (g);
3804 : }
3805 : }
3806 : /* For LSHIFT_EXPR, we can use handle_operand with non-INTEGER_CST
3807 : idx even to access the most significant partial limb. */
3808 183 : m_var_msb = true;
3809 183 : if (integer_zerop (n3))
3810 : /* For n3 == 0 p >= n2 + n3 is always true for all valid shift
3811 : counts. Emit if (true) condition that can be optimized later. */
3812 45 : g = gimple_build_cond (NE_EXPR, boolean_true_node, boolean_false_node,
3813 : NULL_TREE, NULL_TREE);
3814 138 : else if (bitint_big_endian)
3815 0 : g = gimple_build_cond (NE_EXPR, n2pn3, size_int (-1), NULL_TREE,
3816 : NULL_TREE);
3817 : else
3818 138 : g = gimple_build_cond (LE_EXPR, n2pn3, p, NULL_TREE, NULL_TREE);
3819 183 : edge edge_true, edge_false;
3820 183 : if_then (g, profile_probability::likely (), edge_true, edge_false);
 : /* Main loop, walking from the most significant limb down:
 : dst[idx] = (src[idx - n2] << n1) | (src[idx - n2 - n3] >> n4). */
3821 183 : tree idx_next;
3822 183 : tree idx = create_loop (bitint_big_endian ? size_zero_node : p,
3823 : &idx_next);
3824 183 : tree idxmn2 = make_ssa_name (sizetype);
3825 183 : tree idxmn2mn3 = make_ssa_name (sizetype);
3826 366 : g = gimple_build_assign (idxmn2,
3827 : bitint_big_endian ? PLUS_EXPR : MINUS_EXPR,
3828 : idx, n2);
3829 183 : insert_before (g);
3830 366 : g = gimple_build_assign (idxmn2mn3,
3831 : bitint_big_endian ? PLUS_EXPR : MINUS_EXPR,
3832 : idxmn2, n3);
3833 183 : insert_before (g);
3834 183 : m_data_cnt = 0;
3835 183 : tree t1 = handle_operand (rhs1, idxmn2);
3836 183 : m_first = false;
3837 183 : g = gimple_build_assign (make_ssa_name (m_limb_type),
3838 : LSHIFT_EXPR, t1, n1);
3839 183 : insert_before (g);
3840 183 : t1 = gimple_assign_lhs (g);
3841 183 : if (!integer_zerop (n3))
3842 : {
3843 138 : m_data_cnt = 0;
3844 138 : tree t2 = handle_operand (rhs1, idxmn2mn3);
3845 138 : g = gimple_build_assign (make_ssa_name (m_limb_type),
3846 : RSHIFT_EXPR, t2, n4);
3847 138 : insert_before (g);
3848 138 : t2 = gimple_assign_lhs (g);
3849 138 : g = gimple_build_assign (make_ssa_name (m_limb_type),
3850 : BIT_IOR_EXPR, t1, t2);
3851 138 : insert_before (g);
3852 138 : t1 = gimple_assign_lhs (g);
3853 : }
3854 183 : tree l = limb_access (TREE_TYPE (lhs), obj, idx, true);
3855 183 : g = gimple_build_assign (l, t1);
3856 183 : insert_before (g);
3857 366 : g = gimple_build_assign (idx_next, PLUS_EXPR, idx,
3858 : bitint_big_endian
3859 183 : ? size_one_node : size_int (-1));
3860 183 : insert_before (g);
3861 183 : tree sn2pn3 = add_cast (ssizetype, n2pn3);
3862 366 : g = gimple_build_cond (bitint_big_endian ? LE_EXPR : GE_EXPR,
3863 : add_cast (ssizetype, idx_next), sn2pn3,
3864 : NULL_TREE, NULL_TREE);
3865 183 : insert_before (g);
 : /* Merge the loop-exit index with the skipped-loop case through
 : a PHI in the join block. */
3866 183 : idx = make_ssa_name (sizetype);
3867 183 : m_gsi = gsi_for_stmt (final_stmt);
3868 183 : gphi *phi = create_phi_node (idx, gsi_bb (m_gsi));
3869 183 : edge_false = find_edge (edge_false->src, gsi_bb (m_gsi));
3870 183 : edge_true = EDGE_PRED (gsi_bb (m_gsi),
3871 : EDGE_PRED (gsi_bb (m_gsi), 0) == edge_false);
3872 183 : add_phi_arg (phi, bitint_big_endian ? size_zero_node : p,
3873 : edge_false, UNKNOWN_LOCATION);
3874 183 : add_phi_arg (phi, idx_next, edge_true, UNKNOWN_LOCATION);
3875 183 : m_data_cnt = 0;
 : /* The "if (n1) dst[idx] = src[idx - n2] << n1" part of the
 : pseudo-code, guarded at runtime by n3 != 0. */
3876 183 : if (!integer_zerop (n3))
3877 : {
3878 138 : g = gimple_build_cond (NE_EXPR, n3, size_zero_node,
3879 : NULL_TREE, NULL_TREE);
3880 138 : if_then (g, profile_probability::likely (), edge_true, edge_false);
3881 138 : idxmn2 = make_ssa_name (sizetype);
3882 276 : g = gimple_build_assign (idxmn2,
3883 : bitint_big_endian ? PLUS_EXPR : MINUS_EXPR,
3884 : idx, n2);
3885 138 : insert_before (g);
3886 138 : m_data_cnt = 0;
3887 138 : t1 = handle_operand (rhs1, idxmn2);
3888 138 : g = gimple_build_assign (make_ssa_name (m_limb_type),
3889 : LSHIFT_EXPR, t1, n1);
3890 138 : insert_before (g);
3891 138 : t1 = gimple_assign_lhs (g);
3892 138 : l = limb_access (TREE_TYPE (lhs), obj, idx, true);
3893 138 : g = gimple_build_assign (l, t1);
3894 138 : insert_before (g);
3895 138 : idx_next = make_ssa_name (sizetype);
3896 276 : g = gimple_build_assign (idx_next, PLUS_EXPR, idx,
3897 : bitint_big_endian
3898 138 : ? size_one_node : size_int (-1));
3899 138 : insert_before (g);
3900 138 : m_gsi = gsi_for_stmt (final_stmt);
3901 138 : tree nidx = make_ssa_name (sizetype);
3902 138 : phi = create_phi_node (nidx, gsi_bb (m_gsi));
3903 138 : edge_false = find_edge (edge_false->src, gsi_bb (m_gsi));
3904 138 : edge_true = EDGE_PRED (gsi_bb (m_gsi),
3905 : EDGE_PRED (gsi_bb (m_gsi), 0) == edge_false);
3906 138 : add_phi_arg (phi, idx, edge_false, UNKNOWN_LOCATION);
3907 138 : add_phi_arg (phi, idx_next, edge_true, UNKNOWN_LOCATION);
3908 138 : idx = nidx;
3909 : }
 : /* Final loop: zero the remaining least significant limbs. */
3910 183 : if (bitint_big_endian)
3911 0 : g = gimple_build_cond (LE_EXPR, idx, p, NULL_TREE, NULL_TREE);
3912 : else
3913 183 : g = gimple_build_cond (GE_EXPR, add_cast (ssizetype, idx),
3914 : ssize_int (0), NULL_TREE, NULL_TREE);
3915 183 : if_then (g, profile_probability::likely (), edge_true, edge_false);
3916 183 : idx = create_loop (idx, &idx_next);
3917 183 : l = limb_access (TREE_TYPE (lhs), obj, idx, true);
3918 183 : g = gimple_build_assign (l, build_zero_cst (m_limb_type));
3919 183 : insert_before (g);
3920 366 : g = gimple_build_assign (idx_next, PLUS_EXPR, idx,
3921 : bitint_big_endian
3922 183 : ? size_one_node : size_int (-1));
3923 183 : insert_before (g);
3924 183 : if (bitint_big_endian)
3925 0 : g = gimple_build_cond (LE_EXPR, idx_next, p, NULL_TREE, NULL_TREE);
3926 : else
3927 183 : g = gimple_build_cond (GE_EXPR, add_cast (ssizetype, idx_next),
3928 : ssize_int (0), NULL_TREE, NULL_TREE);
3929 183 : insert_before (g);
3930 183 : if (bitint_extended && prec % limb_prec != 0)
3931 : {
3932 : /* The most significant limb has been updated either in the
3933 : loop or in the if after it. To simplify the code, just
3934 : read it back from memory and extend. */
3935 0 : m_gsi = gsi_after_labels (edge_false->dest);
3936 0 : idx = bitint_big_endian ? size_zero_node : p;
3937 0 : tree l = limb_access (TREE_TYPE (lhs), obj, idx, true);
3938 0 : tree type = limb_access_type (TREE_TYPE (lhs), idx);
3939 0 : tree v = make_ssa_name (m_limb_type);
3940 0 : g = gimple_build_assign (v, l);
3941 0 : insert_before (g);
3942 0 : v = add_cast (type, v);
3943 0 : l = limb_access (TREE_TYPE (lhs), obj, idx, true);
3944 0 : g = gimple_build_assign (l, add_cast (m_limb_type, v));
3945 0 : insert_before (g);
3946 : }
3947 : }
3948 547 : }
3949 :
3950 : /* Lower large/huge _BitInt multiplication or division. */
3951 :
3952 : void
3953 317 : bitint_large_huge::lower_muldiv_stmt (tree obj, gimple *stmt)
3954 : {
3955 317 : tree rhs1 = gimple_assign_rhs1 (stmt);
3956 317 : tree rhs2 = gimple_assign_rhs2 (stmt);
3957 317 : tree lhs = gimple_assign_lhs (stmt);
3958 317 : tree_code rhs_code = gimple_assign_rhs_code (stmt);
3959 317 : tree type = TREE_TYPE (rhs1);
3960 317 : gcc_assert (TREE_CODE (type) == BITINT_TYPE
3961 : && bitint_precision_kind (type) >= bitint_prec_large);
 : /* Lower the operands to addresses for the library calls below;
 : PREC1/PREC2 receive the corresponding operand precisions. */
3962 317 : int prec = TYPE_PRECISION (type), prec1, prec2;
3963 317 : rhs1 = handle_operand_addr (rhs1, stmt, NULL, &prec1);
3964 317 : rhs2 = handle_operand_addr (rhs2, stmt, NULL, &prec2);
 : /* The result is likewise passed by address, either the partition
 : variable for LHS or the supplied OBJ. */
3965 317 : if (obj == NULL_TREE)
3966 : {
3967 135 : int part = var_to_partition (m_map, lhs);
3968 135 : gcc_assert (m_vars[part] != NULL_TREE);
3969 135 : obj = m_vars[part];
3970 135 : lhs = build_fold_addr_expr (obj);
3971 : }
3972 : else
3973 : {
3974 182 : lhs = build_fold_addr_expr (obj);
3975 182 : lhs = force_gimple_operand_gsi (&m_gsi, lhs, true,
3976 : NULL_TREE, true, GSI_SAME_STMT);
3977 : }
3978 317 : tree sitype = lang_hooks.types.type_for_mode (SImode, 0);
3979 317 : gimple *g;
 : /* Emit the internal function call implemented by libgcc; DIVMODBITINT
 : computes both quotient and remainder, the unused one gets a null
 : pointer and zero precision. */
3980 317 : switch (rhs_code)
3981 : {
3982 178 : case MULT_EXPR:
3983 178 : g = gimple_build_call_internal (IFN_MULBITINT, 6,
3984 178 : lhs, build_int_cst (sitype, prec),
3985 178 : rhs1, build_int_cst (sitype, prec1),
3986 178 : rhs2, build_int_cst (sitype, prec2));
3987 178 : insert_before (g);
3988 178 : break;
3989 91 : case TRUNC_DIV_EXPR:
3990 91 : case EXACT_DIV_EXPR:
3991 91 : g = gimple_build_call_internal (IFN_DIVMODBITINT, 8,
3992 91 : lhs, build_int_cst (sitype, prec),
3993 : null_pointer_node,
3994 : build_int_cst (sitype, 0),
3995 91 : rhs1, build_int_cst (sitype, prec1),
3996 91 : rhs2, build_int_cst (sitype, prec2));
3997 91 : if (!stmt_ends_bb_p (stmt))
3998 90 : gimple_call_set_nothrow (as_a <gcall *> (g), true);
3999 91 : insert_before (g);
4000 91 : break;
4001 48 : case TRUNC_MOD_EXPR:
4002 48 : g = gimple_build_call_internal (IFN_DIVMODBITINT, 8, null_pointer_node,
4003 : build_int_cst (sitype, 0),
4004 48 : lhs, build_int_cst (sitype, prec),
4005 48 : rhs1, build_int_cst (sitype, prec1),
4006 48 : rhs2, build_int_cst (sitype, prec2));
4007 48 : if (!stmt_ends_bb_p (stmt))
4008 45 : gimple_call_set_nothrow (as_a <gcall *> (g), true);
4009 48 : insert_before (g);
4010 48 : break;
4011 0 : default:
4012 0 : gcc_unreachable ();
4013 : }
 : /* If the original statement ended its basic block (could throw),
 : split the block after the new call and duplicate the EH edge so
 : the call keeps the original's exception handling. */
4014 317 : if (stmt_ends_bb_p (stmt))
4015 : {
4016 4 : maybe_duplicate_eh_stmt (g, stmt);
4017 4 : edge e1;
4018 4 : edge_iterator ei;
4019 4 : basic_block bb = gimple_bb (stmt);
4020 :
4021 4 : FOR_EACH_EDGE (e1, ei, bb->succs)
4022 4 : if (e1->flags & EDGE_EH)
4023 : break;
4024 4 : if (e1)
4025 : {
4026 4 : edge e2 = split_block (gsi_bb (m_gsi), g);
4027 4 : m_gsi = gsi_after_labels (e2->dest);
4028 4 : add_eh_edge (e2->src, e1);
4029 : }
4030 : }
4031 317 : }
4032 :
4033 : /* Lower large/huge _BitInt conversion to/from floating point. */
4034 :
4035 : void
4036 305 : bitint_large_huge::lower_float_conv_stmt (tree obj, gimple *stmt)
4037 : {
4038 305 : tree rhs1 = gimple_assign_rhs1 (stmt);
4039 305 : tree lhs = gimple_assign_lhs (stmt);
4040 305 : tree_code rhs_code = gimple_assign_rhs_code (stmt);
4041 305 : tree sitype = lang_hooks.types.type_for_mode (SImode, 0);
4042 305 : gimple *g;
 : /* FIX_TRUNC_EXPR: floating point -> _BitInt. Otherwise it is the
 : _BitInt -> floating point direction. */
4043 305 : if (rhs_code == FIX_TRUNC_EXPR)
4044 : {
 : /* Negated precision tells the library routine the result is
 : signed (matches the IFN_FLOATTOBITINT convention). */
4045 167 : int prec = TYPE_PRECISION (TREE_TYPE (lhs));
4046 167 : if (!TYPE_UNSIGNED (TREE_TYPE (lhs)))
4047 87 : prec = -prec;
 : /* The result is passed by address: either the partition variable
 : backing LHS or the caller-provided OBJ. */
4048 167 : if (obj == NULL_TREE)
4049 : {
4050 129 : int part = var_to_partition (m_map, lhs);
4051 129 : gcc_assert (m_vars[part] != NULL_TREE);
4052 129 : obj = m_vars[part];
4053 129 : lhs = build_fold_addr_expr (obj);
4054 : }
4055 : else
4056 : {
4057 38 : lhs = build_fold_addr_expr (obj);
4058 38 : lhs = force_gimple_operand_gsi (&m_gsi, lhs, true,
4059 : NULL_TREE, true, GSI_SAME_STMT);
4060 : }
4061 167 : scalar_mode from_mode
4062 167 : = as_a <scalar_mode> (TYPE_MODE (TREE_TYPE (rhs1)));
4063 : #ifdef HAVE_SFmode
4064 : /* IEEE single is a full superset of both IEEE half and
4065 : bfloat formats, convert to float first and then to _BitInt
4066 : to avoid the need of another 2 library routines. */
4067 167 : if ((REAL_MODE_FORMAT (from_mode) == &arm_bfloat_half_format
4068 167 : || REAL_MODE_FORMAT (from_mode) == &ieee_half_format)
4069 179 : && REAL_MODE_FORMAT (SFmode) == &ieee_single_format)
4070 : {
4071 12 : tree type = lang_hooks.types.type_for_mode (SFmode, 0);
4072 12 : if (type)
4073 12 : rhs1 = add_cast (type, rhs1);
4074 : }
4075 : #endif
4076 167 : g = gimple_build_call_internal (IFN_FLOATTOBITINT, 3,
4077 167 : lhs, build_int_cst (sitype, prec),
4078 : rhs1);
4079 167 : insert_before (g);
4080 : }
4081 : else
4082 : {
 : /* _BitInt -> float: pass the operand by address and replace the
 : original statement with the internal call producing LHS. */
4083 138 : int prec;
4084 138 : rhs1 = handle_operand_addr (rhs1, stmt, NULL, &prec);
4085 138 : g = gimple_build_call_internal (IFN_BITINTTOFLOAT, 2,
4086 138 : rhs1, build_int_cst (sitype, prec));
4087 138 : gimple_call_set_lhs (g, lhs);
4088 138 : if (!stmt_ends_bb_p (stmt))
4089 137 : gimple_call_set_nothrow (as_a <gcall *> (g), true);
4090 138 : gsi_replace (&m_gsi, g, true);
4091 : }
4092 305 : }
4093 :
4094 : /* Helper method for lower_addsub_overflow and lower_mul_overflow.
4095 : If check_zero is true, caller wants to check if all bits in [start, end)
4096 : are zero, otherwise if bits in [start, end) are either all zero or
4097 : all ones. L is the limb with index LIMB, START and END are measured
4098 : in bits. */
4099 :
4100 : tree
4101 6130 : bitint_large_huge::arith_overflow_extract_bits (unsigned int start,
4102 : unsigned int end, tree l,
4103 : unsigned int limb,
4104 : bool check_zero)
4105 : {
4106 6130 : unsigned startlimb = start / limb_prec;
4107 6130 : unsigned endlimb = (end - 1) / limb_prec;
4108 6130 : gimple *g;
4109 :
 : /* Range covers whole limbs only: nothing to mask or shift. */
4110 6130 : if ((start % limb_prec) == 0 && (end % limb_prec) == 0)
4111 : return l;
 : /* Case 1: the whole [start, end) range lives inside this limb. */
4112 5864 : if (startlimb == endlimb && limb == startlimb)
4113 : {
4114 1981 : if (check_zero)
4115 : {
 : /* Mask out everything outside [start, end); result is zero
 : iff the selected bits are all zero. */
4116 1456 : wide_int w = wi::shifted_mask (start % limb_prec,
4117 1456 : end - start, false, limb_prec);
4118 2912 : g = gimple_build_assign (make_ssa_name (m_limb_type),
4119 : BIT_AND_EXPR, l,
4120 1456 : wide_int_to_tree (m_limb_type, w));
4121 1456 : insert_before (g);
4122 1456 : return gimple_assign_lhs (g);
4123 1456 : }
 : /* all-zero-or-all-ones check: shift the selected bits to the
 : top and arithmetic-shift them back so they are replicated
 : across the limb for the caller's comparison. */
4124 525 : unsigned int shift = start % limb_prec;
4125 525 : if ((end % limb_prec) != 0)
4126 : {
4127 328 : unsigned int lshift = (-end) % limb_prec;
4128 328 : shift += lshift;
4129 328 : g = gimple_build_assign (make_ssa_name (m_limb_type),
4130 : LSHIFT_EXPR, l,
4131 : build_int_cst (unsigned_type_node,
4132 328 : lshift));
4133 328 : insert_before (g);
4134 328 : l = gimple_assign_lhs (g);
4135 : }
4136 525 : l = add_cast (signed_type_for (m_limb_type), l);
4137 525 : g = gimple_build_assign (make_ssa_name (TREE_TYPE (l)),
4138 : RSHIFT_EXPR, l,
4139 525 : build_int_cst (unsigned_type_node, shift));
4140 525 : insert_before (g);
4141 525 : return add_cast (m_limb_type, gimple_assign_lhs (g));
4142 : }
 : /* Case 2: this limb contains the low boundary of the range;
 : discard the bits below START. */
4143 3883 : else if (limb == startlimb)
4144 : {
4145 1881 : if ((start % limb_prec) == 0)
4146 : return l;
 : /* Arithmetic shift for the all-zero-or-all-ones variant so the
 : sign bit is replicated; logical shift for the zero check. */
4147 1795 : if (!check_zero)
4148 917 : l = add_cast (signed_type_for (m_limb_type), l);
4149 1795 : g = gimple_build_assign (make_ssa_name (TREE_TYPE (l)),
4150 : RSHIFT_EXPR, l,
4151 : build_int_cst (unsigned_type_node,
4152 1795 : start % limb_prec));
4153 1795 : insert_before (g);
4154 1795 : l = gimple_assign_lhs (g);
4155 1795 : if (!check_zero)
4156 917 : l = add_cast (m_limb_type, l);
4157 1795 : return l;
4158 : }
 : /* Case 3: this limb contains the high boundary of the range;
 : discard the bits at and above END. */
4159 2002 : else if (limb == endlimb)
4160 : {
4161 1603 : if ((end % limb_prec) == 0)
4162 : return l;
4163 1602 : if (check_zero)
4164 : {
4165 840 : wide_int w = wi::mask (end % limb_prec, false, limb_prec);
4166 1680 : g = gimple_build_assign (make_ssa_name (m_limb_type),
4167 : BIT_AND_EXPR, l,
4168 840 : wide_int_to_tree (m_limb_type, w));
4169 840 : insert_before (g);
4170 840 : return gimple_assign_lhs (g);
4171 840 : }
 : /* all-zero-or-all-ones variant: left shift then arithmetic
 : right shift sign-extends the kept bits across the limb. */
4172 762 : unsigned int shift = (-end) % limb_prec;
4173 762 : g = gimple_build_assign (make_ssa_name (m_limb_type),
4174 : LSHIFT_EXPR, l,
4175 762 : build_int_cst (unsigned_type_node, shift));
4176 762 : insert_before (g);
4177 762 : l = add_cast (signed_type_for (m_limb_type), gimple_assign_lhs (g));
4178 762 : g = gimple_build_assign (make_ssa_name (TREE_TYPE (l)),
4179 : RSHIFT_EXPR, l,
4180 762 : build_int_cst (unsigned_type_node, shift));
4181 762 : insert_before (g);
4182 762 : return add_cast (m_limb_type, gimple_assign_lhs (g));
4183 : }
 : /* Limb lies fully inside the range: use it unchanged. */
4184 : return l;
4185 : }
4187 : /* Helper method for lower_addsub_overflow and lower_mul_overflow. Store
4188 : result including overflow flag into the right locations. */
4189 :
4190 : void
4191 4040 : bitint_large_huge::finish_arith_overflow (tree var, tree obj, tree type,
4192 : tree ovf, tree lhs, tree orig_obj,
4193 : gimple *stmt, unsigned nelts,
4194 : tree_code code)
4195 : {
4196 4040 : gimple *g;
4197 :
4198 4040 : if (obj == NULL_TREE
4199 4040 : && (TREE_CODE (type) != BITINT_TYPE
4200 225 : || bitint_precision_kind (type) < bitint_prec_large))
4201 : {
4202 : /* Add support for 3 or more limbs filled in from normal integral
4203 : type if this assert fails. If no target chooses limb mode smaller
4204 : than half of largest supported normal integral type, this will not
4205 : be needed. */
4206 241 : gcc_assert (TYPE_PRECISION (type) <= 2 * limb_prec);
4207 241 : tree lhs_type = type;
4208 241 : if (TREE_CODE (type) == BITINT_TYPE
4209 241 : && bitint_precision_kind (type) == bitint_prec_middle)
4210 46 : lhs_type = build_nonstandard_integer_type (TYPE_PRECISION (type),
4211 46 : TYPE_UNSIGNED (type));
4212 241 : tree r1 = limb_access (NULL_TREE, var,
4213 : bitint_big_endian
4214 0 : ? size_int (nelts - 1) : size_zero_node, true);
4215 241 : g = gimple_build_assign (make_ssa_name (m_limb_type), r1);
4216 241 : insert_before (g);
4217 241 : r1 = gimple_assign_lhs (g);
4218 241 : if (!useless_type_conversion_p (lhs_type, TREE_TYPE (r1)))
4219 241 : r1 = add_cast (lhs_type, r1);
4220 241 : if (TYPE_PRECISION (lhs_type) > limb_prec)
4221 : {
4222 90 : tree r2 = limb_access (NULL_TREE, var,
4223 : bitint_big_endian
4224 0 : ? size_int (nelts - 2) : size_one_node, true);
4225 90 : g = gimple_build_assign (make_ssa_name (m_limb_type), r2);
4226 90 : insert_before (g);
4227 90 : r2 = gimple_assign_lhs (g);
4228 90 : r2 = add_cast (lhs_type, r2);
4229 90 : g = gimple_build_assign (make_ssa_name (lhs_type), LSHIFT_EXPR, r2,
4230 : build_int_cst (unsigned_type_node,
4231 90 : limb_prec));
4232 90 : insert_before (g);
4233 90 : g = gimple_build_assign (make_ssa_name (lhs_type), BIT_IOR_EXPR, r1,
4234 : gimple_assign_lhs (g));
4235 90 : insert_before (g);
4236 90 : r1 = gimple_assign_lhs (g);
4237 : }
4238 241 : if (lhs_type != type)
4239 46 : r1 = add_cast (type, r1);
4240 241 : ovf = add_cast (lhs_type, ovf);
4241 241 : if (lhs_type != type)
4242 46 : ovf = add_cast (type, ovf);
4243 241 : g = gimple_build_assign (lhs, COMPLEX_EXPR, r1, ovf);
4244 241 : m_gsi = gsi_for_stmt (stmt);
4245 241 : gsi_replace (&m_gsi, g, true);
4246 : }
4247 : else
4248 : {
4249 3799 : unsigned HOST_WIDE_INT obj_nelts = 0;
4250 3799 : tree atype = NULL_TREE;
4251 3799 : if (obj)
4252 : {
4253 3708 : obj_nelts = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (obj))) / limb_prec;
4254 3708 : if (orig_obj == NULL_TREE)
4255 2140 : obj_nelts >>= 1;
4256 3708 : atype = build_array_type_nelts (m_limb_type, obj_nelts);
4257 : }
4258 3799 : if (var && obj)
4259 : {
4260 480 : tree v1, v2;
4261 480 : tree off;
4262 480 : if (orig_obj == NULL_TREE)
4263 : {
4264 0 : off = build_zero_cst (build_pointer_type (TREE_TYPE (obj)));
4265 0 : v1 = build2 (MEM_REF, atype,
4266 : build_fold_addr_expr (unshare_expr (obj)), off);
4267 : }
4268 480 : else if (!useless_type_conversion_p (atype, TREE_TYPE (obj)))
4269 8 : v1 = build1 (VIEW_CONVERT_EXPR, atype, unshare_expr (obj));
4270 : else
4271 472 : v1 = unshare_expr (obj);
4272 480 : off = build_int_cst (build_pointer_type (TREE_TYPE (var)),
4273 : bitint_big_endian
4274 480 : ? (nelts - obj_nelts) * m_limb_size : 0);
4275 480 : v2 = build2 (MEM_REF, atype, build_fold_addr_expr (var), off);
4276 480 : g = gimple_build_assign (v1, v2);
4277 480 : insert_before (g);
4278 : }
4279 3319 : else if (obj && bitint_big_endian && nelts != obj_nelts)
4280 : {
4281 0 : gcc_assert (nelts > obj_nelts);
4282 0 : tree fn = builtin_decl_implicit (BUILT_IN_MEMMOVE);
4283 0 : tree off = build_int_cst (build_pointer_type (TREE_TYPE (obj)),
4284 0 : (nelts - obj_nelts) * m_limb_size);
4285 0 : tree src = build2 (MEM_REF, atype,
4286 : build_fold_addr_expr (unshare_expr (obj)), off);
4287 0 : g = gimple_build_call (fn, 3,
4288 : build_fold_addr_expr (unshare_expr (obj)),
4289 : src, build_int_cst (size_type_node,
4290 0 : obj_nelts * m_limb_size));
4291 0 : insert_before (g);
4292 : }
4293 3799 : if (orig_obj == NULL_TREE && obj)
4294 : {
4295 2140 : ovf = add_cast (m_limb_type, ovf);
4296 2140 : tree l = limb_access (NULL_TREE, obj,
4297 2140 : size_int (bitint_big_endian
4298 : ? obj_nelts * 2 - 1 : obj_nelts),
4299 : true);
4300 2140 : g = gimple_build_assign (l, ovf);
4301 2140 : insert_before (g);
4302 2140 : if (obj_nelts > 1)
4303 : {
4304 2140 : atype = build_array_type_nelts (m_limb_type, obj_nelts - 1);
4305 2140 : tree off = build_int_cst (build_pointer_type (TREE_TYPE (obj)),
4306 2140 : (obj_nelts + !bitint_big_endian)
4307 2140 : * m_limb_size);
4308 2140 : tree v1 = build2 (MEM_REF, atype,
4309 : build_fold_addr_expr (unshare_expr (obj)),
4310 : off);
4311 2140 : g = gimple_build_assign (v1, build_zero_cst (atype));
4312 2140 : insert_before (g);
4313 : }
4314 : }
4315 1659 : else if (TREE_CODE (TREE_TYPE (lhs)) == COMPLEX_TYPE)
4316 : {
4317 1632 : imm_use_iterator ui;
4318 1632 : use_operand_p use_p;
4319 1632 : FOR_EACH_IMM_USE_FAST (use_p, ui, lhs)
4320 : {
4321 1632 : g = USE_STMT (use_p);
4322 1632 : if (!is_gimple_assign (g)
4323 1632 : || gimple_assign_rhs_code (g) != IMAGPART_EXPR)
4324 0 : continue;
4325 1632 : tree lhs2 = gimple_assign_lhs (g);
4326 1632 : gimple *use_stmt;
4327 1632 : single_imm_use (lhs2, &use_p, &use_stmt);
4328 1632 : lhs2 = gimple_assign_lhs (use_stmt);
4329 1632 : gimple_stmt_iterator gsi = gsi_for_stmt (use_stmt);
4330 1632 : if (useless_type_conversion_p (TREE_TYPE (lhs2), TREE_TYPE (ovf)))
4331 1611 : g = gimple_build_assign (lhs2, ovf);
4332 : else
4333 21 : g = gimple_build_assign (lhs2, NOP_EXPR, ovf);
4334 1632 : gsi_replace (&gsi, g, true);
4335 1632 : if (gsi_stmt (m_gsi) == use_stmt)
4336 91 : m_gsi = gsi_for_stmt (g);
4337 1632 : break;
4338 1632 : }
4339 : }
4340 27 : else if (ovf != boolean_false_node)
4341 : {
4342 27 : g = gimple_build_cond (NE_EXPR, ovf, boolean_false_node,
4343 : NULL_TREE, NULL_TREE);
4344 27 : edge edge_true, edge_false;
4345 27 : if_then (g, profile_probability::very_unlikely (),
4346 : edge_true, edge_false);
4347 27 : tree zero = build_zero_cst (TREE_TYPE (lhs));
4348 27 : tree fn = ubsan_build_overflow_builtin (code, m_loc,
4349 27 : TREE_TYPE (lhs),
4350 : zero, zero, NULL);
4351 27 : force_gimple_operand_gsi (&m_gsi, fn, true, NULL_TREE,
4352 : true, GSI_SAME_STMT);
4353 27 : m_gsi = gsi_after_labels (edge_true->dest);
4354 : }
4355 : }
4356 4040 : if (var)
4357 : {
4358 733 : tree clobber = build_clobber (TREE_TYPE (var), CLOBBER_STORAGE_END);
4359 733 : g = gimple_build_assign (var, clobber);
4360 733 : gsi_insert_after (&m_gsi, g, GSI_SAME_STMT);
4361 : }
4362 4040 : }
4363 :
4364 : /* Helper function for lower_addsub_overflow and lower_mul_overflow.
4365 : Given precisions of result TYPE (PREC), argument 0 precision PREC0,
4366 : argument 1 precision PREC1 and minimum precision for the result
4367 : PREC2, compute *START, *END, *CHECK_ZERO and return OVF. */
4368 :
4369 : static tree
4370 4040 : arith_overflow (tree_code code, tree type, int prec, int prec0, int prec1,
4371 : int prec2, unsigned *start, unsigned *end, bool *check_zero)
4372 : {
4373 4040 : *start = 0;
4374 4040 : *end = 0;
4375 4040 : *check_zero = true;
4376 : /* Ignore this special rule for subtraction, even if both
4377 : prec0 >= 0 and prec1 >= 0, their subtraction can be negative
4378 : in infinite precision. */
4379 4040 : if (code != MINUS_EXPR && prec0 >= 0 && prec1 >= 0)
4380 : {
4381 : /* Result in [0, prec2) is unsigned, if prec > prec2,
4382 : all bits above it will be zero. */
4383 626 : if ((prec - !TYPE_UNSIGNED (type)) >= prec2)
4384 0 : return boolean_false_node;
4385 : else
4386 : {
4387 : /* ovf if any of bits in [start, end) is non-zero. */
4388 626 : *start = prec - !TYPE_UNSIGNED (type);
4389 626 : *end = prec2;
4390 : }
4391 : }
4392 3414 : else if (TYPE_UNSIGNED (type))
4393 : {
4394 : /* If result in [0, prec2) is signed and if prec > prec2,
4395 : all bits above it will be sign bit copies. */
4396 1926 : if (prec >= prec2)
4397 : {
4398 : /* ovf if bit prec - 1 is non-zero. */
4399 184 : *start = prec - 1;
4400 184 : *end = prec;
4401 : }
4402 : else
4403 : {
4404 : /* ovf if any of bits in [start, end) is non-zero. */
4405 1742 : *start = prec;
4406 1742 : *end = prec2;
4407 : }
4408 : }
4409 1488 : else if (prec >= prec2)
4410 0 : return boolean_false_node;
4411 : else
4412 : {
4413 : /* ovf if [start, end) bits aren't all zeros or all ones. */
4414 1488 : *start = prec - 1;
4415 1488 : *end = prec2;
4416 1488 : *check_zero = false;
4417 : }
4418 : return NULL_TREE;
4419 : }
4420 :
4421 : /* Lower a .{ADD,SUB}_OVERFLOW call with at least one large/huge _BitInt
4422 : argument or return type _Complex large/huge _BitInt. */
4423 :
4424 : void
4425 2653 : bitint_large_huge::lower_addsub_overflow (tree obj, gimple *stmt)
4426 : {
4427 2653 : tree arg0 = gimple_call_arg (stmt, 0);
4428 2653 : tree arg1 = gimple_call_arg (stmt, 1);
4429 2653 : tree lhs = gimple_call_lhs (stmt);
4430 2653 : gimple *g;
4431 :
4432 2653 : if (!lhs)
4433 : {
4434 0 : gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
4435 0 : gsi_remove (&gsi, true);
4436 0 : return;
4437 : }
4438 2653 : gimple *final_stmt = gsi_stmt (m_gsi);
4439 2653 : tree type = TREE_TYPE (lhs);
4440 2653 : if (TREE_CODE (type) == COMPLEX_TYPE)
4441 2635 : type = TREE_TYPE (type);
4442 2653 : int prec = TYPE_PRECISION (type);
4443 2653 : int prec0 = range_to_prec (arg0, stmt);
4444 2653 : int prec1 = range_to_prec (arg1, stmt);
4445 : /* If PREC0 >= 0 && PREC1 >= 0 and CODE is not MINUS_EXPR, PREC2 is
4446 : the be minimum unsigned precision of any possible operation's
4447 : result, otherwise it is minimum signed precision.
4448 : Some examples:
4449 : If PREC0 or PREC1 is 8, it means that argument is [0, 0xff],
4450 : if PREC0 or PREC1 is 10, it means that argument is [0, 0x3ff],
4451 : if PREC0 or PREC1 is -8, it means that argument is [-0x80, 0x7f],
4452 : if PREC0 or PREC1 is -10, it means that argument is [-0x200, 0x1ff].
4453 : PREC0 CODE PREC1 RESULT PREC2 SIGNED vs. UNSIGNED
4454 : 8 + 8 [0, 0x1fe] 9 UNSIGNED
4455 : 8 + 10 [0, 0x4fe] 11 UNSIGNED
4456 : -8 + -8 [-0x100, 0xfe] 9 SIGNED
4457 : -8 + -10 [-0x280, 0x27e] 11 SIGNED
4458 : 8 + -8 [-0x80, 0x17e] 10 SIGNED
4459 : 8 + -10 [-0x200, 0x2fe] 11 SIGNED
4460 : 10 + -8 [-0x80, 0x47e] 12 SIGNED
4461 : 8 - 8 [-0xff, 0xff] 9 SIGNED
4462 : 8 - 10 [-0x3ff, 0xff] 11 SIGNED
4463 : 10 - 8 [-0xff, 0x3ff] 11 SIGNED
4464 : -8 - -8 [-0xff, 0xff] 9 SIGNED
4465 : -8 - -10 [-0x27f, 0x27f] 11 SIGNED
4466 : -10 - -8 [-0x27f, 0x27f] 11 SIGNED
4467 : 8 - -8 [-0x7f, 0x17f] 10 SIGNED
4468 : 8 - -10 [-0x1ff, 0x2ff] 11 SIGNED
4469 : 10 - -8 [-0x7f, 0x47f] 12 SIGNED
4470 : -8 - 8 [-0x17f, 0x7f] 10 SIGNED
4471 : -8 - 10 [-0x47f, 0x7f] 12 SIGNED
4472 : -10 - 8 [-0x2ff, 0x1ff] 11 SIGNED */
4473 2653 : int prec2 = MAX (prec0 < 0 ? -prec0 : prec0,
4474 : prec1 < 0 ? -prec1 : prec1);
4475 : /* If operands are either both signed or both unsigned,
4476 : we need just one additional bit. */
4477 3692 : prec2 = (((prec0 < 0) == (prec1 < 0)
4478 : /* If one operand is signed and one unsigned and
4479 : the signed one has larger precision, we need
4480 : just one extra bit, otherwise two. */
4481 702 : || (prec0 < 0 ? (prec2 == -prec0 && prec2 != prec1)
4482 337 : : (prec2 == -prec1 && prec2 != prec0)))
4483 2653 : ? prec2 + 1 : prec2 + 2);
4484 2653 : int prec3 = MAX (prec0 < 0 ? -prec0 : prec0,
4485 : prec1 < 0 ? -prec1 : prec1);
4486 2653 : prec3 = MAX (prec3, prec);
4487 2653 : tree var = NULL_TREE;
4488 2653 : tree orig_obj = obj;
4489 2653 : if (obj == NULL_TREE
4490 1673 : && TREE_CODE (type) == BITINT_TYPE
4491 1572 : && bitint_precision_kind (type) >= bitint_prec_large
4492 1460 : && m_names
4493 4089 : && bitmap_bit_p (m_names, SSA_NAME_VERSION (lhs)))
4494 : {
4495 1381 : int part = var_to_partition (m_map, lhs);
4496 1381 : gcc_assert (m_vars[part] != NULL_TREE);
4497 1381 : obj = m_vars[part];
4498 1381 : if (TREE_TYPE (lhs) == type)
4499 2 : orig_obj = obj;
4500 : }
4501 2653 : if (TREE_CODE (type) != BITINT_TYPE
4502 2653 : || bitint_precision_kind (type) < bitint_prec_large)
4503 : {
4504 213 : unsigned HOST_WIDE_INT nelts = CEIL (prec, limb_prec);
4505 213 : tree atype = build_array_type_nelts (m_limb_type, nelts);
4506 213 : var = create_tmp_var (atype);
4507 : }
4508 :
4509 2653 : enum tree_code code;
4510 2653 : switch (gimple_call_internal_fn (stmt))
4511 : {
4512 : case IFN_ADD_OVERFLOW:
4513 : case IFN_UBSAN_CHECK_ADD:
4514 : code = PLUS_EXPR;
4515 : break;
4516 1359 : case IFN_SUB_OVERFLOW:
4517 1359 : case IFN_UBSAN_CHECK_SUB:
4518 1359 : code = MINUS_EXPR;
4519 1359 : break;
4520 0 : default:
4521 0 : gcc_unreachable ();
4522 : }
4523 2653 : unsigned start, end;
4524 2653 : bool check_zero;
4525 2653 : tree ovf = arith_overflow (code, type, prec, prec0, prec1, prec2,
4526 : &start, &end, &check_zero);
4527 :
4528 2653 : unsigned startlimb, endlimb;
4529 2653 : if (ovf)
4530 : {
4531 : startlimb = ~0U;
4532 : endlimb = ~0U;
4533 : }
4534 : else
4535 : {
4536 2653 : startlimb = start / limb_prec;
4537 2653 : endlimb = (end - 1) / limb_prec;
4538 : }
4539 :
4540 2653 : int prec4 = ovf != NULL_TREE ? prec : prec3;
4541 2653 : bitint_prec_kind kind = bitint_precision_kind (prec4);
4542 2653 : unsigned cnt, rem = 0, fin = 0, nelts;
4543 2653 : tree idx = NULL_TREE, idx_first = NULL_TREE, idx_next = NULL_TREE;
4544 5306 : bool last_ovf = (ovf == NULL_TREE
4545 2653 : && CEIL (prec2, limb_prec) > CEIL (prec3, limb_prec));
4546 2653 : if (kind != bitint_prec_huge)
4547 1539 : nelts = cnt = CEIL (prec4, limb_prec) + last_ovf;
4548 : else
4549 : {
4550 1114 : rem = prec4 % (2 * limb_prec);
4551 1114 : fin = (prec4 - rem) / limb_prec;
4552 1114 : cnt = 2 + CEIL (rem, limb_prec) + last_ovf;
4553 1114 : nelts = fin + cnt - 2;
4554 1114 : idx = idx_first = create_loop (bitint_big_endian
4555 1114 : ? size_int (nelts - 1) : size_zero_node,
4556 : &idx_next);
4557 : }
4558 :
4559 2653 : if (kind == bitint_prec_huge)
4560 1114 : m_upwards_2limb = fin;
4561 2653 : m_upwards = true;
4562 :
4563 2653 : tree type0 = TREE_TYPE (arg0);
4564 2653 : tree type1 = TREE_TYPE (arg1);
4565 2653 : int prec5 = prec3;
4566 2653 : if (bitint_precision_kind (prec5) < bitint_prec_large)
4567 10 : prec5 = MAX (TYPE_PRECISION (type0), TYPE_PRECISION (type1));
4568 2653 : if (TYPE_PRECISION (type0) < prec5)
4569 : {
4570 146 : type0 = build_bitint_type (prec5, TYPE_UNSIGNED (type0));
4571 146 : if (TREE_CODE (arg0) == INTEGER_CST)
4572 27 : arg0 = fold_convert (type0, arg0);
4573 : }
4574 2653 : if (TYPE_PRECISION (type1) < prec5)
4575 : {
4576 156 : type1 = build_bitint_type (prec5, TYPE_UNSIGNED (type1));
4577 156 : if (TREE_CODE (arg1) == INTEGER_CST)
4578 76 : arg1 = fold_convert (type1, arg1);
4579 : }
4580 2653 : unsigned int data_cnt = 0;
4581 2653 : tree last_rhs1 = NULL_TREE, last_rhs2 = NULL_TREE;
4582 2653 : tree cmp = build_zero_cst (m_limb_type);
4583 2653 : unsigned prec_limbs = CEIL ((unsigned) prec, limb_prec);
4584 2653 : tree ovf_out = NULL_TREE, cmp_out = NULL_TREE;
4585 11552 : for (unsigned i = 0; i < cnt; i++)
4586 : {
4587 8899 : m_data_cnt = 0;
4588 8899 : tree rhs1, rhs2;
4589 8899 : if (kind != bitint_prec_huge)
4590 5303 : idx = size_int (bitint_big_endian ? nelts - 1 - i : i);
4591 3596 : else if (i >= 2)
4592 1368 : idx = size_int (bitint_big_endian ? nelts + 1 - fin - i : fin + i - 2);
4593 8899 : if (!last_ovf || i < cnt - 1)
4594 : {
4595 7957 : tree idx0 = idx, idx1 = idx;
4596 7957 : if (bitint_big_endian
4597 7957 : && CEIL ((unsigned) TYPE_PRECISION (type0), limb_prec) != nelts)
4598 : {
4599 0 : HOST_WIDE_INT diff
4600 0 : = ((HOST_WIDE_INT) CEIL (TYPE_PRECISION (type0), limb_prec)
4601 0 : - (HOST_WIDE_INT) nelts);
4602 0 : if (tree_fits_uhwi_p (idx))
4603 0 : idx0 = size_int (tree_to_uhwi (idx) + diff);
4604 : else
4605 : {
4606 0 : idx0 = make_ssa_name (sizetype);
4607 0 : g = gimple_build_assign (idx0, PLUS_EXPR, idx,
4608 0 : size_int (diff));
4609 0 : insert_before (g);
4610 : }
4611 : }
4612 7957 : if (type0 != TREE_TYPE (arg0))
4613 334 : rhs1 = handle_cast (type0, arg0, idx0);
4614 : else
4615 7623 : rhs1 = handle_operand (arg0, idx0);
4616 7957 : if (bitint_big_endian
4617 7957 : && CEIL ((unsigned) TYPE_PRECISION (type1), limb_prec) != nelts)
4618 : {
4619 0 : HOST_WIDE_INT diff
4620 0 : = ((HOST_WIDE_INT) CEIL (TYPE_PRECISION (type1), limb_prec)
4621 0 : - (HOST_WIDE_INT) nelts);
4622 0 : if (tree_fits_uhwi_p (idx))
4623 0 : idx1 = size_int (tree_to_uhwi (idx) + diff);
4624 : else
4625 : {
4626 0 : idx1 = make_ssa_name (sizetype);
4627 0 : g = gimple_build_assign (idx1, PLUS_EXPR, idx,
4628 0 : size_int (diff));
4629 0 : insert_before (g);
4630 : }
4631 : }
4632 7957 : if (type1 != TREE_TYPE (arg1))
4633 250 : rhs2 = handle_cast (type1, arg1, idx1);
4634 : else
4635 7707 : rhs2 = handle_operand (arg1, idx1);
4636 7957 : if (i == 0)
4637 2653 : data_cnt = m_data_cnt;
4638 7957 : if (!useless_type_conversion_p (m_limb_type, TREE_TYPE (rhs1)))
4639 1907 : rhs1 = add_cast (m_limb_type, rhs1);
4640 7957 : if (!useless_type_conversion_p (m_limb_type, TREE_TYPE (rhs2)))
4641 1907 : rhs2 = add_cast (m_limb_type, rhs2);
4642 : last_rhs1 = rhs1;
4643 : last_rhs2 = rhs2;
4644 : }
4645 : else
4646 : {
4647 942 : m_data_cnt = data_cnt;
4648 942 : if (TYPE_UNSIGNED (type0) || prec0 >= 0)
4649 421 : rhs1 = build_zero_cst (m_limb_type);
4650 : else
4651 : {
4652 521 : rhs1 = add_cast (signed_type_for (m_limb_type), last_rhs1);
4653 521 : if (TREE_CODE (rhs1) == INTEGER_CST)
4654 52 : rhs1 = build_int_cst (m_limb_type,
4655 74 : tree_int_cst_sgn (rhs1) < 0 ? -1 : 0);
4656 : else
4657 : {
4658 938 : tree lpm1 = build_int_cst (unsigned_type_node,
4659 469 : limb_prec - 1);
4660 469 : g = gimple_build_assign (make_ssa_name (TREE_TYPE (rhs1)),
4661 : RSHIFT_EXPR, rhs1, lpm1);
4662 469 : insert_before (g);
4663 469 : rhs1 = add_cast (m_limb_type, gimple_assign_lhs (g));
4664 : }
4665 : }
4666 942 : if (TYPE_UNSIGNED (type1) || prec1 >= 0)
4667 543 : rhs2 = build_zero_cst (m_limb_type);
4668 : else
4669 : {
4670 399 : rhs2 = add_cast (signed_type_for (m_limb_type), last_rhs2);
4671 399 : if (TREE_CODE (rhs2) == INTEGER_CST)
4672 114 : rhs2 = build_int_cst (m_limb_type,
4673 153 : tree_int_cst_sgn (rhs2) < 0 ? -1 : 0);
4674 : else
4675 : {
4676 570 : tree lpm1 = build_int_cst (unsigned_type_node,
4677 285 : limb_prec - 1);
4678 285 : g = gimple_build_assign (make_ssa_name (TREE_TYPE (rhs2)),
4679 : RSHIFT_EXPR, rhs2, lpm1);
4680 285 : insert_before (g);
4681 285 : rhs2 = add_cast (m_limb_type, gimple_assign_lhs (g));
4682 : }
4683 : }
4684 : }
4685 8899 : tree rhs = handle_plus_minus (code, rhs1, rhs2, idx);
4686 8899 : if (ovf != boolean_false_node)
4687 : {
4688 8899 : if (tree_fits_uhwi_p (idx))
4689 : {
4690 6671 : unsigned limb = tree_to_uhwi (idx);
4691 6671 : if (bitint_big_endian)
4692 0 : limb = nelts - 1 - limb;
4693 6671 : if (limb >= startlimb && limb <= endlimb)
4694 : {
4695 3274 : tree l = arith_overflow_extract_bits (start, end, rhs,
4696 : limb, check_zero);
4697 3274 : tree this_ovf = make_ssa_name (boolean_type_node);
4698 3274 : if (ovf == NULL_TREE && !check_zero)
4699 : {
4700 879 : cmp = l;
4701 879 : g = gimple_build_assign (make_ssa_name (m_limb_type),
4702 : PLUS_EXPR, l,
4703 : build_int_cst (m_limb_type, 1));
4704 879 : insert_before (g);
4705 879 : g = gimple_build_assign (this_ovf, GT_EXPR,
4706 : gimple_assign_lhs (g),
4707 : build_int_cst (m_limb_type, 1));
4708 : }
4709 : else
4710 2395 : g = gimple_build_assign (this_ovf, NE_EXPR, l, cmp);
4711 3274 : insert_before (g);
4712 3274 : if (ovf == NULL_TREE)
4713 : ovf = this_ovf;
4714 : else
4715 : {
4716 1012 : tree b = make_ssa_name (boolean_type_node);
4717 1012 : g = gimple_build_assign (b, BIT_IOR_EXPR, ovf, this_ovf);
4718 1012 : insert_before (g);
4719 1012 : ovf = b;
4720 : }
4721 : }
4722 : }
4723 2228 : else if (startlimb < fin)
4724 : {
4725 782 : if (m_first && startlimb + 2 < fin)
4726 : {
4727 288 : tree data_out;
4728 288 : ovf = prepare_data_in_out (boolean_false_node, idx, &data_out);
4729 288 : ovf_out = m_data.pop ();
4730 288 : m_data.pop ();
4731 288 : if (!check_zero)
4732 : {
4733 149 : cmp = prepare_data_in_out (cmp, idx, &data_out);
4734 149 : cmp_out = m_data.pop ();
4735 149 : m_data.pop ();
4736 : }
4737 : }
4738 782 : if (i != 0 || startlimb != fin - 1)
4739 : {
4740 767 : tree_code cmp_code;
4741 767 : bool single_comparison
4742 767 : = (startlimb + 2 >= fin || (startlimb & 1) != (i & 1));
4743 : if (!single_comparison)
4744 : cmp_code = GE_EXPR;
4745 479 : else if ((startlimb & 1) == (i & 1))
4746 : cmp_code = EQ_EXPR;
4747 : else
4748 376 : cmp_code = GT_EXPR;
4749 767 : if (bitint_big_endian)
4750 0 : g = gimple_build_cond (swap_tree_comparison (cmp_code),
4751 0 : idx, size_int (nelts - 1
4752 : - startlimb),
4753 : NULL_TREE, NULL_TREE);
4754 : else
4755 767 : g = gimple_build_cond (cmp_code, idx, size_int (startlimb),
4756 : NULL_TREE, NULL_TREE);
4757 767 : edge edge_true_true, edge_true_false, edge_false;
4758 767 : gimple *g2 = NULL;
4759 767 : if (!single_comparison)
4760 288 : g2 = gimple_build_cond (NE_EXPR, idx,
4761 288 : size_int (bitint_big_endian
4762 : ? nelts - 1 - startlimb
4763 : : startlimb),
4764 : NULL_TREE, NULL_TREE);
4765 767 : if_then_if_then_else (g, g2, profile_probability::likely (),
4766 : profile_probability::likely (),
4767 : edge_true_true, edge_true_false,
4768 : edge_false);
4769 767 : unsigned tidx = startlimb + (cmp_code == GT_EXPR);
4770 767 : tree l = arith_overflow_extract_bits (start, end, rhs, tidx,
4771 : check_zero);
4772 767 : tree this_ovf = make_ssa_name (boolean_type_node);
4773 767 : if (cmp_code != GT_EXPR && !check_zero)
4774 : {
4775 153 : g = gimple_build_assign (make_ssa_name (m_limb_type),
4776 : PLUS_EXPR, l,
4777 : build_int_cst (m_limb_type, 1));
4778 153 : insert_before (g);
4779 153 : g = gimple_build_assign (this_ovf, GT_EXPR,
4780 : gimple_assign_lhs (g),
4781 : build_int_cst (m_limb_type, 1));
4782 : }
4783 : else
4784 614 : g = gimple_build_assign (this_ovf, NE_EXPR, l, cmp);
4785 767 : insert_before (g);
4786 767 : if (cmp_code == GT_EXPR)
4787 : {
4788 376 : tree t = make_ssa_name (boolean_type_node);
4789 376 : g = gimple_build_assign (t, BIT_IOR_EXPR, ovf, this_ovf);
4790 376 : insert_before (g);
4791 376 : this_ovf = t;
4792 : }
4793 767 : tree this_ovf2 = NULL_TREE;
4794 767 : if (!single_comparison)
4795 : {
4796 288 : m_gsi = gsi_after_labels (edge_true_true->src);
4797 288 : tree t = make_ssa_name (boolean_type_node);
4798 288 : g = gimple_build_assign (t, NE_EXPR, rhs, cmp);
4799 288 : insert_before (g);
4800 288 : this_ovf2 = make_ssa_name (boolean_type_node);
4801 288 : g = gimple_build_assign (this_ovf2, BIT_IOR_EXPR,
4802 : ovf, t);
4803 288 : insert_before (g);
4804 : }
4805 767 : m_gsi = gsi_after_labels (edge_true_false->dest);
4806 767 : tree t;
4807 767 : if (i == 1 && ovf_out)
4808 : t = ovf_out;
4809 : else
4810 479 : t = make_ssa_name (boolean_type_node);
4811 767 : gphi *phi = create_phi_node (t, edge_true_false->dest);
4812 767 : add_phi_arg (phi, this_ovf, edge_true_false,
4813 : UNKNOWN_LOCATION);
4814 767 : add_phi_arg (phi, ovf ? ovf
4815 : : boolean_false_node, edge_false,
4816 : UNKNOWN_LOCATION);
4817 767 : if (edge_true_true)
4818 288 : add_phi_arg (phi, this_ovf2, edge_true_true,
4819 : UNKNOWN_LOCATION);
4820 767 : ovf = t;
4821 767 : if (!check_zero && cmp_code != GT_EXPR)
4822 : {
4823 153 : t = cmp_out ? cmp_out : make_ssa_name (m_limb_type);
4824 153 : phi = create_phi_node (t, edge_true_false->dest);
4825 153 : add_phi_arg (phi, l, edge_true_false, UNKNOWN_LOCATION);
4826 153 : add_phi_arg (phi, cmp, edge_false, UNKNOWN_LOCATION);
4827 153 : if (edge_true_true)
4828 149 : add_phi_arg (phi, cmp, edge_true_true,
4829 : UNKNOWN_LOCATION);
4830 : cmp = t;
4831 : }
4832 : }
4833 : }
4834 : }
4835 :
4836 8899 : if (var || obj)
4837 : {
4838 8671 : if (tree_fits_uhwi_p (idx)
4839 6561 : && (bitint_big_endian
4840 6561 : ? nelts - 1 - tree_to_uhwi (idx)
4841 6561 : : tree_to_uhwi (idx)) >= prec_limbs)
4842 : ;
4843 7335 : else if (!tree_fits_uhwi_p (idx)
4844 2110 : && (unsigned) prec < (fin - (i == 0)) * limb_prec)
4845 : {
4846 1314 : bool single_comparison
4847 657 : = (((unsigned) prec % limb_prec) == 0
4848 499 : || prec_limbs + 1 >= fin
4849 1087 : || (prec_limbs & 1) == (i & 1));
4850 657 : if (bitint_big_endian)
4851 0 : g = gimple_build_cond (GE_EXPR, idx,
4852 0 : size_int (nelts - prec_limbs),
4853 : NULL_TREE, NULL_TREE);
4854 : else
4855 657 : g = gimple_build_cond (LE_EXPR, idx, size_int (prec_limbs - 1),
4856 : NULL_TREE, NULL_TREE);
4857 657 : gimple *g2 = NULL;
4858 657 : if (!single_comparison)
4859 215 : g2 = gimple_build_cond (EQ_EXPR, idx,
4860 215 : size_int (bitint_big_endian
4861 : ? nelts - prec_limbs
4862 : : prec_limbs - 1),
4863 : NULL_TREE, NULL_TREE);
4864 657 : edge edge_true_true, edge_true_false, edge_false;
4865 657 : if_then_if_then_else (g, g2, profile_probability::likely (),
4866 : profile_probability::unlikely (),
4867 : edge_true_true, edge_true_false,
4868 : edge_false);
4869 657 : tree idxl = idx;
4870 657 : if (bitint_big_endian && prec_limbs != nelts)
4871 : {
4872 0 : HOST_WIDE_INT diff = ((HOST_WIDE_INT) prec_limbs
4873 0 : - (HOST_WIDE_INT) nelts);
4874 0 : if (tree_fits_uhwi_p (idx))
4875 0 : idxl = size_int (tree_to_uhwi (idx) + diff);
4876 : else
4877 : {
4878 0 : idxl = make_ssa_name (sizetype);
4879 0 : g = gimple_build_assign (idxl, PLUS_EXPR, idx,
4880 0 : size_int (diff));
4881 0 : insert_before (g);
4882 : }
4883 : }
4884 946 : tree l = limb_access (type, var ? var : obj, idxl, true);
4885 657 : g = gimple_build_assign (l, rhs);
4886 657 : insert_before (g);
4887 657 : if (!single_comparison)
4888 : {
4889 215 : m_gsi = gsi_after_labels (edge_true_true->src);
4890 215 : tree plm1idx = size_int (bitint_big_endian
4891 : ? 0 : prec_limbs - 1);
4892 215 : tree plm1type = limb_access_type (type, plm1idx);
4893 215 : l = limb_access (type, var ? var : obj, plm1idx, true);
4894 215 : if (!useless_type_conversion_p (plm1type, TREE_TYPE (rhs)))
4895 215 : rhs = add_cast (plm1type, rhs);
4896 215 : if (!useless_type_conversion_p (TREE_TYPE (l),
4897 215 : TREE_TYPE (rhs)))
4898 215 : rhs = add_cast (TREE_TYPE (l), rhs);
4899 215 : g = gimple_build_assign (l, rhs);
4900 215 : insert_before (g);
4901 : }
4902 657 : m_gsi = gsi_after_labels (edge_true_false->dest);
4903 657 : }
4904 : else
4905 : {
4906 6678 : tree idxl = idx;
4907 6678 : if (bitint_big_endian && prec_limbs != nelts)
4908 : {
4909 0 : HOST_WIDE_INT diff = ((HOST_WIDE_INT) prec_limbs
4910 0 : - (HOST_WIDE_INT) nelts);
4911 0 : if (tree_fits_uhwi_p (idx))
4912 0 : idxl = size_int (tree_to_uhwi (idx) + diff);
4913 : else
4914 : {
4915 0 : idxl = make_ssa_name (sizetype);
4916 0 : g = gimple_build_assign (idxl, PLUS_EXPR, idx,
4917 0 : size_int (diff));
4918 0 : insert_before (g);
4919 : }
4920 : }
4921 13327 : tree l = limb_access (type, var ? var : obj, idxl, true);
4922 6678 : if (!useless_type_conversion_p (TREE_TYPE (l), TREE_TYPE (rhs)))
4923 0 : rhs = add_cast (TREE_TYPE (l), rhs);
4924 6678 : g = gimple_build_assign (l, rhs);
4925 6678 : insert_before (g);
4926 : }
4927 : }
4928 8899 : m_first = false;
4929 8899 : if (kind == bitint_prec_huge && i <= 1)
4930 : {
4931 2228 : if (i == 0)
4932 : {
4933 1114 : idx = make_ssa_name (sizetype);
4934 1114 : g = gimple_build_assign (idx, PLUS_EXPR, idx_first,
4935 : bitint_big_endian
4936 0 : ? size_int (-1) : size_one_node);
4937 1114 : insert_before (g);
4938 : }
4939 : else
4940 : {
4941 1114 : g = gimple_build_assign (idx_next, PLUS_EXPR, idx_first,
4942 2228 : size_int (bitint_big_endian ? -2 : 2));
4943 1114 : insert_before (g);
4944 1114 : if (bitint_big_endian)
4945 0 : g = gimple_build_cond (NE_EXPR, idx_first,
4946 0 : size_int (nelts + 1 - fin),
4947 : NULL_TREE, NULL_TREE);
4948 : else
4949 1114 : g = gimple_build_cond (NE_EXPR, idx_next, size_int (fin),
4950 : NULL_TREE, NULL_TREE);
4951 1114 : insert_before (g);
4952 1114 : m_gsi = gsi_for_stmt (final_stmt);
4953 1114 : m_bb = NULL;
4954 : }
4955 : }
4956 : }
4957 :
4958 2653 : finish_arith_overflow (var, obj, type, ovf, lhs, orig_obj, stmt,
4959 : prec_limbs, code);
4960 : }
4961 :
4962 : /* Lower a .MUL_OVERFLOW call with at least one large/huge _BitInt
4963 : argument or return type _Complex large/huge _BitInt. */
4964 :
4965 : void
4966 1387 : bitint_large_huge::lower_mul_overflow (tree obj, gimple *stmt)
4967 : {
4968 1387 : tree arg0 = gimple_call_arg (stmt, 0);
4969 1387 : tree arg1 = gimple_call_arg (stmt, 1);
4970 1387 : tree lhs = gimple_call_lhs (stmt);
4971 1387 : if (!lhs)
4972 : {
4973 0 : gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
4974 0 : gsi_remove (&gsi, true);
4975 0 : return;
4976 : }
4977 1387 : gimple *final_stmt = gsi_stmt (m_gsi);
4978 1387 : tree type = TREE_TYPE (lhs);
4979 1387 : if (TREE_CODE (type) == COMPLEX_TYPE)
4980 1378 : type = TREE_TYPE (type);
4981 1387 : int prec = TYPE_PRECISION (type), prec0, prec1;
4982 1387 : arg0 = handle_operand_addr (arg0, stmt, NULL, &prec0);
4983 1387 : arg1 = handle_operand_addr (arg1, stmt, NULL, &prec1);
4984 1387 : int prec2 = ((prec0 < 0 ? -prec0 : prec0)
4985 1387 : + (prec1 < 0 ? -prec1 : prec1));
4986 1387 : if (prec0 == 1 || prec1 == 1)
4987 18 : --prec2;
4988 1387 : tree var = NULL_TREE;
4989 1387 : tree orig_obj = obj;
4990 1387 : bool force_var = false;
4991 1387 : if (obj == NULL_TREE
4992 802 : && TREE_CODE (type) == BITINT_TYPE
4993 796 : && bitint_precision_kind (type) >= bitint_prec_large
4994 774 : && m_names
4995 2149 : && bitmap_bit_p (m_names, SSA_NAME_VERSION (lhs)))
4996 : {
4997 762 : int part = var_to_partition (m_map, lhs);
4998 762 : gcc_assert (m_vars[part] != NULL_TREE);
4999 762 : obj = m_vars[part];
5000 762 : if (TREE_TYPE (lhs) == type)
5001 1 : orig_obj = obj;
5002 : }
5003 625 : else if (obj != NULL_TREE && DECL_P (obj))
5004 : {
5005 1731 : for (int i = 0; i < 2; ++i)
5006 : {
5007 1154 : tree arg = i ? arg1 : arg0;
5008 1154 : if (TREE_CODE (arg) == ADDR_EXPR)
5009 1154 : arg = TREE_OPERAND (arg, 0);
5010 1154 : if (get_base_address (arg) == obj)
5011 : {
5012 : force_var = true;
5013 : break;
5014 : }
5015 : }
5016 : }
5017 1387 : if (obj == NULL_TREE
5018 1387 : || force_var
5019 1347 : || TREE_CODE (type) != BITINT_TYPE
5020 1347 : || bitint_precision_kind (type) < bitint_prec_large
5021 3495 : || prec2 > (CEIL (prec, limb_prec) * limb_prec * (orig_obj ? 1 : 2)))
5022 : {
5023 520 : unsigned HOST_WIDE_INT nelts = CEIL (MAX (prec, prec2), limb_prec);
5024 520 : tree atype = build_array_type_nelts (m_limb_type, nelts);
5025 520 : var = create_tmp_var (atype);
5026 : }
5027 1387 : tree addr = build_fold_addr_expr (var ? var : obj);
5028 1387 : addr = force_gimple_operand_gsi (&m_gsi, addr, true,
5029 : NULL_TREE, true, GSI_SAME_STMT);
5030 1387 : tree sitype = lang_hooks.types.type_for_mode (SImode, 0);
5031 1387 : gimple *g
5032 2774 : = gimple_build_call_internal (IFN_MULBITINT, 6,
5033 : addr, build_int_cst (sitype,
5034 1449 : MAX (prec2, prec)),
5035 1387 : arg0, build_int_cst (sitype, prec0),
5036 1387 : arg1, build_int_cst (sitype, prec1));
5037 1387 : insert_before (g);
5038 :
5039 1387 : unsigned start, end;
5040 1387 : bool check_zero;
5041 1387 : tree ovf = arith_overflow (MULT_EXPR, type, prec, prec0, prec1, prec2,
5042 : &start, &end, &check_zero);
5043 1387 : if (ovf == NULL_TREE)
5044 : {
5045 1387 : unsigned startlimb = start / limb_prec;
5046 1387 : unsigned endlimb = (end - 1) / limb_prec;
5047 1387 : unsigned nelts = CEIL (MAX (prec, prec2), limb_prec);
5048 1387 : unsigned cnt;
5049 1387 : bool use_loop = false;
5050 1387 : if (startlimb == endlimb)
5051 : cnt = 1;
5052 1114 : else if (startlimb + 1 == endlimb)
5053 : cnt = 2;
5054 943 : else if ((end % limb_prec) == 0)
5055 : {
5056 : cnt = 2;
5057 : use_loop = true;
5058 : }
5059 : else
5060 : {
5061 702 : cnt = 3;
5062 702 : use_loop = startlimb + 2 < endlimb;
5063 : }
5064 702 : if (cnt == 1)
5065 : {
5066 480 : tree l = limb_access (NULL_TREE, var ? var : obj,
5067 273 : size_int (bitint_big_endian
5068 : ? nelts - 1 - startlimb
5069 : : startlimb), true);
5070 273 : g = gimple_build_assign (make_ssa_name (m_limb_type), l);
5071 273 : insert_before (g);
5072 273 : l = arith_overflow_extract_bits (start, end, gimple_assign_lhs (g),
5073 : startlimb, check_zero);
5074 273 : ovf = make_ssa_name (boolean_type_node);
5075 273 : if (check_zero)
5076 233 : g = gimple_build_assign (ovf, NE_EXPR, l,
5077 : build_zero_cst (m_limb_type));
5078 : else
5079 : {
5080 40 : g = gimple_build_assign (make_ssa_name (m_limb_type),
5081 : PLUS_EXPR, l,
5082 : build_int_cst (m_limb_type, 1));
5083 40 : insert_before (g);
5084 40 : g = gimple_build_assign (ovf, GT_EXPR, gimple_assign_lhs (g),
5085 : build_int_cst (m_limb_type, 1));
5086 : }
5087 273 : insert_before (g);
5088 : }
5089 : else
5090 : {
5091 1114 : basic_block edge_bb = NULL;
5092 1114 : gimple_stmt_iterator gsi = m_gsi;
5093 1114 : gsi_prev (&gsi);
5094 1114 : edge e = split_block (gsi_bb (gsi), gsi_stmt (gsi));
5095 1114 : edge_bb = e->src;
5096 1114 : m_gsi = gsi_end_bb (edge_bb);
5097 :
5098 1114 : tree cmp = build_zero_cst (m_limb_type);
5099 4044 : for (unsigned i = 0; i < cnt; i++)
5100 : {
5101 2930 : tree idx, idx_next = NULL_TREE;
5102 2930 : if (i == 0)
5103 1114 : idx = size_int (bitint_big_endian
5104 : ? nelts - 1 - startlimb : startlimb);
5105 1816 : else if (i == 2)
5106 702 : idx = size_int (bitint_big_endian
5107 : ? nelts - 1 - endlimb : endlimb);
5108 1114 : else if (use_loop)
5109 595 : idx = create_loop (size_int (bitint_big_endian
5110 : ? nelts - startlimb - 2
5111 : : startlimb + 1), &idx_next);
5112 : else
5113 519 : idx = size_int (bitint_big_endian
5114 : ? nelts - startlimb - 2 : startlimb + 1);
5115 4673 : tree l = limb_access (NULL_TREE, var ? var : obj, idx, true);
5116 2930 : g = gimple_build_assign (make_ssa_name (m_limb_type), l);
5117 2930 : insert_before (g);
5118 2930 : l = gimple_assign_lhs (g);
5119 2930 : if (i == 0 || i == 2)
5120 2518 : l = arith_overflow_extract_bits (start, end, l,
5121 : i == 0 ? startlimb : endlimb,
5122 : check_zero);
5123 1816 : if (i == 0 && !check_zero)
5124 : {
5125 416 : cmp = l;
5126 416 : g = gimple_build_assign (make_ssa_name (m_limb_type),
5127 : PLUS_EXPR, l,
5128 : build_int_cst (m_limb_type, 1));
5129 416 : insert_before (g);
5130 416 : g = gimple_build_cond (GT_EXPR, gimple_assign_lhs (g),
5131 : build_int_cst (m_limb_type, 1),
5132 : NULL_TREE, NULL_TREE);
5133 : }
5134 : else
5135 2514 : g = gimple_build_cond (NE_EXPR, l, cmp, NULL_TREE, NULL_TREE);
5136 2930 : insert_before (g);
5137 2930 : edge e1 = split_block (gsi_bb (m_gsi), g);
5138 2930 : e1->flags = EDGE_FALSE_VALUE;
5139 2930 : edge e2 = make_edge (e1->src, gimple_bb (final_stmt),
5140 : EDGE_TRUE_VALUE);
5141 2930 : e1->probability = profile_probability::likely ();
5142 2930 : e2->probability = e1->probability.invert ();
5143 2930 : if (i == 0)
5144 1114 : set_immediate_dominator (CDI_DOMINATORS, e2->dest, e2->src);
5145 2930 : m_gsi = gsi_after_labels (e1->dest);
5146 2930 : if (i == 1 && use_loop)
5147 : {
5148 595 : g = gimple_build_assign (idx_next, PLUS_EXPR, idx,
5149 : bitint_big_endian
5150 0 : ? size_int (-1) : size_one_node);
5151 595 : insert_before (g);
5152 595 : if (bitint_big_endian)
5153 0 : g = gimple_build_cond (NE_EXPR, idx,
5154 0 : size_int (nelts - endlimb
5155 : - (cnt == 2)),
5156 : NULL_TREE, NULL_TREE);
5157 : else
5158 595 : g = gimple_build_cond (NE_EXPR, idx_next,
5159 595 : size_int (endlimb + (cnt == 2)),
5160 : NULL_TREE, NULL_TREE);
5161 595 : insert_before (g);
5162 595 : edge true_edge, false_edge;
5163 595 : extract_true_false_edges_from_block (gsi_bb (m_gsi),
5164 : &true_edge,
5165 : &false_edge);
5166 595 : m_gsi = gsi_after_labels (false_edge->dest);
5167 595 : m_bb = NULL;
5168 : }
5169 : }
5170 :
5171 1114 : ovf = make_ssa_name (boolean_type_node);
5172 1114 : basic_block bb = gimple_bb (final_stmt);
5173 1114 : gphi *phi = create_phi_node (ovf, bb);
5174 1114 : edge e1 = find_edge (gsi_bb (m_gsi), bb);
5175 1114 : edge_iterator ei;
5176 5158 : FOR_EACH_EDGE (e, ei, bb->preds)
5177 : {
5178 4044 : tree val = e == e1 ? boolean_false_node : boolean_true_node;
5179 4044 : add_phi_arg (phi, val, e, UNKNOWN_LOCATION);
5180 : }
5181 1114 : m_gsi = gsi_for_stmt (final_stmt);
5182 : }
5183 : }
5184 :
5185 1387 : finish_arith_overflow (var, obj, type, ovf, lhs, orig_obj, stmt,
5186 1387 : CEIL (MAX (prec, prec2), limb_prec), MULT_EXPR);
5187 : }
5188 :
5189 : /* Lower REALPART_EXPR or IMAGPART_EXPR stmt extracting part of result from
5190 : .{ADD,SUB,MUL}_OVERFLOW call. */
5191 :
5192 : void
5193 5720 : bitint_large_huge::lower_cplxpart_stmt (tree obj, gimple *stmt)
5194 : {
5195 5720 : tree rhs1 = gimple_assign_rhs1 (stmt);
 : /* Strip the {REAL,IMAG}PART_EXPR wrapper to get the complex operand.  */
5196 5720 : rhs1 = TREE_OPERAND (rhs1, 0);
5197 5720 : if (obj == NULL_TREE)
5198 : {
 : /* No destination variable given by the caller; use the variable
 backing the coalesced partition of the statement's lhs.  */
5199 5717 : int part = var_to_partition (m_map, gimple_assign_lhs (stmt));
5200 5717 : gcc_assert (m_vars[part] != NULL_TREE);
5201 : obj = m_vars[part];
5202 : }
5203 5720 : if (TREE_CODE (rhs1) == SSA_NAME
5204 5720 : && (m_names == NULL
5205 5719 : || !bitmap_bit_p (m_names, SSA_NAME_VERSION (rhs1))))
5206 : {
 : /* RHS1 has no partition of its own; lower its defining
 .{ADD,SUB,MUL}_OVERFLOW call so it stores directly into OBJ.  */
5207 1541 : lower_call (obj, SSA_NAME_DEF_STMT (rhs1));
5208 1541 : return;
5209 : }
 : /* Otherwise copy the requested half out of the variable backing
 RHS1's partition.  */
5210 4179 : int part = var_to_partition (m_map, rhs1);
5211 4179 : gcc_assert (m_vars[part] != NULL_TREE);
5212 4179 : tree var = m_vars[part];
 : /* Number of limbs in the extracted (non-complex) part.  */
5213 4179 : unsigned HOST_WIDE_INT nelts
5214 4179 : = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (obj))) / limb_prec;
5215 4179 : tree atype = build_array_type_nelts (m_limb_type, nelts);
5216 4179 : if (!useless_type_conversion_p (atype, TREE_TYPE (obj)))
5217 0 : obj = build1 (VIEW_CONVERT_EXPR, atype, obj);
 : /* REALPART_EXPR reads the first half of VAR (byte offset 0),
 IMAGPART_EXPR the second half (offset nelts * m_limb_size).  */
5218 4179 : tree off = build_int_cst (build_pointer_type (TREE_TYPE (var)),
5219 4179 : gimple_assign_rhs_code (stmt) == REALPART_EXPR
5220 4179 : ? 0 : nelts * m_limb_size);
5221 4179 : tree v2 = build2 (MEM_REF, atype, build_fold_addr_expr (var), off);
 : /* Emit an aggregate copy of the selected half into OBJ.  */
5222 4179 : gimple *g = gimple_build_assign (obj, v2);
5223 4179 : insert_before (g);
5224 : }
5225 :
5226 : /* Lower COMPLEX_EXPR stmt. */
5227 :
5228 : void
5229 18 : bitint_large_huge::lower_complexexpr_stmt (gimple *stmt)
5230 : {
5231 18 : tree lhs = gimple_assign_lhs (stmt);
5232 18 : tree rhs1 = gimple_assign_rhs1 (stmt);
5233 18 : tree rhs2 = gimple_assign_rhs2 (stmt);
 : /* Destination is the variable backing the lhs partition; the real
 part is stored into its first half, imaginary part right after.  */
5234 18 : int part = var_to_partition (m_map, lhs);
5235 18 : gcc_assert (m_vars[part] != NULL_TREE);
5236 18 : lhs = m_vars[part];
 : /* Number of limbs of one (non-complex) part.  */
5237 18 : unsigned HOST_WIDE_INT nelts
5238 18 : = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (rhs1))) / limb_prec;
5239 18 : tree atype = build_array_type_nelts (m_limb_type, nelts);
5240 18 : tree zero = build_zero_cst (build_pointer_type (TREE_TYPE (lhs)));
 : /* V1 is the real-part destination at byte offset 0.  */
5241 18 : tree v1 = build2 (MEM_REF, atype, build_fold_addr_expr (lhs), zero);
5242 18 : tree v2;
5243 18 : if (TREE_CODE (rhs1) == SSA_NAME)
5244 : {
 : /* Source of the real part is the variable of RHS1's partition.  */
5245 18 : part = var_to_partition (m_map, rhs1);
5246 18 : gcc_assert (m_vars[part] != NULL_TREE);
5247 : v2 = m_vars[part];
5248 : }
5249 0 : else if (integer_zerop (rhs1))
5250 0 : v2 = build_zero_cst (atype);
5251 : else
 : /* Other constants are forced into the constant pool.  */
5252 0 : v2 = tree_output_constant_def (rhs1);
5253 18 : if (!useless_type_conversion_p (atype, TREE_TYPE (v2)))
5254 18 : v2 = build1 (VIEW_CONVERT_EXPR, atype, v2);
5255 18 : g = gimple_build_assign (v1, v2);
5256 18 : insert_before (g);
 : /* Imaginary-part destination starts right after the real part.  */
5257 18 : tree off = fold_convert (build_pointer_type (TREE_TYPE (lhs)),
5258 : TYPE_SIZE_UNIT (atype));
5259 18 : v1 = build2 (MEM_REF, atype, build_fold_addr_expr (lhs), off);
5260 18 : if (TREE_CODE (rhs2) == SSA_NAME)
5261 : {
5262 0 : part = var_to_partition (m_map, rhs2);
5263 0 : gcc_assert (m_vars[part] != NULL_TREE);
5264 : v2 = m_vars[part];
5265 : }
5266 18 : else if (integer_zerop (rhs2))
5267 18 : v2 = build_zero_cst (atype);
5268 : else
5269 0 : v2 = tree_output_constant_def (rhs2);
5270 18 : if (!useless_type_conversion_p (atype, TREE_TYPE (v2)))
5271 0 : v2 = build1 (VIEW_CONVERT_EXPR, atype, v2);
5272 18 : g = gimple_build_assign (v1, v2);
5273 18 : insert_before (g);
5274 18 : }
5275 :
5276 : /* Lower a .{CLZ,CTZ,CLRSB,FFS,PARITY,POPCOUNT} call with one large/huge _BitInt
5277 : argument. */
5278 :
5279 : void
5280 91 : bitint_large_huge::lower_bit_query (gimple *stmt)
5281 : {
5282 91 : tree arg0 = gimple_call_arg (stmt, 0);
 : /* ARG1, when present, is the value .CLZ/.CTZ should yield for a zero
 ARG0; NULL_TREE means zero input invokes undefined behavior.  */
5283 91 : tree arg1 = (gimple_call_num_args (stmt) == 2
5284 91 : ? gimple_call_arg (stmt, 1) : NULL_TREE);
5285 91 : tree lhs = gimple_call_lhs (stmt);
5286 91 : gimple *g;
5287 :
5288 91 : if (!lhs)
5289 : {
 : /* Result unused; just delete the call.  */
5290 0 : gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
5291 0 : gsi_remove (&gsi, true);
5292 0 : return;
5293 : }
5294 91 : tree type = TREE_TYPE (arg0);
5295 91 : gcc_assert (TREE_CODE (type) == BITINT_TYPE);
5296 91 : bitint_prec_kind kind = bitint_precision_kind (type);
5297 91 : gcc_assert (kind >= bitint_prec_large);
5298 91 : enum internal_fn ifn = gimple_call_internal_fn (stmt);
5299 91 : enum built_in_function fcode = END_BUILTINS;
 : /* Pick the builtin variant (int/long/long long) whose precision
 matches a single limb; one of them must match.  */
5300 91 : gcc_assert (TYPE_PRECISION (unsigned_type_node) == limb_prec
5301 : || TYPE_PRECISION (long_unsigned_type_node) == limb_prec
5302 : || TYPE_PRECISION (long_long_unsigned_type_node) == limb_prec);
5303 91 : switch (ifn)
5304 : {
5305 25 : case IFN_CLZ:
5306 25 : if (TYPE_PRECISION (unsigned_type_node) == limb_prec)
5307 : fcode = BUILT_IN_CLZ;
5308 25 : else if (TYPE_PRECISION (long_unsigned_type_node) == limb_prec)
5309 : fcode = BUILT_IN_CLZL;
5310 : else
5311 0 : fcode = BUILT_IN_CLZLL;
5312 : break;
5313 10 : case IFN_FFS:
5314 : /* .FFS (X) is .CTZ (X, -1) + 1, though under the hood
5315 : we don't add the addend at the end. */
5316 10 : arg1 = integer_zero_node;
5317 : /* FALLTHRU */
5318 37 : case IFN_CTZ:
5319 37 : if (TYPE_PRECISION (unsigned_type_node) == limb_prec)
5320 : fcode = BUILT_IN_CTZ;
5321 37 : else if (TYPE_PRECISION (long_unsigned_type_node) == limb_prec)
5322 : fcode = BUILT_IN_CTZL;
5323 : else
5324 0 : fcode = BUILT_IN_CTZLL;
 : /* CTZ/FFS scan from the least significant limb upwards.  */
5325 37 : m_upwards = true;
5326 37 : break;
5327 8 : case IFN_CLRSB:
5328 8 : if (TYPE_PRECISION (unsigned_type_node) == limb_prec)
5329 : fcode = BUILT_IN_CLRSB;
5330 8 : else if (TYPE_PRECISION (long_unsigned_type_node) == limb_prec)
5331 : fcode = BUILT_IN_CLRSBL;
5332 : else
5333 0 : fcode = BUILT_IN_CLRSBLL;
5334 : break;
5335 11 : case IFN_PARITY:
5336 11 : if (TYPE_PRECISION (unsigned_type_node) == limb_prec)
5337 : fcode = BUILT_IN_PARITY;
5338 11 : else if (TYPE_PRECISION (long_unsigned_type_node) == limb_prec)
5339 : fcode = BUILT_IN_PARITYL;
5340 : else
5341 0 : fcode = BUILT_IN_PARITYLL;
5342 11 : m_upwards = true;
5343 11 : break;
5344 10 : case IFN_POPCOUNT:
5345 10 : if (TYPE_PRECISION (unsigned_type_node) == limb_prec)
5346 : fcode = BUILT_IN_POPCOUNT;
5347 10 : else if (TYPE_PRECISION (long_unsigned_type_node) == limb_prec)
5348 : fcode = BUILT_IN_POPCOUNTL;
5349 : else
5350 0 : fcode = BUILT_IN_POPCOUNTLL;
5351 10 : m_upwards = true;
5352 10 : break;
5353 0 : default:
5354 0 : gcc_unreachable ();
5355 : }
5356 91 : tree fndecl = builtin_decl_explicit (fcode), res = NULL_TREE;
5357 91 : unsigned cnt = 0, rem = 0, end = 0, prec = TYPE_PRECISION (type);
5358 91 : unsigned nelts = CEIL (prec, limb_prec);
 : /* Per-iteration bookkeeping: for CTZ/FFS/CLZ the early-exit edge E
 taken when the answer is found in that limb, the limb VALue to feed
 the builtin, and the bit-position ADDEND of that limb; CLRSB uses
 two edges per iteration (hence 2 * cnt entries below).  */
5359 91 : struct bq_details { edge e; tree val, addend; } *bqp = NULL;
5360 91 : basic_block edge_bb = NULL;
5361 91 : if (m_upwards)
5362 : {
 : /* CTZ/FFS/PARITY/POPCOUNT: walk limbs from least significant up.
 bitint_prec_large is fully unrolled; bitint_prec_huge uses a loop
 over pairs of limbs (2 * limb_prec per iteration) plus up to two
 trailing limbs handled separately.  */
5363 58 : tree idx = NULL_TREE, idx_first = NULL_TREE, idx_next = NULL_TREE;
5364 58 : if (kind == bitint_prec_large)
5365 : cnt = nelts;
5366 : else
5367 : {
5368 32 : rem = (prec % (2 * limb_prec));
5369 32 : end = (prec - rem) / limb_prec;
5370 32 : cnt = 2 + CEIL (rem, limb_prec);
5371 32 : idx = idx_first = create_loop (bitint_big_endian
5372 32 : ? size_int (nelts - 1)
5373 : : size_zero_node, &idx_next);
5374 : }
5375 :
5376 58 : if (ifn == IFN_CTZ || ifn == IFN_FFS)
5377 : {
 : /* CTZ/FFS exit as soon as a nonzero limb is seen; split off a
 block so early-exit edges can target the final statement.  */
5378 37 : gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
5379 37 : gsi_prev (&gsi);
5380 37 : edge e = split_block (gsi_bb (gsi), gsi_stmt (gsi));
5381 37 : edge_bb = e->src;
5382 37 : if (kind == bitint_prec_large)
5383 32 : m_gsi = gsi_end_bb (edge_bb);
5384 37 : bqp = XALLOCAVEC (struct bq_details, cnt);
5385 : }
5386 : else
5387 21 : m_after_stmt = stmt;
5388 58 : if (kind != bitint_prec_large)
5389 32 : m_upwards_2limb = end;
5390 :
5391 214 : for (unsigned i = 0; i < cnt; i++)
5392 : {
5393 156 : m_data_cnt = 0;
 : /* Compute the limb index for this iteration; inside the huge
 loop (i < 2) IDX comes from the loop PHI instead.  */
5394 156 : if (kind == bitint_prec_large)
5395 80 : idx = size_int (bitint_big_endian ? nelts - 1 - i : i);
5396 76 : else if (i >= 2)
5397 12 : idx = size_int (bitint_big_endian
5398 : ? nelts - 1 - end - (i > 2) : end + (i > 2));
5399 :
5400 156 : tree rhs1 = handle_operand (arg0, idx);
5401 156 : if (!useless_type_conversion_p (m_limb_type, TREE_TYPE (rhs1)))
5402 : {
5403 26 : if (!TYPE_UNSIGNED (TREE_TYPE (rhs1)))
5404 4 : rhs1 = add_cast (unsigned_type_for (TREE_TYPE (rhs1)), rhs1);
5405 26 : rhs1 = add_cast (m_limb_type, rhs1);
5406 : }
5407 :
 : /* IN/OUT thread the running accumulator (xor for parity, sum for
 popcount, bit offset for ctz/ffs) through loop iterations.  */
5408 156 : tree in, out, tem;
5409 156 : if (ifn == IFN_PARITY)
5410 30 : in = prepare_data_in_out (build_zero_cst (m_limb_type), idx, &out);
5411 126 : else if (ifn == IFN_FFS)
5412 26 : in = prepare_data_in_out (integer_one_node, idx, &out);
5413 : else
5414 100 : in = prepare_data_in_out (integer_zero_node, idx, &out);
5415 :
5416 156 : switch (ifn)
5417 : {
5418 98 : case IFN_CTZ:
5419 98 : case IFN_FFS:
 : /* If this limb is nonzero, jump to the merge block; the PHIs
 built after the loop pick up VAL and ADDEND from BQP.  */
5420 98 : g = gimple_build_cond (NE_EXPR, rhs1,
5421 : build_zero_cst (m_limb_type),
5422 : NULL_TREE, NULL_TREE);
5423 98 : insert_before (g);
5424 98 : edge e1, e2;
5425 98 : e1 = split_block (gsi_bb (m_gsi), g);
5426 98 : e1->flags = EDGE_FALSE_VALUE;
5427 98 : e2 = make_edge (e1->src, gimple_bb (stmt), EDGE_TRUE_VALUE);
5428 98 : e1->probability = profile_probability::unlikely ();
5429 98 : e2->probability = e1->probability.invert ();
5430 98 : if (i == 0)
5431 37 : set_immediate_dominator (CDI_DOMINATORS, e2->dest, e2->src);
5432 98 : m_gsi = gsi_after_labels (e1->dest);
5433 98 : bqp[i].e = e2;
5434 98 : bqp[i].val = rhs1;
5435 98 : if (tree_fits_uhwi_p (idx))
5436 56 : bqp[i].addend
5437 56 : = build_int_cst (integer_type_node,
5438 : (bitint_big_endian
5439 56 : ? nelts - 1 - tree_to_uhwi (idx)
5440 56 : : tree_to_uhwi (idx)) * limb_prec
5441 56 : + (ifn == IFN_FFS));
5442 : else
5443 : {
 : /* Variable index (inside the huge loop): the addend is the
 running bit offset, bumped by limb_prec per iteration.  */
5444 42 : bqp[i].addend = in;
5445 42 : if (i == 1)
5446 21 : res = out;
5447 : else
5448 21 : res = make_ssa_name (integer_type_node);
5449 42 : g = gimple_build_assign (res, PLUS_EXPR, in,
5450 : build_int_cst (integer_type_node,
5451 42 : limb_prec));
5452 42 : insert_before (g);
5453 42 : m_data[m_data_cnt] = res;
5454 : }
5455 : break;
5456 30 : case IFN_PARITY:
 : /* XOR all limbs together; a single builtin call afterwards
 computes the parity of the combined limb.  */
5457 30 : if (!integer_zerop (in))
5458 : {
5459 25 : if (kind == bitint_prec_huge && i == 1)
5460 6 : res = out;
5461 : else
5462 19 : res = make_ssa_name (m_limb_type);
5463 25 : g = gimple_build_assign (res, BIT_XOR_EXPR, in, rhs1);
5464 25 : insert_before (g);
5465 : }
5466 : else
5467 : res = rhs1;
5468 30 : m_data[m_data_cnt] = res;
5469 30 : break;
5470 28 : case IFN_POPCOUNT:
 : /* Call the per-limb popcount builtin and accumulate the sum.  */
5471 28 : g = gimple_build_call (fndecl, 1, rhs1);
5472 28 : tem = make_ssa_name (integer_type_node);
5473 28 : gimple_call_set_lhs (g, tem);
5474 28 : insert_before (g);
5475 28 : if (!integer_zerop (in))
5476 : {
5477 23 : if (kind == bitint_prec_huge && i == 1)
5478 5 : res = out;
5479 : else
5480 18 : res = make_ssa_name (integer_type_node);
5481 23 : g = gimple_build_assign (res, PLUS_EXPR, in, tem);
5482 23 : insert_before (g);
5483 : }
5484 : else
5485 : res = tem;
5486 28 : m_data[m_data_cnt] = res;
5487 28 : break;
5488 0 : default:
5489 0 : gcc_unreachable ();
5490 : }
5491 :
5492 156 : m_first = false;
5493 156 : if (kind == bitint_prec_huge && i <= 1)
5494 : {
 : /* First two iterations build the body of the 2-limbs-per-pass
 loop: advance the index and, after the second limb, emit the
 latch condition and continue after the loop.  */
5495 64 : if (i == 0)
5496 : {
5497 32 : idx = make_ssa_name (sizetype);
5498 32 : g = gimple_build_assign (idx, PLUS_EXPR, idx_first,
5499 : bitint_big_endian
5500 0 : ? size_int (-1) : size_one_node);
5501 32 : insert_before (g);
5502 : }
5503 : else
5504 : {
5505 32 : g = gimple_build_assign (idx_next, PLUS_EXPR, idx_first,
5506 64 : size_int (bitint_big_endian
5507 : ? -2 : 2));
5508 32 : insert_before (g);
5509 32 : if (bitint_big_endian)
5510 0 : g = gimple_build_cond (NE_EXPR, idx_first,
5511 0 : size_int (cnt - 1),
5512 : NULL_TREE, NULL_TREE);
5513 : else
5514 32 : g = gimple_build_cond (NE_EXPR, idx_next, size_int (end),
5515 : NULL_TREE, NULL_TREE);
5516 32 : insert_before (g);
5517 32 : if (ifn == IFN_CTZ || ifn == IFN_FFS)
5518 21 : m_gsi = gsi_after_labels (edge_bb);
5519 : else
5520 11 : m_gsi = gsi_for_stmt (stmt);
5521 32 : m_bb = NULL;
5522 : }
5523 : }
5524 : }
5525 : }
5526 : else
5527 : {
 : /* CLZ/CLRSB: walk limbs from the most significant downwards.  */
5528 33 : tree idx = NULL_TREE, idx_next = NULL_TREE, first = NULL_TREE;
5529 33 : int sub_one = 0;
5530 33 : if (kind == bitint_prec_large)
5531 : cnt = nelts;
5532 : else
5533 : {
5534 16 : rem = prec % limb_prec;
5535 16 : if (rem == 0 && (!TYPE_UNSIGNED (type) || ifn == IFN_CLRSB))
5536 : rem = limb_prec;
5537 16 : end = (prec - rem) / limb_prec;
5538 16 : cnt = 1 + (rem != 0);
 : /* CLRSB counts redundant sign bits, i.e. one less than the
 number of leading sign-identical bits.  */
5539 16 : if (ifn == IFN_CLRSB)
5540 4 : sub_one = 1;
5541 : }
5542 :
5543 33 : gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
5544 33 : gsi_prev (&gsi);
5545 33 : edge e = split_block (gsi_bb (gsi), gsi_stmt (gsi));
5546 33 : edge_bb = e->src;
5547 33 : m_gsi = gsi_end_bb (edge_bb);
5548 :
5549 33 : if (ifn == IFN_CLZ)
5550 25 : bqp = XALLOCAVEC (struct bq_details, cnt);
5551 : else
5552 : {
 : /* CLRSB needs two exits per limb (sign-change within the limb
 vs. limb differing from the most significant one).  */
5553 8 : gsi = gsi_for_stmt (stmt);
5554 8 : gsi_prev (&gsi);
5555 8 : e = split_block (gsi_bb (gsi), gsi_stmt (gsi));
5556 8 : edge_bb = e->src;
5557 8 : bqp = XALLOCAVEC (struct bq_details, 2 * cnt);
5558 : }
5559 :
5560 110 : for (unsigned i = 0; i < cnt; i++)
5561 : {
5562 77 : m_data_cnt = 0;
5563 77 : if (kind == bitint_prec_large)
5564 51 : idx = size_int (bitint_big_endian ? i : cnt - i - 1);
5565 26 : else if (i == cnt - 1)
5566 16 : idx = create_loop (size_int (bitint_big_endian ? i : end - 1),
5567 : &idx_next);
5568 : else
5569 10 : idx = bitint_big_endian ? size_zero_node : size_int (end);
5570 :
5571 77 : tree rhs1 = handle_operand (arg0, idx);
5572 77 : if (!useless_type_conversion_p (m_limb_type, TREE_TYPE (rhs1)))
5573 : {
 : /* CLZ wants zero-extension of a partial limb, CLRSB wants
 sign-extension, before widening to the limb type.  */
5574 17 : if (ifn == IFN_CLZ && !TYPE_UNSIGNED (TREE_TYPE (rhs1)))
5575 0 : rhs1 = add_cast (unsigned_type_for (TREE_TYPE (rhs1)), rhs1);
5576 17 : else if (ifn == IFN_CLRSB && TYPE_UNSIGNED (TREE_TYPE (rhs1)))
5577 0 : rhs1 = add_cast (signed_type_for (TREE_TYPE (rhs1)), rhs1);
5578 17 : rhs1 = add_cast (m_limb_type, rhs1);
5579 : }
5580 :
5581 77 : if (ifn == IFN_CLZ)
5582 : {
 : /* First nonzero limb from the top determines the result.  */
5583 57 : g = gimple_build_cond (NE_EXPR, rhs1,
5584 : build_zero_cst (m_limb_type),
5585 : NULL_TREE, NULL_TREE);
5586 57 : insert_before (g);
5587 57 : edge e1 = split_block (gsi_bb (m_gsi), g);
5588 57 : e1->flags = EDGE_FALSE_VALUE;
5589 57 : edge e2 = make_edge (e1->src, gimple_bb (stmt), EDGE_TRUE_VALUE);
5590 57 : e1->probability = profile_probability::unlikely ();
5591 57 : e2->probability = e1->probability.invert ();
5592 57 : if (i == 0)
5593 25 : set_immediate_dominator (CDI_DOMINATORS, e2->dest, e2->src);
5594 57 : m_gsi = gsi_after_labels (e1->dest);
5595 57 : bqp[i].e = e2;
5596 57 : bqp[i].val = rhs1;
5597 : }
5598 : else
5599 : {
5600 20 : if (i == 0)
5601 : {
 : /* CLRSB: FIRST is the most significant limb; it stops the
 scan early iff it isn't all-zeros or all-ones (i.e.
 rhs1 + 1 > 1 unsigned).  */
5602 8 : first = rhs1;
5603 8 : g = gimple_build_assign (make_ssa_name (m_limb_type),
5604 : PLUS_EXPR, rhs1,
5605 : build_int_cst (m_limb_type, 1));
5606 8 : insert_before (g);
5607 8 : g = gimple_build_cond (GT_EXPR, gimple_assign_lhs (g),
5608 : build_int_cst (m_limb_type, 1),
5609 : NULL_TREE, NULL_TREE);
5610 8 : insert_before (g);
5611 : }
5612 : else
5613 : {
 : /* Later limbs: if the top bit differs from FIRST's sign
 (xor is negative), the count ends inside this limb;
 if the limb differs from FIRST at all, it ends at its
 boundary.  */
5614 12 : g = gimple_build_assign (make_ssa_name (m_limb_type),
5615 : BIT_XOR_EXPR, rhs1, first);
5616 12 : insert_before (g);
5617 12 : tree stype = signed_type_for (m_limb_type);
5618 12 : g = gimple_build_cond (LT_EXPR,
5619 : add_cast (stype,
5620 : gimple_assign_lhs (g)),
5621 : build_zero_cst (stype),
5622 : NULL_TREE, NULL_TREE);
5623 12 : insert_before (g);
5624 12 : edge e1 = split_block (gsi_bb (m_gsi), g);
5625 12 : e1->flags = EDGE_FALSE_VALUE;
5626 12 : edge e2 = make_edge (e1->src, gimple_bb (stmt),
5627 : EDGE_TRUE_VALUE);
5628 12 : e1->probability = profile_probability::unlikely ();
5629 12 : e2->probability = e1->probability.invert ();
5630 12 : if (i == 1)
5631 8 : set_immediate_dominator (CDI_DOMINATORS, e2->dest,
5632 : e2->src);
5633 12 : m_gsi = gsi_after_labels (e1->dest);
5634 12 : bqp[2 * i].e = e2;
5635 12 : g = gimple_build_cond (NE_EXPR, rhs1, first,
5636 : NULL_TREE, NULL_TREE);
5637 12 : insert_before (g);
5638 : }
5639 20 : edge e1 = split_block (gsi_bb (m_gsi), g);
5640 20 : e1->flags = EDGE_FALSE_VALUE;
5641 20 : edge e2 = make_edge (e1->src, edge_bb, EDGE_TRUE_VALUE);
5642 20 : e1->probability = profile_probability::unlikely ();
5643 20 : e2->probability = e1->probability.invert ();
5644 20 : if (i == 0)
5645 8 : set_immediate_dominator (CDI_DOMINATORS, e2->dest, e2->src);
5646 20 : m_gsi = gsi_after_labels (e1->dest);
5647 20 : bqp[2 * i + 1].e = e2;
5648 20 : bqp[i].val = rhs1;
5649 : }
 : /* Bit-position addend for the limb: constant when the index is
 known, otherwise a counter carried through the loop.  */
5650 77 : if (tree_fits_uhwi_p (idx))
5651 122 : bqp[i].addend
5652 61 : = build_int_cst (integer_type_node,
5653 61 : (int) prec
5654 61 : - (((int) (bitint_big_endian
5655 0 : ? nelts - 1 - tree_to_uhwi (idx)
5656 61 : : tree_to_uhwi (idx)) + 1)
5657 61 : * limb_prec) - sub_one);
5658 : else
5659 : {
5660 16 : tree in, out;
5661 16 : in = build_int_cst (integer_type_node, rem - sub_one);
5662 16 : m_first = true;
5663 16 : in = prepare_data_in_out (in, idx, &out);
5664 16 : out = m_data[m_data_cnt + 1];
5665 16 : bqp[i].addend = in;
5666 16 : g = gimple_build_assign (out, PLUS_EXPR, in,
5667 : build_int_cst (integer_type_node,
5668 16 : limb_prec));
5669 16 : insert_before (g);
5670 16 : m_data[m_data_cnt] = out;
5671 : }
5672 :
5673 77 : m_first = false;
5674 77 : if (kind == bitint_prec_huge && i == cnt - 1)
5675 : {
 : /* Close the downward loop over the remaining limbs.  */
5676 32 : g = gimple_build_assign (idx_next, PLUS_EXPR, idx,
5677 : bitint_big_endian
5678 16 : ? size_one_node : size_int (-1));
5679 16 : insert_before (g);
5680 16 : g = gimple_build_cond (NE_EXPR, idx,
5681 : bitint_big_endian
5682 0 : ? size_int (nelts - 1) : size_zero_node,
5683 : NULL_TREE, NULL_TREE);
5684 16 : insert_before (g);
5685 16 : edge true_edge, false_edge;
5686 16 : extract_true_false_edges_from_block (gsi_bb (m_gsi),
5687 : &true_edge, &false_edge);
5688 16 : m_gsi = gsi_after_labels (false_edge->dest);
5689 16 : m_bb = NULL;
5690 : }
5691 : }
5692 : }
 : /* Merge the per-limb exits and emit the final builtin call plus the
 bit-position addend to form the overall result.  */
5693 91 : switch (ifn)
5694 : {
5695 62 : case IFN_CLZ:
5696 62 : case IFN_CTZ:
5697 62 : case IFN_FFS:
 : /* PHI1 selects the limb that decided the result, PHI2 its bit
 offset addend; the fall-through edge (all limbs zero) is either
 unreachable or yields ARG1 via PHI3.  */
5698 62 : gphi *phi1, *phi2, *phi3;
5699 62 : basic_block bb;
5700 62 : bb = gsi_bb (m_gsi);
5701 62 : remove_edge (find_edge (bb, gimple_bb (stmt)));
5702 62 : phi1 = create_phi_node (make_ssa_name (m_limb_type),
5703 : gimple_bb (stmt));
5704 62 : phi2 = create_phi_node (make_ssa_name (integer_type_node),
5705 : gimple_bb (stmt));
5706 217 : for (unsigned i = 0; i < cnt; i++)
5707 : {
5708 155 : add_phi_arg (phi1, bqp[i].val, bqp[i].e, UNKNOWN_LOCATION);
5709 155 : add_phi_arg (phi2, bqp[i].addend, bqp[i].e, UNKNOWN_LOCATION);
5710 : }
5711 62 : if (arg1 == NULL_TREE)
5712 : {
 : /* Zero input is undefined; the all-zero path can't be taken.  */
5713 35 : g = gimple_build_builtin_unreachable (m_loc);
5714 35 : insert_before (g);
5715 : }
5716 62 : m_gsi = gsi_for_stmt (stmt);
5717 62 : g = gimple_build_call (fndecl, 1, gimple_phi_result (phi1));
5718 62 : gimple_call_set_lhs (g, make_ssa_name (integer_type_node));
5719 62 : insert_before (g);
5720 62 : if (arg1 == NULL_TREE)
5721 35 : g = gimple_build_assign (lhs, PLUS_EXPR,
5722 : gimple_phi_result (phi2),
5723 : gimple_call_lhs (g));
5724 : else
5725 : {
 : /* Defined zero input: merge the computed result with ARG1
 coming in on the all-limbs-zero fall-through edge.  */
5726 27 : g = gimple_build_assign (make_ssa_name (integer_type_node),
5727 : PLUS_EXPR, gimple_phi_result (phi2),
5728 : gimple_call_lhs (g));
5729 27 : insert_before (g);
5730 27 : edge e1 = split_block (gimple_bb (stmt), g);
5731 27 : edge e2 = make_edge (bb, e1->dest, EDGE_FALLTHRU);
5732 27 : e2->probability = profile_probability::always ();
5733 27 : set_immediate_dominator (CDI_DOMINATORS, e1->dest,
5734 : get_immediate_dominator (CDI_DOMINATORS,
5735 : e1->src));
5736 27 : phi3 = create_phi_node (make_ssa_name (integer_type_node), e1->dest);
5737 27 : add_phi_arg (phi3, gimple_assign_lhs (g), e1, UNKNOWN_LOCATION);
5738 27 : add_phi_arg (phi3, arg1, e2, UNKNOWN_LOCATION);
5739 27 : m_gsi = gsi_for_stmt (stmt);
5740 27 : g = gimple_build_assign (lhs, gimple_phi_result (phi3));
5741 : }
5742 62 : gsi_replace (&m_gsi, g, true);
5743 62 : break;
5744 8 : case IFN_CLRSB:
 : /* PHI1/PHI2 (in edge_bb) feed the builtin for the limb where the
 sign run ends at a limb boundary; PHI3 merges that with the
 within-limb exits and the all-limbs-equal fall-through.  */
5745 8 : bb = gsi_bb (m_gsi);
5746 8 : remove_edge (find_edge (bb, edge_bb));
5747 8 : edge e;
5748 8 : e = make_edge (bb, gimple_bb (stmt), EDGE_FALLTHRU);
5749 8 : e->probability = profile_probability::always ();
5750 8 : set_immediate_dominator (CDI_DOMINATORS, gimple_bb (stmt),
5751 : get_immediate_dominator (CDI_DOMINATORS,
5752 : edge_bb));
5753 8 : phi1 = create_phi_node (make_ssa_name (m_limb_type),
5754 : edge_bb);
5755 8 : phi2 = create_phi_node (make_ssa_name (integer_type_node),
5756 : edge_bb);
5757 8 : phi3 = create_phi_node (make_ssa_name (integer_type_node),
5758 : gimple_bb (stmt));
5759 28 : for (unsigned i = 0; i < cnt; i++)
5760 : {
5761 20 : add_phi_arg (phi1, bqp[i].val, bqp[2 * i + 1].e, UNKNOWN_LOCATION);
5762 20 : add_phi_arg (phi2, bqp[i].addend, bqp[2 * i + 1].e,
5763 : UNKNOWN_LOCATION);
5764 20 : tree a = bqp[i].addend;
5765 20 : if (i && kind == bitint_prec_large)
5766 8 : a = int_const_binop (PLUS_EXPR, a, integer_minus_one_node);
5767 20 : if (i)
5768 12 : add_phi_arg (phi3, a, bqp[2 * i].e, UNKNOWN_LOCATION);
5769 : }
 : /* All limbs identical sign: result is prec - 1.  */
5770 8 : add_phi_arg (phi3, build_int_cst (integer_type_node, prec - 1), e,
5771 : UNKNOWN_LOCATION);
5772 8 : m_gsi = gsi_after_labels (edge_bb);
5773 8 : g = gimple_build_call (fndecl, 1,
5774 : add_cast (signed_type_for (m_limb_type),
5775 : gimple_phi_result (phi1)));
5776 8 : gimple_call_set_lhs (g, make_ssa_name (integer_type_node));
5777 8 : insert_before (g);
5778 8 : g = gimple_build_assign (make_ssa_name (integer_type_node),
5779 : PLUS_EXPR, gimple_call_lhs (g),
5780 : gimple_phi_result (phi2));
5781 8 : insert_before (g);
5782 8 : if (kind != bitint_prec_large)
5783 : {
5784 4 : g = gimple_build_assign (make_ssa_name (integer_type_node),
5785 : PLUS_EXPR, gimple_assign_lhs (g),
5786 : integer_one_node);
5787 4 : insert_before (g);
5788 : }
5789 8 : add_phi_arg (phi3, gimple_assign_lhs (g),
5790 : find_edge (edge_bb, gimple_bb (stmt)), UNKNOWN_LOCATION);
5791 8 : m_gsi = gsi_for_stmt (stmt);
5792 8 : g = gimple_build_assign (lhs, gimple_phi_result (phi3));
5793 8 : gsi_replace (&m_gsi, g, true);
5794 8 : break;
5795 11 : case IFN_PARITY:
 : /* RES holds the xor of all limbs; one builtin call finishes it.  */
5796 11 : g = gimple_build_call (fndecl, 1, res);
5797 11 : gimple_call_set_lhs (g, lhs);
5798 11 : gsi_replace (&m_gsi, g, true);
5799 11 : break;
5800 10 : case IFN_POPCOUNT:
 : /* RES already holds the accumulated per-limb popcount sum.  */
5801 10 : g = gimple_build_assign (lhs, res);
5802 10 : gsi_replace (&m_gsi, g, true);
5803 10 : break;
5804 0 : default:
5805 0 : gcc_unreachable ();
5806 : }
5807 : }
5808 :
5809 : /* Lower a call statement with one or more large/huge _BitInt
5810 : arguments or large/huge _BitInt return value. */
5811 :
5812 : void
5813 8604 : bitint_large_huge::lower_call (tree obj, gimple *stmt)
5814 : {
5815 8604 : gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
5816 8604 : unsigned int nargs = gimple_call_num_args (stmt);
 : /* Internal fns with dedicated lowering are dispatched first.  */
5817 8604 : if (gimple_call_internal_p (stmt))
5818 4131 : switch (gimple_call_internal_fn (stmt))
5819 : {
5820 2653 : case IFN_ADD_OVERFLOW:
5821 2653 : case IFN_SUB_OVERFLOW:
5822 2653 : case IFN_UBSAN_CHECK_ADD:
5823 2653 : case IFN_UBSAN_CHECK_SUB:
5824 2653 : lower_addsub_overflow (obj, stmt);
5825 6784 : return;
5826 1387 : case IFN_MUL_OVERFLOW:
5827 1387 : case IFN_UBSAN_CHECK_MUL:
5828 1387 : lower_mul_overflow (obj, stmt);
5829 1387 : return;
5830 91 : case IFN_CLZ:
5831 91 : case IFN_CTZ:
5832 91 : case IFN_CLRSB:
5833 91 : case IFN_FFS:
5834 91 : case IFN_PARITY:
5835 91 : case IFN_POPCOUNT:
5836 91 : lower_bit_query (stmt);
5837 91 : return;
5838 : default:
5839 : break;
5840 : }
 : /* Ordinary call: rewrite each large/huge _BitInt SSA argument into a
 load from the variable backing its partition.  */
5841 4473 : bool returns_twice = (gimple_call_flags (stmt) & ECF_RETURNS_TWICE) != 0;
5842 10060 : for (unsigned int i = 0; i < nargs; ++i)
5843 : {
5844 5587 : tree arg = gimple_call_arg (stmt, i);
5845 8839 : if (TREE_CODE (arg) != SSA_NAME
5846 2432 : || TREE_CODE (TREE_TYPE (arg)) != BITINT_TYPE
5847 7957 : || bitint_precision_kind (TREE_TYPE (arg)) <= bitint_prec_middle)
5848 3252 : continue;
5849 2335 : if (SSA_NAME_IS_DEFAULT_DEF (arg)
5850 2335 : && (!SSA_NAME_VAR (arg) || VAR_P (SSA_NAME_VAR (arg))))
5851 : {
 : /* Uninitialized argument: substitute a fresh anonymous
 default definition.  */
5852 1 : tree var = create_tmp_reg (TREE_TYPE (arg));
5853 1 : arg = get_or_create_ssa_default_def (cfun, var);
5854 : }
5855 : else
5856 : {
5857 2334 : int p = var_to_partition (m_map, arg);
5858 2334 : tree v = m_vars[p];
5859 2334 : gcc_assert (v != NULL_TREE);
5860 2334 : if (!types_compatible_p (TREE_TYPE (arg), TREE_TYPE (v)))
5861 2314 : v = build1 (VIEW_CONVERT_EXPR, TREE_TYPE (arg), v);
5862 2334 : arg = make_ssa_name (TREE_TYPE (arg));
5863 2334 : gimple *g = gimple_build_assign (arg, v);
5864 2334 : gsi_insert_before (&gsi, g, GSI_SAME_STMT);
 : /* Loads inserted before a returns_twice call with abnormal
 predecessors need fixing up later; remember the call and
 only record it once.  */
5865 2334 : if (returns_twice && bb_has_abnormal_pred (gimple_bb (stmt)))
5866 : {
5867 11 : m_returns_twice_calls.safe_push (stmt);
5868 11 : returns_twice = false;
5869 : }
5870 : }
5871 2335 : gimple_call_set_arg (stmt, i, arg);
 : /* Keep the replacement SSA names from being removed.  */
5872 2335 : if (m_preserved == NULL)
5873 404 : m_preserved = BITMAP_ALLOC (NULL);
5874 2335 : bitmap_set_bit (m_preserved, SSA_NAME_VERSION (arg));
5875 : }
 : /* A large/huge _BitInt lhs is redirected to store into the variable
 backing its partition; the old SSA name loses its definition.  */
5876 4473 : tree lhs = gimple_call_lhs (stmt);
5877 4473 : if (lhs
5878 4342 : && TREE_CODE (lhs) == SSA_NAME
5879 4342 : && TREE_CODE (TREE_TYPE (lhs)) == BITINT_TYPE
5880 8745 : && bitint_precision_kind (TREE_TYPE (lhs)) >= bitint_prec_large)
5881 : {
5882 4246 : int p = var_to_partition (m_map, lhs);
5883 4246 : tree v = m_vars[p];
5884 4246 : gcc_assert (v != NULL_TREE);
5885 4246 : if (!types_compatible_p (TREE_TYPE (lhs), TREE_TYPE (v)))
5886 4246 : v = build1 (VIEW_CONVERT_EXPR, TREE_TYPE (lhs), v);
5887 4246 : gimple_call_set_lhs (stmt, v);
5888 4246 : SSA_NAME_DEF_STMT (lhs) = gimple_build_nop ();
5889 : }
5890 4473 : update_stmt (stmt);
5891 : }
5892 :
5893 : /* Lower __asm STMT which involves large/huge _BitInt values. */
5894 :
5895 : void
5896 3 : bitint_large_huge::lower_asm (gimple *stmt)
5897 : {
5898 3 : gasm *g = as_a <gasm *> (stmt);
5899 3 : unsigned noutputs = gimple_asm_noutputs (g);
5900 3 : unsigned ninputs = gimple_asm_ninputs (g);
5901 :
 : /* Replace large/huge _BitInt SSA output operands with the variable
 backing their partition.  */
5902 5 : for (unsigned i = 0; i < noutputs; ++i)
5903 : {
5904 2 : tree t = gimple_asm_output_op (g, i);
5905 2 : tree s = TREE_VALUE (t);
5906 2 : if (TREE_CODE (s) == SSA_NAME
5907 1 : && TREE_CODE (TREE_TYPE (s)) == BITINT_TYPE
5908 3 : && bitint_precision_kind (TREE_TYPE (s)) >= bitint_prec_large)
5909 : {
5910 1 : int part = var_to_partition (m_map, s);
5911 1 : gcc_assert (m_vars[part] != NULL_TREE);
5912 1 : TREE_VALUE (t) = m_vars[part];
5913 : }
5914 : }
 : /* Likewise for the inputs; an uninitialized (default-def) input gets
 a fresh addressable temporary instead.  */
5915 8 : for (unsigned i = 0; i < ninputs; ++i)
5916 : {
5917 5 : tree t = gimple_asm_input_op (g, i);
5918 5 : tree s = TREE_VALUE (t);
5919 5 : if (TREE_CODE (s) == SSA_NAME
5920 4 : && TREE_CODE (TREE_TYPE (s)) == BITINT_TYPE
5921 9 : && bitint_precision_kind (TREE_TYPE (s)) >= bitint_prec_large)
5922 : {
5923 4 : if (SSA_NAME_IS_DEFAULT_DEF (s)
5924 4 : && (!SSA_NAME_VAR (s) || VAR_P (SSA_NAME_VAR (s))))
5925 : {
5926 1 : TREE_VALUE (t) = create_tmp_var (TREE_TYPE (s), "bitint");
5927 1 : mark_addressable (TREE_VALUE (t));
5928 : }
5929 : else
5930 : {
5931 3 : int part = var_to_partition (m_map, s);
5932 3 : gcc_assert (m_vars[part] != NULL_TREE);
5933 3 : TREE_VALUE (t) = m_vars[part];
5934 : }
5935 : }
5936 : }
5937 3 : update_stmt (stmt);
5938 3 : }
5939 :
5940 : /* Lower statement STMT which involves large/huge _BitInt values
5941 : into code accessing individual limbs. */
5942 :
5943 : void
5944 41725 : bitint_large_huge::lower_stmt (gimple *stmt)
5945 : {
5946 41725 : m_first = true;
5947 41725 : m_lhs = NULL_TREE;
5948 41725 : m_data.truncate (0);
5949 41725 : m_data_cnt = 0;
5950 41725 : m_gsi = gsi_for_stmt (stmt);
5951 41725 : m_after_stmt = NULL;
5952 41725 : m_bb = NULL;
5953 41725 : m_init_gsi = m_gsi;
5954 41725 : gsi_prev (&m_init_gsi);
5955 41725 : m_preheader_bb = NULL;
5956 41725 : m_upwards_2limb = 0;
5957 41725 : m_upwards = false;
5958 41725 : m_var_msb = false;
5959 41725 : m_cast_conditional = false;
5960 41725 : m_bitfld_load = 0;
5961 41725 : m_loc = gimple_location (stmt);
5962 41725 : if (is_gimple_call (stmt))
5963 : {
5964 6948 : lower_call (NULL_TREE, stmt);
5965 6948 : return;
5966 : }
5967 34777 : if (gimple_code (stmt) == GIMPLE_ASM)
5968 : {
5969 3 : lower_asm (stmt);
5970 3 : return;
5971 : }
5972 34774 : tree lhs = NULL_TREE, cmp_op1 = NULL_TREE, cmp_op2 = NULL_TREE;
5973 34774 : tree_code cmp_code = comparison_op (stmt, &cmp_op1, &cmp_op2);
5974 34774 : bool eq_p = (cmp_code == EQ_EXPR || cmp_code == NE_EXPR);
5975 34774 : bool mergeable_cast_p = false;
5976 34774 : bool final_cast_p = false;
5977 34774 : if (gimple_assign_cast_p (stmt))
5978 : {
5979 5289 : lhs = gimple_assign_lhs (stmt);
5980 5289 : tree rhs1 = gimple_assign_rhs1 (stmt);
5981 5289 : if (TREE_CODE (rhs1) == VIEW_CONVERT_EXPR)
5982 44 : rhs1 = TREE_OPERAND (rhs1, 0);
5983 5289 : if (TREE_CODE (TREE_TYPE (lhs)) == BITINT_TYPE
5984 1183 : && bitint_precision_kind (TREE_TYPE (lhs)) >= bitint_prec_large
5985 6443 : && INTEGRAL_TYPE_P (TREE_TYPE (rhs1)))
5986 : mergeable_cast_p = true;
5987 4272 : else if (TREE_CODE (TREE_TYPE (rhs1)) == BITINT_TYPE
5988 4135 : && bitint_precision_kind (TREE_TYPE (rhs1)) >= bitint_prec_large
5989 8407 : && (INTEGRAL_TYPE_P (TREE_TYPE (lhs))
5990 36 : || POINTER_TYPE_P (TREE_TYPE (lhs))
5991 35 : || gimple_assign_rhs_code (stmt) == VIEW_CONVERT_EXPR))
5992 : {
5993 4135 : final_cast_p = true;
5994 4135 : if (((TREE_CODE (TREE_TYPE (lhs)) == INTEGER_TYPE
5995 536 : && TYPE_PRECISION (TREE_TYPE (lhs)) > MAX_FIXED_MODE_SIZE)
5996 4135 : || (!INTEGRAL_TYPE_P (TREE_TYPE (lhs))
5997 36 : && !POINTER_TYPE_P (TREE_TYPE (lhs))))
5998 4170 : && gimple_assign_rhs_code (stmt) == VIEW_CONVERT_EXPR)
5999 : {
6000 : /* Handle VIEW_CONVERT_EXPRs to not generally supported
6001 : huge INTEGER_TYPEs like uint256_t or uint512_t. These
6002 : are usually emitted from memcpy folding and backends
6003 : support moves with them but that is usually it.
6004 : Similarly handle VCEs to vector/complex types etc. */
6005 35 : gcc_assert (TREE_CODE (rhs1) == SSA_NAME);
6006 35 : if (SSA_NAME_IS_DEFAULT_DEF (rhs1)
6007 35 : && (!SSA_NAME_VAR (rhs1) || VAR_P (SSA_NAME_VAR (rhs1))))
6008 : {
6009 0 : tree var = create_tmp_reg (TREE_TYPE (lhs));
6010 0 : rhs1 = get_or_create_ssa_default_def (cfun, var);
6011 0 : gimple_assign_set_rhs1 (stmt, rhs1);
6012 0 : gimple_assign_set_rhs_code (stmt, SSA_NAME);
6013 : }
6014 35 : else if (m_names == NULL
6015 35 : || !bitmap_bit_p (m_names, SSA_NAME_VERSION (rhs1)))
6016 : {
6017 0 : gimple *g = SSA_NAME_DEF_STMT (rhs1);
6018 0 : gcc_assert (gimple_assign_load_p (g));
6019 0 : tree mem = gimple_assign_rhs1 (g);
6020 0 : tree ltype = TREE_TYPE (lhs);
6021 0 : addr_space_t as = TYPE_ADDR_SPACE (TREE_TYPE (mem));
6022 0 : if (as != TYPE_ADDR_SPACE (ltype))
6023 0 : ltype
6024 0 : = build_qualified_type (ltype,
6025 0 : TYPE_QUALS (ltype)
6026 0 : | ENCODE_QUAL_ADDR_SPACE (as));
6027 0 : rhs1 = build1 (VIEW_CONVERT_EXPR, ltype, unshare_expr (mem));
6028 0 : gimple_assign_set_rhs1 (stmt, rhs1);
6029 : }
6030 : else
6031 : {
6032 35 : int part = var_to_partition (m_map, rhs1);
6033 35 : gcc_assert (m_vars[part] != NULL_TREE);
6034 35 : rhs1 = build1 (VIEW_CONVERT_EXPR, TREE_TYPE (lhs),
6035 : m_vars[part]);
6036 35 : gimple_assign_set_rhs1 (stmt, rhs1);
6037 : }
6038 35 : update_stmt (stmt);
6039 35 : return;
6040 : }
6041 4100 : if (TREE_CODE (rhs1) == SSA_NAME
6042 4100 : && (m_names == NULL
6043 4063 : || !bitmap_bit_p (m_names, SSA_NAME_VERSION (rhs1))))
6044 : {
6045 1695 : gimple *g = SSA_NAME_DEF_STMT (rhs1);
6046 1695 : if (is_gimple_assign (g)
6047 1695 : && gimple_assign_rhs_code (g) == IMAGPART_EXPR)
6048 : {
6049 1632 : tree rhs2 = TREE_OPERAND (gimple_assign_rhs1 (g), 0);
6050 1632 : if (TREE_CODE (rhs2) == SSA_NAME
6051 1632 : && (m_names == NULL
6052 1595 : || !bitmap_bit_p (m_names, SSA_NAME_VERSION (rhs2))))
6053 : {
6054 1632 : g = SSA_NAME_DEF_STMT (rhs2);
6055 1632 : int ovf = optimizable_arith_overflow (g);
6056 1632 : if (ovf == 2)
6057 : /* If .{ADD,SUB,MUL}_OVERFLOW has both REALPART_EXPR
6058 : and IMAGPART_EXPR uses, where the latter is cast to
6059 : non-_BitInt, it will be optimized when handling
6060 : the REALPART_EXPR. */
6061 : return;
6062 91 : if (ovf == 1)
6063 : {
6064 91 : lower_call (NULL_TREE, g);
6065 91 : return;
6066 : }
6067 : }
6068 : }
6069 : }
6070 : }
6071 137 : else if (TREE_CODE (TREE_TYPE (lhs)) == BITINT_TYPE
6072 137 : && bitint_precision_kind (TREE_TYPE (lhs)) >= bitint_prec_large
6073 137 : && !INTEGRAL_TYPE_P (TREE_TYPE (rhs1))
6074 137 : && !POINTER_TYPE_P (TREE_TYPE (rhs1))
6075 274 : && gimple_assign_rhs_code (stmt) == VIEW_CONVERT_EXPR)
6076 : {
6077 8 : int part = var_to_partition (m_map, lhs);
6078 8 : gcc_assert (m_vars[part] != NULL_TREE);
6079 8 : lhs = build1 (VIEW_CONVERT_EXPR, TREE_TYPE (rhs1), m_vars[part]);
6080 8 : insert_before (gimple_build_assign (lhs, rhs1));
6081 8 : return;
6082 : }
6083 : }
6084 33099 : if (gimple_store_p (stmt))
6085 : {
6086 8640 : tree rhs1 = gimple_assign_rhs1 (stmt);
6087 8640 : if (TREE_CODE (rhs1) == SSA_NAME
6088 8640 : && (m_names == NULL
6089 7611 : || !bitmap_bit_p (m_names, SSA_NAME_VERSION (rhs1))))
6090 : {
6091 1448 : gimple *g = SSA_NAME_DEF_STMT (rhs1);
6092 1448 : m_loc = gimple_location (g);
6093 1448 : lhs = gimple_assign_lhs (stmt);
6094 1448 : if (is_gimple_assign (g) && !mergeable_op (g))
6095 504 : switch (gimple_assign_rhs_code (g))
6096 : {
6097 104 : case LSHIFT_EXPR:
6098 104 : case RSHIFT_EXPR:
6099 104 : lower_shift_stmt (lhs, g);
6100 351 : handled:
6101 351 : m_gsi = gsi_for_stmt (stmt);
6102 351 : unlink_stmt_vdef (stmt);
6103 702 : release_ssa_name (gimple_vdef (stmt));
6104 351 : gsi_remove (&m_gsi, true);
6105 351 : return;
6106 182 : case MULT_EXPR:
6107 182 : case TRUNC_DIV_EXPR:
6108 182 : case EXACT_DIV_EXPR:
6109 182 : case TRUNC_MOD_EXPR:
6110 182 : lower_muldiv_stmt (lhs, g);
6111 182 : goto handled;
6112 38 : case FIX_TRUNC_EXPR:
6113 38 : lower_float_conv_stmt (lhs, g);
6114 38 : goto handled;
6115 3 : case REALPART_EXPR:
6116 3 : case IMAGPART_EXPR:
6117 3 : lower_cplxpart_stmt (lhs, g);
6118 3 : goto handled;
6119 7 : case VIEW_CONVERT_EXPR:
6120 7 : {
6121 7 : tree rhs1 = gimple_assign_rhs1 (g);
6122 7 : rhs1 = TREE_OPERAND (rhs1, 0);
6123 7 : if (!INTEGRAL_TYPE_P (TREE_TYPE (rhs1))
6124 6 : && !POINTER_TYPE_P (TREE_TYPE (rhs1)))
6125 : {
6126 6 : tree ltype = TREE_TYPE (rhs1);
6127 6 : addr_space_t as = TYPE_ADDR_SPACE (TREE_TYPE (lhs));
6128 6 : ltype
6129 12 : = build_qualified_type (ltype,
6130 6 : TYPE_QUALS (TREE_TYPE (lhs))
6131 6 : | ENCODE_QUAL_ADDR_SPACE (as));
6132 6 : lhs = build1 (VIEW_CONVERT_EXPR, ltype, lhs);
6133 6 : gimple_assign_set_lhs (stmt, lhs);
6134 6 : gimple_assign_set_rhs1 (stmt, rhs1);
6135 6 : gimple_assign_set_rhs_code (stmt, TREE_CODE (rhs1));
6136 6 : update_stmt (stmt);
6137 6 : return;
6138 : }
6139 : }
6140 : break;
6141 : default:
6142 : break;
6143 : }
6144 944 : else if (optimizable_arith_overflow (g) == 3)
6145 : {
6146 24 : lower_call (lhs, g);
6147 24 : goto handled;
6148 : }
6149 1091 : m_loc = gimple_location (stmt);
6150 : }
6151 : }
6152 32742 : if (mergeable_op (stmt)
6153 21628 : || gimple_store_p (stmt)
6154 21628 : || gimple_assign_load_p (stmt)
6155 : || eq_p
6156 16829 : || mergeable_cast_p
6157 42514 : || (is_gimple_assign (stmt)
6158 9384 : && gimple_assign_rhs_code (stmt) == PAREN_EXPR))
6159 : {
6160 22972 : lhs = lower_mergeable_stmt (stmt, cmp_code, cmp_op1, cmp_op2);
6161 22972 : if (!eq_p)
6162 : return;
6163 : }
6164 9770 : else if (cmp_code != ERROR_MARK)
6165 722 : lhs = lower_comparison_stmt (stmt, cmp_code, cmp_op1, cmp_op2);
6166 16267 : if (cmp_code != ERROR_MARK)
6167 : {
6168 7219 : if (gimple_code (stmt) == GIMPLE_COND)
6169 : {
6170 6453 : gcond *cstmt = as_a <gcond *> (stmt);
6171 6453 : gimple_cond_set_lhs (cstmt, lhs);
6172 6453 : gimple_cond_set_rhs (cstmt, boolean_false_node);
6173 6453 : gimple_cond_set_code (cstmt, cmp_code);
6174 6453 : update_stmt (stmt);
6175 6453 : return;
6176 : }
6177 766 : if (gimple_assign_rhs_code (stmt) == COND_EXPR)
6178 : {
6179 0 : tree cond = build2 (cmp_code, boolean_type_node, lhs,
6180 : boolean_false_node);
6181 0 : gimple_assign_set_rhs1 (stmt, cond);
6182 0 : lhs = gimple_assign_lhs (stmt);
6183 0 : gcc_assert (TREE_CODE (TREE_TYPE (lhs)) != BITINT_TYPE
6184 : || (bitint_precision_kind (TREE_TYPE (lhs))
6185 : <= bitint_prec_middle));
6186 0 : update_stmt (stmt);
6187 0 : return;
6188 : }
6189 766 : gimple_assign_set_rhs1 (stmt, lhs);
6190 766 : gimple_assign_set_rhs2 (stmt, boolean_false_node);
6191 766 : gimple_assign_set_rhs_code (stmt, cmp_code);
6192 766 : update_stmt (stmt);
6193 766 : return;
6194 : }
6195 9048 : if (final_cast_p)
6196 : {
6197 2468 : tree lhs_type = TREE_TYPE (lhs);
6198 : /* Add support for 3 or more limbs filled in from normal integral
6199 : type if this assert fails. If no target chooses limb mode smaller
6200 : than half of largest supported normal integral type, this will not
6201 : be needed. */
6202 2468 : gcc_assert (TYPE_PRECISION (lhs_type) <= 2 * limb_prec);
6203 2468 : gimple *g;
6204 2468 : if ((TREE_CODE (lhs_type) == BITINT_TYPE
6205 29 : && bitint_precision_kind (lhs_type) == bitint_prec_middle)
6206 2489 : || POINTER_TYPE_P (lhs_type))
6207 9 : lhs_type = build_nonstandard_integer_type (TYPE_PRECISION (lhs_type),
6208 9 : TYPE_UNSIGNED (lhs_type));
6209 2468 : m_data_cnt = 0;
6210 2468 : tree rhs1 = gimple_assign_rhs1 (stmt);
6211 2468 : unsigned int prec = TYPE_PRECISION (TREE_TYPE (rhs1));
6212 2468 : unsigned int cnt = CEIL (prec, limb_prec);
6213 2468 : tree r1 = handle_operand (rhs1, size_int (bitint_big_endian
6214 : ? cnt - 1 : 0));
6215 2468 : if (!useless_type_conversion_p (lhs_type, TREE_TYPE (r1)))
6216 2361 : r1 = add_cast (lhs_type, r1);
6217 2468 : if (TYPE_PRECISION (lhs_type) > limb_prec)
6218 : {
6219 63 : m_data_cnt = 0;
6220 63 : m_first = false;
6221 63 : tree r2 = handle_operand (rhs1, size_int (bitint_big_endian
6222 : ? cnt - 2 : 1));
6223 63 : r2 = add_cast (lhs_type, r2);
6224 63 : g = gimple_build_assign (make_ssa_name (lhs_type), LSHIFT_EXPR, r2,
6225 : build_int_cst (unsigned_type_node,
6226 63 : limb_prec));
6227 63 : insert_before (g);
6228 63 : g = gimple_build_assign (make_ssa_name (lhs_type), BIT_IOR_EXPR, r1,
6229 : gimple_assign_lhs (g));
6230 63 : insert_before (g);
6231 63 : r1 = gimple_assign_lhs (g);
6232 : }
6233 2468 : if (lhs_type != TREE_TYPE (lhs))
6234 9 : g = gimple_build_assign (lhs, NOP_EXPR, r1);
6235 : else
6236 2459 : g = gimple_build_assign (lhs, r1);
6237 2468 : gsi_replace (&m_gsi, g, true);
6238 2468 : return;
6239 : }
6240 6580 : if (is_gimple_assign (stmt))
6241 6580 : switch (gimple_assign_rhs_code (stmt))
6242 : {
6243 443 : case LSHIFT_EXPR:
6244 443 : case RSHIFT_EXPR:
6245 443 : lower_shift_stmt (NULL_TREE, stmt);
6246 443 : return;
6247 135 : case MULT_EXPR:
6248 135 : case TRUNC_DIV_EXPR:
6249 135 : case EXACT_DIV_EXPR:
6250 135 : case TRUNC_MOD_EXPR:
6251 135 : lower_muldiv_stmt (NULL_TREE, stmt);
6252 135 : return;
6253 267 : case FIX_TRUNC_EXPR:
6254 267 : case FLOAT_EXPR:
6255 267 : lower_float_conv_stmt (NULL_TREE, stmt);
6256 267 : return;
6257 5717 : case REALPART_EXPR:
6258 5717 : case IMAGPART_EXPR:
6259 5717 : lower_cplxpart_stmt (NULL_TREE, stmt);
6260 5717 : return;
6261 18 : case COMPLEX_EXPR:
6262 18 : lower_complexexpr_stmt (stmt);
6263 18 : return;
6264 : default:
6265 : break;
6266 : }
6267 0 : gcc_unreachable ();
6268 : }
6269 :
6270 : /* Helper for walk_non_aliased_vuses. Determine if we arrived at
6271 : the desired memory state. */
6272 :
6273 : void *
6274 2179 : vuse_eq (ao_ref *, tree vuse1, void *data)
6275 : {
6276 2179 : tree vuse2 = (tree) data;
6277 2179 : if (vuse1 == vuse2)
6278 818 : return data;
6279 :
6280 : return NULL;
6281 : }
6282 :
6283 : /* Return true if STMT uses a library function and needs to take
6284 : address of its inputs. We need to avoid bit-fields in those
6285 : cases. Similarly, we need to avoid overlap between destination
6286 : and source limb arrays. */
6287 :
6288 : bool
6289 14980 : stmt_needs_operand_addr (gimple *stmt)
6290 : {
6291 14980 : if (is_gimple_assign (stmt))
6292 10227 : switch (gimple_assign_rhs_code (stmt))
6293 : {
6294 584 : case MULT_EXPR:
6295 584 : case TRUNC_DIV_EXPR:
6296 584 : case EXACT_DIV_EXPR:
6297 584 : case TRUNC_MOD_EXPR:
6298 584 : case FLOAT_EXPR:
6299 584 : return true;
6300 : default:
6301 : break;
6302 : }
6303 4753 : else if (gimple_call_internal_p (stmt, IFN_MUL_OVERFLOW)
6304 4753 : || gimple_call_internal_p (stmt, IFN_UBSAN_CHECK_MUL))
6305 : return true;
6306 : return false;
6307 : }
6308 :
6309 : /* Dominator walker used to discover which large/huge _BitInt
6310 : loads could be sunk into all their uses. */
6311 :
6312 590 : class bitint_dom_walker : public dom_walker
6313 : {
6314 : public:
6315 295 : bitint_dom_walker (bitmap names, bitmap loads)
6316 590 : : dom_walker (CDI_DOMINATORS), m_names (names), m_loads (loads) {}
6317 :
6318 : edge before_dom_children (basic_block) final override;
6319 :
6320 : private:
6321 : bitmap m_names, m_loads;
6322 : };
6323 :
6324 : edge
6325 4461 : bitint_dom_walker::before_dom_children (basic_block bb)
6326 : {
6327 4461 : gphi *phi = get_virtual_phi (bb);
6328 4461 : tree vop;
6329 4461 : if (phi)
6330 788 : vop = gimple_phi_result (phi);
6331 3673 : else if (bb == ENTRY_BLOCK_PTR_FOR_FN (cfun))
6332 : vop = NULL_TREE;
6333 : else
6334 3378 : vop = (tree) get_immediate_dominator (CDI_DOMINATORS, bb)->aux;
6335 :
6336 4461 : auto_vec<tree, 16> worklist;
6337 8922 : for (gimple_stmt_iterator gsi = gsi_start_bb (bb);
6338 19825 : !gsi_end_p (gsi); gsi_next (&gsi))
6339 : {
6340 15364 : gimple *stmt = gsi_stmt (gsi);
6341 15364 : if (is_gimple_debug (stmt))
6342 2749 : continue;
6343 :
6344 15385 : if (!vop && gimple_vuse (stmt))
6345 : vop = gimple_vuse (stmt);
6346 :
6347 14980 : tree cvop = vop;
6348 28110 : if (gimple_vdef (stmt))
6349 14980 : vop = gimple_vdef (stmt);
6350 :
6351 14980 : tree lhs = gimple_get_lhs (stmt);
6352 17345 : if (lhs
6353 10888 : && TREE_CODE (lhs) == SSA_NAME
6354 8678 : && TREE_CODE (TREE_TYPE (lhs)) == BITINT_TYPE
6355 5821 : && bitint_precision_kind (TREE_TYPE (lhs)) >= bitint_prec_large
6356 20659 : && !bitmap_bit_p (m_names, SSA_NAME_VERSION (lhs)))
6357 : /* If lhs of stmt is large/huge _BitInt SSA_NAME not in m_names,
6358 : it means it will be handled in a loop or straight line code
6359 : at the location of its (ultimate) immediate use, so for
6360 : vop checking purposes check these only at the ultimate
6361 : immediate use. */
6362 2365 : continue;
6363 :
6364 12615 : ssa_op_iter oi;
6365 12615 : use_operand_p use_p;
6366 21388 : FOR_EACH_SSA_USE_OPERAND (use_p, stmt, oi, SSA_OP_USE)
6367 : {
6368 8773 : tree s = USE_FROM_PTR (use_p);
6369 8773 : if (TREE_CODE (TREE_TYPE (s)) == BITINT_TYPE
6370 8773 : && bitint_precision_kind (TREE_TYPE (s)) >= bitint_prec_large)
6371 3220 : worklist.safe_push (s);
6372 : }
6373 :
6374 12615 : bool needs_operand_addr = stmt_needs_operand_addr (stmt);
6375 31285 : while (worklist.length () > 0)
6376 : {
6377 6055 : tree s = worklist.pop ();
6378 :
6379 6055 : if (!bitmap_bit_p (m_names, SSA_NAME_VERSION (s)))
6380 : {
6381 2365 : gimple *g = SSA_NAME_DEF_STMT (s);
6382 2365 : needs_operand_addr |= stmt_needs_operand_addr (g);
6383 5427 : FOR_EACH_SSA_USE_OPERAND (use_p, g, oi, SSA_OP_USE)
6384 : {
6385 3062 : tree s2 = USE_FROM_PTR (use_p);
6386 3062 : if (TREE_CODE (TREE_TYPE (s2)) == BITINT_TYPE
6387 3062 : && (bitint_precision_kind (TREE_TYPE (s2))
6388 : >= bitint_prec_large))
6389 2835 : worklist.safe_push (s2);
6390 : }
6391 3072 : continue;
6392 2365 : }
6393 3690 : if (!SSA_NAME_OCCURS_IN_ABNORMAL_PHI (s)
6394 3690 : && gimple_assign_cast_p (SSA_NAME_DEF_STMT (s)))
6395 : {
6396 226 : tree rhs = gimple_assign_rhs1 (SSA_NAME_DEF_STMT (s));
6397 396 : if (TREE_CODE (rhs) == SSA_NAME
6398 226 : && bitmap_bit_p (m_loads, SSA_NAME_VERSION (rhs)))
6399 : s = rhs;
6400 : else
6401 170 : continue;
6402 : }
6403 3464 : else if (!bitmap_bit_p (m_loads, SSA_NAME_VERSION (s)))
6404 535 : continue;
6405 :
6406 2985 : gimple *g = SSA_NAME_DEF_STMT (s);
6407 2985 : tree rhs1 = gimple_assign_rhs1 (g);
6408 2985 : if (needs_operand_addr
6409 213 : && TREE_CODE (rhs1) == COMPONENT_REF
6410 3002 : && DECL_BIT_FIELD_TYPE (TREE_OPERAND (rhs1, 1)))
6411 : {
6412 4 : tree fld = TREE_OPERAND (rhs1, 1);
6413 : /* For little-endian, we can allow as inputs bit-fields
6414 : which start at a limb boundary. */
6415 6 : if (!bitint_big_endian
6416 4 : && DECL_OFFSET_ALIGN (fld) >= TYPE_ALIGN (TREE_TYPE (rhs1))
6417 4 : && tree_fits_uhwi_p (DECL_FIELD_BIT_OFFSET (fld))
6418 4 : && (tree_to_uhwi (DECL_FIELD_BIT_OFFSET (fld))
6419 4 : % limb_prec) == 0)
6420 : ;
6421 : else
6422 : {
6423 2 : bitmap_clear_bit (m_loads, SSA_NAME_VERSION (s));
6424 2 : continue;
6425 : }
6426 : }
6427 :
6428 2983 : ao_ref ref;
6429 2983 : ao_ref_init (&ref, rhs1);
6430 2983 : tree lvop = gimple_vuse (g);
6431 2983 : unsigned limit = 64;
6432 2983 : tree vuse = cvop;
6433 2983 : if (vop != cvop
6434 1329 : && is_gimple_assign (stmt)
6435 1327 : && gimple_store_p (stmt)
6436 4310 : && (needs_operand_addr
6437 1139 : || !operand_equal_p (lhs, gimple_assign_rhs1 (g), 0)))
6438 : vuse = vop;
6439 2983 : if (vuse != lvop
6440 2983 : && walk_non_aliased_vuses (&ref, vuse, false, vuse_eq,
6441 : NULL, NULL, NULL, limit, lvop) == NULL)
6442 525 : bitmap_clear_bit (m_loads, SSA_NAME_VERSION (s));
6443 : }
6444 : }
6445 :
6446 4461 : bb->aux = (void *) vop;
6447 4461 : return NULL;
6448 4461 : }
6449 :
6450 : }
6451 :
6452 : /* Replacement for normal processing of STMT in tree-ssa-coalesce.cc
6453 : build_ssa_conflict_graph.
6454 : The differences are:
6455 : 1) don't process assignments with large/huge _BitInt lhs not in NAMES
6456 : 2) for large/huge _BitInt multiplication/division/modulo process def
6457 : only after processing uses rather than before to make uses conflict
6458 : with the definition
6459 : 3) for large/huge _BitInt uses not in NAMES mark the uses of their
6460 : SSA_NAME_DEF_STMT (recursively), because those uses will be sunk into
6461 : the final statement. */
6462 :
6463 : void
6464 81817 : build_bitint_stmt_ssa_conflicts (gimple *stmt, live_track *live,
6465 : ssa_conflicts *graph, bitmap names,
6466 : void (*def) (live_track *, tree,
6467 : ssa_conflicts *),
6468 : void (*use) (live_track *, tree),
6469 : void (*clear) (live_track *, tree))
6470 : {
6471 81817 : bool muldiv_p = false;
6472 81817 : tree lhs = NULL_TREE;
6473 81817 : if (is_gimple_assign (stmt))
6474 : {
6475 44557 : lhs = gimple_assign_lhs (stmt);
6476 44557 : if (TREE_CODE (lhs) == SSA_NAME)
6477 : {
6478 32175 : tree type = TREE_TYPE (lhs);
6479 32175 : if (TREE_CODE (type) == COMPLEX_TYPE)
6480 63 : type = TREE_TYPE (type);
6481 32175 : if (TREE_CODE (type) == BITINT_TYPE
6482 32175 : && bitint_precision_kind (type) >= bitint_prec_large)
6483 : {
6484 19293 : if (!bitmap_bit_p (names, SSA_NAME_VERSION (lhs)))
6485 4618 : return;
6486 :
6487 : /* A copy between 2 partitions does not introduce an interference
6488 : by itself. If they did, you would never be able to coalesce
6489 : two things which are copied. If the two variables really do
6490 : conflict, they will conflict elsewhere in the program.
6491 :
6492 : This is handled by simply removing the SRC of the copy from
6493 : the live list, and processing the stmt normally.
6494 :
6495 : Don't do this if lhs is not in names though, in such cases
6496 : it is actually used at some point later in the basic
6497 : block. */
6498 14675 : if (gimple_assign_copy_p (stmt))
6499 : {
6500 1665 : tree rhs1 = gimple_assign_rhs1 (stmt);
6501 1665 : if (TREE_CODE (rhs1) == SSA_NAME)
6502 31 : clear (live, rhs1);
6503 : }
6504 :
6505 14675 : switch (gimple_assign_rhs_code (stmt))
6506 : {
6507 135 : case MULT_EXPR:
6508 135 : case TRUNC_DIV_EXPR:
6509 135 : case EXACT_DIV_EXPR:
6510 135 : case TRUNC_MOD_EXPR:
6511 135 : muldiv_p = true;
6512 : default:
6513 : break;
6514 : }
6515 : }
6516 : }
6517 : }
6518 37260 : else if (bitint_big_endian
6519 0 : && is_gimple_call (stmt)
6520 37260 : && gimple_call_internal_p (stmt))
6521 0 : switch (gimple_call_internal_fn (stmt))
6522 : {
6523 0 : case IFN_ADD_OVERFLOW:
6524 0 : case IFN_SUB_OVERFLOW:
6525 0 : case IFN_UBSAN_CHECK_ADD:
6526 0 : case IFN_UBSAN_CHECK_SUB:
6527 0 : case IFN_MUL_OVERFLOW:
6528 0 : case IFN_UBSAN_CHECK_MUL:
6529 0 : lhs = gimple_call_lhs (stmt);
6530 0 : if (lhs)
6531 77199 : muldiv_p = true;
6532 : break;
6533 : default:
6534 : break;
6535 : }
6536 :
6537 154398 : auto_vec<tree, 16> worklist;
6538 77199 : ssa_op_iter iter;
6539 77199 : tree var;
6540 : /* On little-endian, mergeable ops process limbs from 0 up so except
6541 : for multiplication/division/modulo there is no risk in using the
6542 : same underlying variable for lhs and some operand, even when casts
6543 : are involved, the lhs limb is stored only after processing the source
6544 : limbs with the same index.
6545 : For multiplication/division/modulo, the libgcc library function requires
6546 : no aliasing between result and sources.
6547 : On big-endian, even mergeable ops limb processing can be problematic
6548 : though, because it can apply various index corrections e.g. when there
6549 : is a cast from operand with different number of limbs. So, make the
6550 : lhs conflict with all the operands which are (for now virtually) used on
6551 : the current stmt if there is any mismatch in the number of limbs between
6552 : operands and the lhs. */
6553 77199 : if (bitint_big_endian && lhs && !muldiv_p)
6554 : {
6555 0 : tree ltype = TREE_TYPE (lhs);
6556 0 : if (TREE_CODE (ltype) == COMPLEX_TYPE)
6557 : muldiv_p = true;
6558 0 : else if (TREE_CODE (lhs) == SSA_NAME
6559 0 : && TREE_CODE (ltype) == BITINT_TYPE
6560 0 : && bitint_precision_kind (ltype) >= bitint_prec_large)
6561 : {
6562 0 : unsigned lnelts = CEIL (TYPE_PRECISION (ltype), limb_prec);
6563 0 : FOR_EACH_SSA_TREE_OPERAND (var, stmt, iter, SSA_OP_USE)
6564 : {
6565 0 : tree type = TREE_TYPE (var);
6566 0 : if (TREE_CODE (type) == COMPLEX_TYPE)
6567 0 : type = TREE_TYPE (type);
6568 0 : if (TREE_CODE (type) == BITINT_TYPE
6569 0 : && bitint_precision_kind (type) >= bitint_prec_large)
6570 : {
6571 0 : if (bitmap_bit_p (names, SSA_NAME_VERSION (var)))
6572 : {
6573 0 : unsigned nelts = CEIL (TYPE_PRECISION (type), limb_prec);
6574 0 : if (TREE_CODE (TREE_TYPE (var)) == COMPLEX_TYPE
6575 0 : || lnelts != nelts)
6576 : {
6577 0 : muldiv_p = true;
6578 : break;
6579 : }
6580 : }
6581 : else
6582 0 : worklist.safe_push (var);
6583 : }
6584 : }
6585 :
6586 0 : while (!muldiv_p && worklist.length () > 0)
6587 : {
6588 0 : tree s = worklist.pop ();
6589 0 : FOR_EACH_SSA_TREE_OPERAND (var, SSA_NAME_DEF_STMT (s), iter,
6590 : SSA_OP_USE)
6591 : {
6592 0 : tree type = TREE_TYPE (var);
6593 0 : if (TREE_CODE (type) == COMPLEX_TYPE)
6594 0 : type = TREE_TYPE (type);
6595 0 : if (TREE_CODE (type) == BITINT_TYPE
6596 0 : && bitint_precision_kind (type) >= bitint_prec_large)
6597 : {
6598 0 : if (bitmap_bit_p (names, SSA_NAME_VERSION (var)))
6599 : {
6600 0 : unsigned nelts = CEIL (TYPE_PRECISION (type),
6601 : limb_prec);
6602 0 : if (TREE_CODE (TREE_TYPE (var)) == COMPLEX_TYPE
6603 0 : || lnelts != nelts)
6604 : {
6605 : muldiv_p = true;
6606 : break;
6607 : }
6608 : }
6609 : else
6610 0 : worklist.safe_push (var);
6611 : }
6612 : }
6613 : }
6614 0 : worklist.truncate (0);
6615 : }
6616 : }
6617 :
6618 77199 : if (!muldiv_p)
6619 : {
6620 : /* For stmts with more than one SSA_NAME definition pretend all the
6621 : SSA_NAME outputs but the first one are live at this point, so
6622 : that conflicts are added in between all those even when they are
6623 : actually not really live after the asm, because expansion might
6624 : copy those into pseudos after the asm and if multiple outputs
6625 : share the same partition, it might overwrite those that should
6626 : be live. E.g.
6627 : asm volatile (".." : "=r" (a) : "=r" (b) : "0" (a), "1" (a));
6628 : return a;
6629 : See PR70593. */
6630 77064 : bool first = true;
6631 113360 : FOR_EACH_SSA_TREE_OPERAND (var, stmt, iter, SSA_OP_DEF)
6632 36296 : if (first)
6633 : first = false;
6634 : else
6635 0 : use (live, var);
6636 :
6637 113360 : FOR_EACH_SSA_TREE_OPERAND (var, stmt, iter, SSA_OP_DEF)
6638 36296 : def (live, var, graph);
6639 : }
6640 :
6641 131207 : FOR_EACH_SSA_TREE_OPERAND (var, stmt, iter, SSA_OP_USE)
6642 : {
6643 54008 : tree type = TREE_TYPE (var);
6644 54008 : if (TREE_CODE (type) == COMPLEX_TYPE)
6645 6081 : type = TREE_TYPE (type);
6646 54008 : if (TREE_CODE (type) == BITINT_TYPE
6647 54008 : && bitint_precision_kind (type) >= bitint_prec_large)
6648 : {
6649 34808 : if (bitmap_bit_p (names, SSA_NAME_VERSION (var)))
6650 29929 : use (live, var);
6651 : else
6652 4879 : worklist.safe_push (var);
6653 : }
6654 : }
6655 :
6656 84991 : while (worklist.length () > 0)
6657 : {
6658 7792 : tree s = worklist.pop ();
6659 16217 : FOR_EACH_SSA_TREE_OPERAND (var, SSA_NAME_DEF_STMT (s), iter, SSA_OP_USE)
6660 : {
6661 8425 : tree type = TREE_TYPE (var);
6662 8425 : if (TREE_CODE (type) == COMPLEX_TYPE)
6663 1597 : type = TREE_TYPE (type);
6664 8425 : if (TREE_CODE (type) == BITINT_TYPE
6665 8425 : && bitint_precision_kind (type) >= bitint_prec_large)
6666 : {
6667 7782 : if (bitmap_bit_p (names, SSA_NAME_VERSION (var)))
6668 4869 : use (live, var);
6669 : else
6670 2913 : worklist.safe_push (var);
6671 : }
6672 : }
6673 : }
6674 :
6675 77199 : if (muldiv_p)
6676 135 : def (live, lhs, graph);
6677 : }
6678 :
6679 : /* If STMT is .{ADD,SUB,MUL}_OVERFLOW with INTEGER_CST arguments,
6680 : return the largest bitint_prec_kind of them, otherwise return
6681 : bitint_prec_small. */
6682 :
6683 : static bitint_prec_kind
6684 190460 : arith_overflow_arg_kind (gimple *stmt)
6685 : {
6686 190460 : bitint_prec_kind ret = bitint_prec_small;
6687 190460 : if (is_gimple_call (stmt) && gimple_call_internal_p (stmt))
6688 87947 : switch (gimple_call_internal_fn (stmt))
6689 : {
6690 : case IFN_ADD_OVERFLOW:
6691 : case IFN_SUB_OVERFLOW:
6692 : case IFN_MUL_OVERFLOW:
6693 223308 : for (int i = 0; i < 2; ++i)
6694 : {
6695 148872 : tree a = gimple_call_arg (stmt, i);
6696 148872 : if (TREE_CODE (a) == INTEGER_CST
6697 148872 : && TREE_CODE (TREE_TYPE (a)) == BITINT_TYPE)
6698 : {
6699 5928 : bitint_prec_kind kind = bitint_precision_kind (TREE_TYPE (a));
6700 148872 : ret = MAX (ret, kind);
6701 : }
6702 : }
6703 : break;
6704 : default:
6705 : break;
6706 : }
6707 190460 : return ret;
6708 : }
6709 :
6710 : /* Entry point for _BitInt(N) operation lowering during optimization. */
6711 :
6712 : static unsigned int
6713 1472253 : gimple_lower_bitint (void)
6714 : {
6715 1472253 : small_max_prec = mid_min_prec = large_min_prec = huge_min_prec = 0;
6716 1472253 : limb_prec = abi_limb_prec = 0;
6717 1472253 : bitint_big_endian = false;
6718 :
6719 1472253 : unsigned int i;
6720 63150878 : for (i = 0; i < num_ssa_names; ++i)
6721 : {
6722 61685766 : tree s = ssa_name (i);
6723 61685766 : if (s == NULL)
6724 13150792 : continue;
6725 48534974 : tree type = TREE_TYPE (s);
6726 48534974 : if (TREE_CODE (type) == COMPLEX_TYPE)
6727 : {
6728 180096 : if (arith_overflow_arg_kind (SSA_NAME_DEF_STMT (s))
6729 : != bitint_prec_small)
6730 : break;
6731 179983 : type = TREE_TYPE (type);
6732 : }
6733 48534861 : if (TREE_CODE (type) == BITINT_TYPE
6734 48534861 : && bitint_precision_kind (type) != bitint_prec_small)
6735 : break;
6736 : /* We need to also rewrite stores of large/huge _BitInt INTEGER_CSTs
6737 : into memory. Such functions could have no large/huge SSA_NAMEs. */
6738 48527892 : if (SSA_NAME_IS_VIRTUAL_OPERAND (s))
6739 : {
6740 21119177 : gimple *g = SSA_NAME_DEF_STMT (s);
6741 21119177 : if (is_gimple_assign (g) && gimple_store_p (g))
6742 : {
6743 10690084 : tree t = gimple_assign_rhs1 (g);
6744 10690084 : if (TREE_CODE (TREE_TYPE (t)) == BITINT_TYPE
6745 10690084 : && (bitint_precision_kind (TREE_TYPE (t))
6746 : >= bitint_prec_large))
6747 : break;
6748 : }
6749 : }
6750 : /* Similarly, e.g. with -frounding-math casts from _BitInt INTEGER_CSTs
6751 : to floating point types need to be rewritten. */
6752 27408715 : else if (SCALAR_FLOAT_TYPE_P (type))
6753 : {
6754 2296150 : gimple *g = SSA_NAME_DEF_STMT (s);
6755 2296150 : if (is_gimple_assign (g) && gimple_assign_rhs_code (g) == FLOAT_EXPR)
6756 : {
6757 127595 : tree t = gimple_assign_rhs1 (g);
6758 127595 : if (TREE_CODE (t) == INTEGER_CST
6759 110 : && TREE_CODE (TREE_TYPE (t)) == BITINT_TYPE
6760 127596 : && (bitint_precision_kind (TREE_TYPE (t))
6761 : != bitint_prec_small))
6762 : break;
6763 : }
6764 : }
6765 : }
6766 2944506 : if (i == num_ssa_names)
6767 : return 0;
6768 :
6769 7141 : basic_block bb;
6770 7141 : auto_vec<gimple *, 4> switch_statements;
6771 45385 : FOR_EACH_BB_FN (bb, cfun)
6772 : {
6773 113995 : if (gswitch *swtch = safe_dyn_cast <gswitch *> (*gsi_last_bb (bb)))
6774 : {
6775 23 : tree idx = gimple_switch_index (swtch);
6776 23 : if (TREE_CODE (TREE_TYPE (idx)) != BITINT_TYPE
6777 23 : || bitint_precision_kind (TREE_TYPE (idx)) < bitint_prec_large)
6778 12 : continue;
6779 :
6780 11 : if (optimize)
6781 6 : group_case_labels_stmt (swtch);
6782 11 : if (gimple_switch_num_labels (swtch) == 1)
6783 : {
6784 1 : single_succ_edge (bb)->flags |= EDGE_FALLTHRU;
6785 1 : gimple_stmt_iterator gsi = gsi_for_stmt (swtch);
6786 1 : gsi_remove (&gsi, true);
6787 : }
6788 : else
6789 10 : switch_statements.safe_push (swtch);
6790 : }
6791 : }
6792 :
6793 7141 : if (!switch_statements.is_empty ())
6794 : {
6795 10 : bool expanded = false;
6796 10 : gimple *stmt;
6797 10 : unsigned int j;
6798 10 : i = 0;
6799 20 : FOR_EACH_VEC_ELT (switch_statements, j, stmt)
6800 : {
6801 10 : gswitch *swtch = as_a<gswitch *> (stmt);
6802 10 : tree_switch_conversion::switch_decision_tree dt (swtch);
6803 10 : expanded |= dt.analyze_switch_statement ();
6804 10 : }
6805 :
6806 10 : if (expanded)
6807 : {
6808 10 : free_dominance_info (CDI_DOMINATORS);
6809 10 : free_dominance_info (CDI_POST_DOMINATORS);
6810 10 : mark_virtual_operands_for_renaming (cfun);
6811 10 : cleanup_tree_cfg (TODO_update_ssa);
6812 : }
6813 : }
6814 :
6815 7141 : struct bitint_large_huge large_huge;
6816 7141 : bool has_large_huge_parm_result = false;
6817 7141 : bool has_large_huge = false;
6818 7141 : unsigned int ret = 0, first_large_huge = ~0U;
6819 7141 : bool edge_insertions = false;
6820 123188 : for (; i < num_ssa_names; ++i)
6821 : {
6822 116047 : tree s = ssa_name (i);
6823 116047 : if (s == NULL)
6824 2728 : continue;
6825 113319 : tree type = TREE_TYPE (s);
6826 113319 : if (TREE_CODE (type) == COMPLEX_TYPE)
6827 : {
6828 5197 : if (arith_overflow_arg_kind (SSA_NAME_DEF_STMT (s))
6829 : >= bitint_prec_large)
6830 1957 : has_large_huge = true;
6831 5197 : type = TREE_TYPE (type);
6832 : }
6833 113319 : if (TREE_CODE (type) == BITINT_TYPE
6834 113319 : && bitint_precision_kind (type) >= bitint_prec_large)
6835 : {
6836 34661 : if (first_large_huge == ~0U)
6837 5607 : first_large_huge = i;
6838 34661 : gimple *stmt = SSA_NAME_DEF_STMT (s), *g;
6839 34661 : gimple_stmt_iterator gsi;
6840 34661 : tree_code rhs_code;
6841 : /* Unoptimize certain constructs to simpler alternatives to
6842 : avoid having to lower all of them. */
6843 34661 : if (is_gimple_assign (stmt) && gimple_bb (stmt))
6844 21550 : switch (rhs_code = gimple_assign_rhs_code (stmt))
6845 : {
6846 : default:
6847 : break;
6848 317 : case MULT_EXPR:
6849 317 : case TRUNC_DIV_EXPR:
6850 317 : case EXACT_DIV_EXPR:
6851 317 : case TRUNC_MOD_EXPR:
6852 317 : if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (s))
6853 : {
6854 2 : location_t loc = gimple_location (stmt);
6855 2 : gsi = gsi_for_stmt (stmt);
6856 2 : tree rhs1 = gimple_assign_rhs1 (stmt);
6857 2 : tree rhs2 = gimple_assign_rhs2 (stmt);
6858 : /* For multiplication and division with (ab)
6859 : lhs and one or both operands force the operands
6860 : into new SSA_NAMEs to avoid coalescing failures. */
6861 2 : if (TREE_CODE (rhs1) == SSA_NAME
6862 2 : && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (rhs1))
6863 : {
6864 2 : first_large_huge = 0;
6865 2 : tree t = make_ssa_name (TREE_TYPE (rhs1));
6866 2 : g = gimple_build_assign (t, SSA_NAME, rhs1);
6867 2 : gsi_insert_before (&gsi, g, GSI_SAME_STMT);
6868 2 : gimple_set_location (g, loc);
6869 2 : gimple_assign_set_rhs1 (stmt, t);
6870 2 : if (rhs1 == rhs2)
6871 : {
6872 0 : gimple_assign_set_rhs2 (stmt, t);
6873 0 : rhs2 = t;
6874 : }
6875 2 : update_stmt (stmt);
6876 : }
6877 2 : if (TREE_CODE (rhs2) == SSA_NAME
6878 2 : && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (rhs2))
6879 : {
6880 0 : first_large_huge = 0;
6881 0 : tree t = make_ssa_name (TREE_TYPE (rhs2));
6882 0 : g = gimple_build_assign (t, SSA_NAME, rhs2);
6883 0 : gsi_insert_before (&gsi, g, GSI_SAME_STMT);
6884 0 : gimple_set_location (g, loc);
6885 0 : gimple_assign_set_rhs2 (stmt, t);
6886 0 : update_stmt (stmt);
6887 : }
6888 : }
6889 : break;
6890 3 : case LROTATE_EXPR:
6891 3 : case RROTATE_EXPR:
6892 3 : {
6893 3 : first_large_huge = 0;
6894 3 : location_t loc = gimple_location (stmt);
6895 3 : gsi = gsi_for_stmt (stmt);
6896 3 : tree rhs1 = gimple_assign_rhs1 (stmt);
6897 3 : tree type = TREE_TYPE (rhs1);
6898 3 : tree n = gimple_assign_rhs2 (stmt), m;
6899 3 : tree p = build_int_cst (TREE_TYPE (n),
6900 3 : TYPE_PRECISION (type));
6901 3 : if (TREE_CODE (n) == INTEGER_CST)
6902 : {
6903 0 : if (integer_zerop (n))
6904 : m = n;
6905 : else
6906 0 : m = fold_build2 (MINUS_EXPR, TREE_TYPE (n), p, n);
6907 : }
6908 : else
6909 : {
6910 3 : tree tem = make_ssa_name (TREE_TYPE (n));
6911 3 : g = gimple_build_assign (tem, MINUS_EXPR, p, n);
6912 3 : gsi_insert_before (&gsi, g, GSI_SAME_STMT);
6913 3 : gimple_set_location (g, loc);
6914 3 : m = make_ssa_name (TREE_TYPE (n));
6915 3 : g = gimple_build_assign (m, TRUNC_MOD_EXPR, tem, p);
6916 3 : gsi_insert_before (&gsi, g, GSI_SAME_STMT);
6917 3 : gimple_set_location (g, loc);
6918 : }
6919 3 : if (!TYPE_UNSIGNED (type))
6920 : {
6921 0 : tree utype = build_bitint_type (TYPE_PRECISION (type),
6922 : 1);
6923 0 : if (TREE_CODE (rhs1) == INTEGER_CST)
6924 0 : rhs1 = fold_convert (utype, rhs1);
6925 : else
6926 : {
6927 0 : tree t = make_ssa_name (type);
6928 0 : g = gimple_build_assign (t, NOP_EXPR, rhs1);
6929 0 : gsi_insert_before (&gsi, g, GSI_SAME_STMT);
6930 0 : gimple_set_location (g, loc);
6931 : }
6932 : }
6933 4 : g = gimple_build_assign (make_ssa_name (TREE_TYPE (rhs1)),
6934 : rhs_code == LROTATE_EXPR
6935 : ? LSHIFT_EXPR : RSHIFT_EXPR,
6936 : rhs1, n);
6937 3 : gsi_insert_before (&gsi, g, GSI_SAME_STMT);
6938 3 : gimple_set_location (g, loc);
6939 3 : tree op1 = gimple_assign_lhs (g);
6940 4 : g = gimple_build_assign (make_ssa_name (TREE_TYPE (rhs1)),
6941 : rhs_code == LROTATE_EXPR
6942 : ? RSHIFT_EXPR : LSHIFT_EXPR,
6943 : rhs1, m);
6944 3 : gsi_insert_before (&gsi, g, GSI_SAME_STMT);
6945 3 : gimple_set_location (g, loc);
6946 3 : tree op2 = gimple_assign_lhs (g);
6947 3 : tree lhs = gimple_assign_lhs (stmt);
6948 3 : if (!TYPE_UNSIGNED (type))
6949 : {
6950 0 : g = gimple_build_assign (make_ssa_name (TREE_TYPE (op1)),
6951 : BIT_IOR_EXPR, op1, op2);
6952 0 : gsi_insert_before (&gsi, g, GSI_SAME_STMT);
6953 0 : gimple_set_location (g, loc);
6954 0 : g = gimple_build_assign (lhs, NOP_EXPR,
6955 : gimple_assign_lhs (g));
6956 : }
6957 : else
6958 3 : g = gimple_build_assign (lhs, BIT_IOR_EXPR, op1, op2);
6959 3 : gsi_replace (&gsi, g, true);
6960 3 : gimple_set_location (g, loc);
6961 : }
6962 3 : break;
6963 21 : case ABS_EXPR:
6964 21 : case ABSU_EXPR:
6965 21 : case MIN_EXPR:
6966 21 : case MAX_EXPR:
6967 21 : case COND_EXPR:
6968 21 : first_large_huge = 0;
6969 21 : gsi = gsi_for_stmt (stmt);
6970 21 : tree lhs = gimple_assign_lhs (stmt);
6971 21 : tree rhs1 = gimple_assign_rhs1 (stmt), rhs2 = NULL_TREE;
6972 21 : location_t loc = gimple_location (stmt);
6973 21 : if (rhs_code == ABS_EXPR)
6974 4 : g = gimple_build_cond (LT_EXPR, rhs1,
6975 4 : build_zero_cst (TREE_TYPE (rhs1)),
6976 : NULL_TREE, NULL_TREE);
6977 17 : else if (rhs_code == ABSU_EXPR)
6978 : {
6979 8 : rhs2 = make_ssa_name (TREE_TYPE (lhs));
6980 8 : g = gimple_build_assign (rhs2, NOP_EXPR, rhs1);
6981 8 : gsi_insert_before (&gsi, g, GSI_SAME_STMT);
6982 8 : gimple_set_location (g, loc);
6983 8 : g = gimple_build_cond (LT_EXPR, rhs1,
6984 8 : build_zero_cst (TREE_TYPE (rhs1)),
6985 : NULL_TREE, NULL_TREE);
6986 8 : rhs1 = rhs2;
6987 : }
6988 9 : else if (rhs_code == MIN_EXPR || rhs_code == MAX_EXPR)
6989 : {
6990 9 : rhs2 = gimple_assign_rhs2 (stmt);
6991 9 : if (TREE_CODE (rhs1) == INTEGER_CST)
6992 0 : std::swap (rhs1, rhs2);
6993 9 : g = gimple_build_cond (LT_EXPR, rhs1, rhs2,
6994 : NULL_TREE, NULL_TREE);
6995 9 : if (rhs_code == MAX_EXPR)
6996 5 : std::swap (rhs1, rhs2);
6997 : }
6998 : else
6999 : {
7000 0 : g = gimple_build_cond (NE_EXPR, rhs1,
7001 0 : build_zero_cst (TREE_TYPE (rhs1)),
7002 : NULL_TREE, NULL_TREE);
7003 0 : rhs1 = gimple_assign_rhs2 (stmt);
7004 0 : rhs2 = gimple_assign_rhs3 (stmt);
7005 : }
7006 21 : gsi_insert_before (&gsi, g, GSI_SAME_STMT);
7007 21 : gimple_set_location (g, loc);
7008 21 : edge e1 = split_block (gsi_bb (gsi), g);
7009 21 : edge e2 = split_block (e1->dest, (gimple *) NULL);
7010 21 : edge e3 = make_edge (e1->src, e2->dest, EDGE_FALSE_VALUE);
7011 21 : e3->probability = profile_probability::even ();
7012 21 : e1->flags = EDGE_TRUE_VALUE;
7013 21 : e1->probability = e3->probability.invert ();
7014 21 : if (dom_info_available_p (CDI_DOMINATORS))
7015 13 : set_immediate_dominator (CDI_DOMINATORS, e2->dest, e1->src);
7016 21 : if (rhs_code == ABS_EXPR || rhs_code == ABSU_EXPR)
7017 : {
7018 12 : gsi = gsi_after_labels (e1->dest);
7019 12 : g = gimple_build_assign (make_ssa_name (TREE_TYPE (rhs1)),
7020 : NEGATE_EXPR, rhs1);
7021 12 : gsi_insert_before (&gsi, g, GSI_SAME_STMT);
7022 12 : gimple_set_location (g, loc);
7023 12 : rhs2 = gimple_assign_lhs (g);
7024 12 : std::swap (rhs1, rhs2);
7025 : }
7026 21 : gsi = gsi_for_stmt (stmt);
7027 21 : gsi_remove (&gsi, true);
7028 21 : gphi *phi = create_phi_node (lhs, e2->dest);
7029 21 : add_phi_arg (phi, rhs1, e2, UNKNOWN_LOCATION);
7030 21 : add_phi_arg (phi, rhs2, e3, UNKNOWN_LOCATION);
7031 21 : break;
7032 : }
7033 : }
7034 : /* We need to also rewrite stores of large/huge _BitInt INTEGER_CSTs
7035 : into memory. Such functions could have no large/huge SSA_NAMEs. */
7036 78658 : else if (SSA_NAME_IS_VIRTUAL_OPERAND (s))
7037 : {
7038 50198 : gimple *g = SSA_NAME_DEF_STMT (s);
7039 50198 : if (is_gimple_assign (g) && gimple_store_p (g))
7040 : {
7041 15916 : tree t = gimple_assign_rhs1 (g);
7042 15916 : if (TREE_CODE (TREE_TYPE (t)) == BITINT_TYPE
7043 15916 : && (bitint_precision_kind (TREE_TYPE (t))
7044 : >= bitint_prec_large))
7045 : has_large_huge = true;
7046 : }
7047 : }
7048 : /* Similarly, e.g. with -frounding-math casts from _BitInt INTEGER_CSTs
7049 : to floating point types need to be rewritten. */
7050 28460 : else if (SCALAR_FLOAT_TYPE_P (type))
7051 : {
7052 665 : gimple *g = SSA_NAME_DEF_STMT (s);
7053 665 : if (is_gimple_assign (g) && gimple_assign_rhs_code (g) == FLOAT_EXPR)
7054 : {
7055 177 : tree t = gimple_assign_rhs1 (g);
7056 177 : if (TREE_CODE (t) == INTEGER_CST
7057 1 : && TREE_CODE (TREE_TYPE (t)) == BITINT_TYPE
7058 178 : && (bitint_precision_kind (TREE_TYPE (t))
7059 : >= bitint_prec_large))
7060 : has_large_huge = true;
7061 : }
7062 : }
7063 : }
7064 99899 : for (i = first_large_huge; i < num_ssa_names; ++i)
7065 : {
7066 92758 : tree s = ssa_name (i);
7067 92758 : if (s == NULL)
7068 2425 : continue;
7069 90333 : tree type = TREE_TYPE (s);
7070 90333 : if (TREE_CODE (type) == COMPLEX_TYPE)
7071 3968 : type = TREE_TYPE (type);
7072 90333 : if (TREE_CODE (type) == BITINT_TYPE
7073 90333 : && bitint_precision_kind (type) >= bitint_prec_large)
7074 : {
7075 34661 : use_operand_p use_p;
7076 34661 : gimple *use_stmt;
7077 34661 : has_large_huge = true;
7078 36317 : if (optimize
7079 50757 : && optimizable_arith_overflow (SSA_NAME_DEF_STMT (s)))
7080 6181 : continue;
7081 : /* Ignore large/huge _BitInt SSA_NAMEs which have single use in
7082 : the same bb and could be handled in the same loop with the
7083 : immediate use. */
7084 33005 : if (optimize
7085 14440 : && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (s)
7086 14418 : && single_imm_use (s, &use_p, &use_stmt)
7087 46955 : && gimple_bb (SSA_NAME_DEF_STMT (s)) == gimple_bb (use_stmt))
7088 : {
7089 9833 : if (mergeable_op (SSA_NAME_DEF_STMT (s)))
7090 : {
7091 2064 : if (mergeable_op (use_stmt))
7092 1812 : continue;
7093 252 : tree_code cmp_code = comparison_op (use_stmt, NULL, NULL);
7094 252 : if (cmp_code == EQ_EXPR || cmp_code == NE_EXPR)
7095 26 : continue;
7096 226 : if (gimple_assign_cast_p (use_stmt))
7097 : {
7098 85 : tree lhs = gimple_assign_lhs (use_stmt);
7099 170 : if (INTEGRAL_TYPE_P (TREE_TYPE (lhs))
7100 : /* Don't merge with VIEW_CONVERT_EXPRs to
7101 : huge INTEGER_TYPEs used sometimes in memcpy
7102 : expansion. */
7103 158 : && (TREE_CODE (TREE_TYPE (lhs)) != INTEGER_TYPE
7104 8 : || (TYPE_PRECISION (TREE_TYPE (lhs))
7105 16 : <= MAX_FIXED_MODE_SIZE)))
7106 73 : continue;
7107 : }
7108 141 : else if (gimple_store_p (use_stmt)
7109 0 : && is_gimple_assign (use_stmt)
7110 0 : && !gimple_has_volatile_ops (use_stmt)
7111 141 : && !stmt_ends_bb_p (use_stmt))
7112 0 : continue;
7113 : }
7114 7922 : if (gimple_assign_cast_p (SSA_NAME_DEF_STMT (s)))
7115 : {
7116 795 : tree rhs1 = gimple_assign_rhs1 (SSA_NAME_DEF_STMT (s));
7117 795 : if (TREE_CODE (rhs1) == VIEW_CONVERT_EXPR)
7118 : {
7119 17 : rhs1 = TREE_OPERAND (rhs1, 0);
7120 17 : if (!INTEGRAL_TYPE_P (TREE_TYPE (rhs1))
7121 13 : && !POINTER_TYPE_P (TREE_TYPE (rhs1))
7122 13 : && gimple_store_p (use_stmt))
7123 6 : continue;
7124 : }
7125 1578 : if (INTEGRAL_TYPE_P (TREE_TYPE (rhs1))
7126 706 : && ((is_gimple_assign (use_stmt)
7127 659 : && (gimple_assign_rhs_code (use_stmt)
7128 : != COMPLEX_EXPR))
7129 47 : || gimple_code (use_stmt) == GIMPLE_COND)
7130 696 : && (!gimple_store_p (use_stmt)
7131 106 : || (is_gimple_assign (use_stmt)
7132 106 : && !gimple_has_volatile_ops (use_stmt)
7133 106 : && !stmt_ends_bb_p (use_stmt)))
7134 1485 : && (TREE_CODE (rhs1) != SSA_NAME
7135 696 : || !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (rhs1)))
7136 : {
7137 696 : if (is_gimple_assign (use_stmt))
7138 659 : switch (gimple_assign_rhs_code (use_stmt))
7139 : {
7140 54 : case TRUNC_DIV_EXPR:
7141 54 : case EXACT_DIV_EXPR:
7142 54 : case TRUNC_MOD_EXPR:
7143 54 : case FLOAT_EXPR:
7144 : /* For division, modulo and casts to floating
7145 : point, avoid representing unsigned operands
7146 : using negative prec if they were sign-extended
7147 : from narrower precision. */
7148 54 : if (TYPE_UNSIGNED (TREE_TYPE (s))
7149 28 : && !TYPE_UNSIGNED (TREE_TYPE (rhs1))
7150 63 : && (TYPE_PRECISION (TREE_TYPE (s))
7151 9 : > TYPE_PRECISION (TREE_TYPE (rhs1))))
7152 8 : goto force_name;
7153 : /* FALLTHRU */
7154 100 : case MULT_EXPR:
7155 100 : if (TREE_CODE (TREE_TYPE (rhs1)) != BITINT_TYPE
7156 100 : || (bitint_precision_kind (TREE_TYPE (rhs1))
7157 : < bitint_prec_large))
7158 35 : continue;
7159 : /* Uses which use handle_operand_addr can't
7160 : deal with nested casts. */
7161 65 : if (TREE_CODE (rhs1) == SSA_NAME
7162 65 : && gimple_assign_cast_p
7163 65 : (SSA_NAME_DEF_STMT (rhs1))
7164 43 : && has_single_use (rhs1)
7165 108 : && (gimple_bb (SSA_NAME_DEF_STMT (rhs1))
7166 43 : == gimple_bb (SSA_NAME_DEF_STMT (s))))
7167 43 : goto force_name;
7168 : break;
7169 0 : case VIEW_CONVERT_EXPR:
7170 0 : {
7171 0 : tree lhs = gimple_assign_lhs (use_stmt);
7172 : /* Don't merge with VIEW_CONVERT_EXPRs to
7173 : non-integral types. */
7174 0 : if (!INTEGRAL_TYPE_P (TREE_TYPE (lhs)))
7175 0 : goto force_name;
7176 : /* Don't merge with VIEW_CONVERT_EXPRs to
7177 : huge INTEGER_TYPEs used sometimes in memcpy
7178 : expansion. */
7179 0 : if (TREE_CODE (TREE_TYPE (lhs)) == INTEGER_TYPE
7180 0 : && (TYPE_PRECISION (TREE_TYPE (lhs))
7181 0 : > MAX_FIXED_MODE_SIZE))
7182 0 : goto force_name;
7183 : }
7184 : break;
7185 : default:
7186 : break;
7187 : }
7188 610 : if (TREE_CODE (TREE_TYPE (rhs1)) != BITINT_TYPE
7189 610 : || (bitint_precision_kind (TREE_TYPE (rhs1))
7190 : < bitint_prec_large))
7191 224 : continue;
7192 386 : if ((TYPE_PRECISION (TREE_TYPE (rhs1))
7193 386 : >= TYPE_PRECISION (TREE_TYPE (s)))
7194 386 : && mergeable_op (use_stmt))
7195 60 : continue;
7196 : /* Prevent merging a widening non-mergeable cast
7197 : on result of some narrower mergeable op
7198 : together with later mergeable operations. E.g.
7199 : result of _BitInt(223) addition shouldn't be
7200 : sign-extended to _BitInt(513) and have another
7201 : _BitInt(513) added to it, as handle_plus_minus
7202 : with its PHI node handling inside of handle_cast
7203 : will not work correctly. An exception is if
7204 : use_stmt is a store, this is handled directly
7205 : in lower_mergeable_stmt. */
7206 645 : if (TREE_CODE (rhs1) != SSA_NAME
7207 326 : || !has_single_use (rhs1)
7208 240 : || (gimple_bb (SSA_NAME_DEF_STMT (rhs1))
7209 240 : != gimple_bb (SSA_NAME_DEF_STMT (s)))
7210 191 : || !mergeable_op (SSA_NAME_DEF_STMT (rhs1))
7211 388 : || gimple_store_p (use_stmt))
7212 319 : continue;
7213 7 : if ((TYPE_PRECISION (TREE_TYPE (rhs1))
7214 7 : < TYPE_PRECISION (TREE_TYPE (s)))
7215 9 : && gimple_assign_cast_p (SSA_NAME_DEF_STMT (rhs1)))
7216 : {
7217 : /* Another exception is if the widening cast is
7218 : from mergeable same precision cast from something
7219 : not mergeable. */
7220 0 : tree rhs2
7221 0 : = gimple_assign_rhs1 (SSA_NAME_DEF_STMT (rhs1));
7222 0 : if (TREE_CODE (TREE_TYPE (rhs2)) == BITINT_TYPE
7223 0 : && (TYPE_PRECISION (TREE_TYPE (rhs1))
7224 0 : == TYPE_PRECISION (TREE_TYPE (rhs2))))
7225 : {
7226 0 : if (TREE_CODE (rhs2) != SSA_NAME
7227 0 : || !has_single_use (rhs2)
7228 0 : || (gimple_bb (SSA_NAME_DEF_STMT (rhs2))
7229 0 : != gimple_bb (SSA_NAME_DEF_STMT (s)))
7230 0 : || !mergeable_op (SSA_NAME_DEF_STMT (rhs2)))
7231 0 : continue;
7232 : }
7233 : }
7234 : }
7235 : }
7236 7227 : if (is_gimple_assign (SSA_NAME_DEF_STMT (s)))
7237 5169 : switch (gimple_assign_rhs_code (SSA_NAME_DEF_STMT (s)))
7238 : {
7239 1640 : case REALPART_EXPR:
7240 1640 : case IMAGPART_EXPR:
7241 1640 : {
7242 1640 : gimple *ds = SSA_NAME_DEF_STMT (s);
7243 1640 : tree rhs1 = gimple_assign_rhs1 (ds);
7244 1640 : rhs1 = TREE_OPERAND (rhs1, 0);
7245 1640 : if (TREE_CODE (rhs1) == SSA_NAME)
7246 : {
7247 1640 : gimple *g = SSA_NAME_DEF_STMT (rhs1);
7248 1640 : if (optimizable_arith_overflow (g))
7249 : {
7250 1636 : if (gimple_assign_rhs_code (ds) == IMAGPART_EXPR)
7251 1632 : continue;
7252 4 : if (gimple_store_p (use_stmt))
7253 : {
7254 : /* Punt if the cast use of IMAGPART_EXPR stmt
7255 : appears before the store use_stmt, because
7256 : optimizable arith overflow can't be
7257 : lowered at the store location in that case.
7258 : See PR121828. */
7259 4 : gimple_stmt_iterator gsi
7260 4 : = gsi_for_stmt (use_stmt);
7261 4 : unsigned int cnt = 0;
7262 6 : do
7263 : {
7264 6 : gsi_prev_nondebug (&gsi);
7265 6 : if (gsi_end_p (gsi))
7266 : break;
7267 6 : gimple *g2 = gsi_stmt (gsi);
7268 6 : if (g2 == ds)
7269 : break;
7270 3 : if (++cnt == 64)
7271 : break;
7272 3 : if (!gimple_assign_cast_p (g2))
7273 2 : continue;
7274 1 : tree rhs2 = gimple_assign_rhs1 (g2);
7275 1 : if (TREE_CODE (rhs2) != SSA_NAME)
7276 0 : continue;
7277 1 : gimple *g3 = SSA_NAME_DEF_STMT (rhs2);
7278 1 : if (!is_gimple_assign (g3))
7279 0 : continue;
7280 1 : if (gimple_assign_rhs_code (g3)
7281 : != IMAGPART_EXPR)
7282 0 : continue;
7283 1 : rhs2 = gimple_assign_rhs1 (g3);
7284 1 : rhs2 = TREE_OPERAND (rhs2, 0);
7285 1 : if (rhs2 != rhs1)
7286 0 : continue;
7287 : cnt = 64;
7288 : break;
7289 : }
7290 : while (1);
7291 4 : if (cnt == 64)
7292 : break;
7293 : }
7294 : }
7295 : }
7296 : }
7297 : /* FALLTHRU */
7298 635 : case LSHIFT_EXPR:
7299 635 : case RSHIFT_EXPR:
7300 635 : case MULT_EXPR:
7301 635 : case TRUNC_DIV_EXPR:
7302 635 : case EXACT_DIV_EXPR:
7303 635 : case TRUNC_MOD_EXPR:
7304 635 : case FIX_TRUNC_EXPR:
7305 635 : if (gimple_store_p (use_stmt)
7306 330 : && is_gimple_assign (use_stmt)
7307 330 : && !gimple_has_volatile_ops (use_stmt)
7308 965 : && !stmt_ends_bb_p (use_stmt))
7309 : {
7310 330 : tree lhs = gimple_assign_lhs (use_stmt);
7311 : /* As multiply/division passes address of the lhs
7312 : to library function and that assumes it can extend
7313 : it to whole number of limbs, avoid merging those
7314 : with bit-field stores. Don't allow it for
7315 : shifts etc. either, so that the bit-field store
7316 : handling doesn't have to be done everywhere. */
7317 330 : if (TREE_CODE (lhs) == COMPONENT_REF
7318 330 : && DECL_BIT_FIELD_TYPE (TREE_OPERAND (lhs, 1)))
7319 : break;
7320 327 : continue;
7321 327 : }
7322 : break;
7323 : default:
7324 : break;
7325 : }
7326 : }
7327 :
7328 : /* Also ignore uninitialized uses. */
7329 28440 : if (SSA_NAME_IS_DEFAULT_DEF (s)
7330 28440 : && (!SSA_NAME_VAR (s) || VAR_P (SSA_NAME_VAR (s))))
7331 11 : continue;
7332 :
7333 28480 : force_name:
7334 28480 : if (!large_huge.m_names)
7335 5505 : large_huge.m_names = BITMAP_ALLOC (NULL);
7336 28480 : bitmap_set_bit (large_huge.m_names, SSA_NAME_VERSION (s));
7337 28480 : if (has_single_use (s))
7338 : {
7339 : tree s2 = s;
7340 : /* The coalescing hook special cases SSA_NAME copies.
7341 : Make sure not to mark in m_single_use_names single
7342 : use SSA_NAMEs copied from non-single use SSA_NAMEs. */
7343 25029 : while (gimple_assign_copy_p (SSA_NAME_DEF_STMT (s2)))
7344 : {
7345 919 : s2 = gimple_assign_rhs1 (SSA_NAME_DEF_STMT (s2));
7346 919 : if (TREE_CODE (s2) != SSA_NAME)
7347 : break;
7348 27 : if (!has_single_use (s2))
7349 : {
7350 : s2 = NULL_TREE;
7351 : break;
7352 : }
7353 : }
7354 25022 : if (s2)
7355 : {
7356 25002 : if (!large_huge.m_single_use_names)
7357 5397 : large_huge.m_single_use_names = BITMAP_ALLOC (NULL);
7358 25002 : bitmap_set_bit (large_huge.m_single_use_names,
7359 25002 : SSA_NAME_VERSION (s));
7360 : }
7361 : }
7362 28480 : if (SSA_NAME_VAR (s)
7363 6808 : && ((TREE_CODE (SSA_NAME_VAR (s)) == PARM_DECL
7364 4967 : && SSA_NAME_IS_DEFAULT_DEF (s))
7365 1878 : || TREE_CODE (SSA_NAME_VAR (s)) == RESULT_DECL))
7366 : has_large_huge_parm_result = true;
7367 28480 : if (optimize
7368 9915 : && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (s)
7369 9893 : && gimple_assign_load_p (SSA_NAME_DEF_STMT (s))
7370 6028 : && !gimple_has_volatile_ops (SSA_NAME_DEF_STMT (s))
7371 31445 : && !stmt_ends_bb_p (SSA_NAME_DEF_STMT (s)))
7372 : {
7373 2965 : use_operand_p use_p;
7374 2965 : imm_use_iterator iter;
7375 2965 : bool optimizable_load = true;
7376 8957 : FOR_EACH_IMM_USE_FAST (use_p, iter, s)
7377 : {
7378 3125 : gimple *use_stmt = USE_STMT (use_p);
7379 3125 : if (is_gimple_debug (use_stmt))
7380 0 : continue;
7381 3125 : if (gimple_code (use_stmt) == GIMPLE_PHI
7382 3113 : || is_gimple_call (use_stmt)
7383 3028 : || gimple_code (use_stmt) == GIMPLE_ASM
7384 6152 : || (is_gimple_assign (use_stmt)
7385 1895 : && (gimple_assign_rhs_code (use_stmt)
7386 : == COMPLEX_EXPR)))
7387 : {
7388 : optimizable_load = false;
7389 : break;
7390 : }
7391 2965 : }
7392 :
7393 2965 : ssa_op_iter oi;
7394 4011 : FOR_EACH_SSA_USE_OPERAND (use_p, SSA_NAME_DEF_STMT (s),
7395 : oi, SSA_OP_USE)
7396 : {
7397 1046 : tree s2 = USE_FROM_PTR (use_p);
7398 1046 : if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (s2))
7399 : {
7400 : optimizable_load = false;
7401 : break;
7402 : }
7403 : }
7404 :
7405 2965 : if (optimizable_load && !stmt_ends_bb_p (SSA_NAME_DEF_STMT (s)))
7406 : {
7407 2867 : if (!large_huge.m_loads)
7408 295 : large_huge.m_loads = BITMAP_ALLOC (NULL);
7409 2867 : bitmap_set_bit (large_huge.m_loads, SSA_NAME_VERSION (s));
7410 : }
7411 : }
7412 : }
7413 : /* We need to also rewrite stores of large/huge _BitInt INTEGER_CSTs
7414 : into memory. Such functions could have no large/huge SSA_NAMEs. */
7415 55672 : else if (SSA_NAME_IS_VIRTUAL_OPERAND (s))
7416 : {
7417 39433 : gimple *g = SSA_NAME_DEF_STMT (s);
7418 39433 : if (is_gimple_assign (g) && gimple_store_p (g))
7419 : {
7420 13283 : tree t = gimple_assign_rhs1 (g);
7421 13283 : if (TREE_CODE (TREE_TYPE (t)) == BITINT_TYPE
7422 13283 : && bitint_precision_kind (TREE_TYPE (t)) >= bitint_prec_large)
7423 : has_large_huge = true;
7424 : }
7425 : }
7426 : }
7427 :
7428 7141 : if (large_huge.m_names || has_large_huge)
7429 : {
7430 5676 : ret = TODO_update_ssa_only_virtuals | TODO_cleanup_cfg;
7431 5676 : calculate_dominance_info (CDI_DOMINATORS);
7432 5676 : if (optimize)
7433 2944 : enable_ranger (cfun);
7434 5676 : if (large_huge.m_loads)
7435 : {
7436 295 : basic_block entry = ENTRY_BLOCK_PTR_FOR_FN (cfun);
7437 295 : entry->aux = NULL;
7438 590 : bitint_dom_walker (large_huge.m_names,
7439 295 : large_huge.m_loads).walk (entry);
7440 295 : bitmap_and_compl_into (large_huge.m_names, large_huge.m_loads);
7441 295 : clear_aux_for_blocks ();
7442 295 : BITMAP_FREE (large_huge.m_loads);
7443 : }
7444 5676 : large_huge.m_limb_type = build_nonstandard_integer_type (limb_prec, 1);
7445 5676 : large_huge.m_limb_size
7446 5676 : = tree_to_uhwi (TYPE_SIZE_UNIT (large_huge.m_limb_type));
7447 : }
7448 7141 : if (large_huge.m_names)
7449 : {
7450 5505 : large_huge.m_map
7451 11010 : = init_var_map (num_ssa_names, NULL, large_huge.m_names);
7452 5505 : coalesce_ssa_name (large_huge.m_map);
7453 5505 : partition_view_normal (large_huge.m_map);
7454 5505 : if (dump_file && (dump_flags & TDF_DETAILS))
7455 : {
7456 0 : fprintf (dump_file, "After Coalescing:\n");
7457 0 : dump_var_map (dump_file, large_huge.m_map);
7458 : }
7459 5505 : large_huge.m_vars
7460 5505 : = XCNEWVEC (tree, num_var_partitions (large_huge.m_map));
7461 5505 : bitmap_iterator bi;
7462 5505 : if (has_large_huge_parm_result)
7463 18645 : EXECUTE_IF_SET_IN_BITMAP (large_huge.m_names, 0, i, bi)
7464 : {
7465 14568 : tree s = ssa_name (i);
7466 14568 : if (SSA_NAME_VAR (s)
7467 5728 : && ((TREE_CODE (SSA_NAME_VAR (s)) == PARM_DECL
7468 4967 : && SSA_NAME_IS_DEFAULT_DEF (s))
7469 798 : || TREE_CODE (SSA_NAME_VAR (s)) == RESULT_DECL))
7470 : {
7471 4930 : int p = var_to_partition (large_huge.m_map, s);
7472 4930 : if (large_huge.m_vars[p] == NULL_TREE)
7473 : {
7474 4930 : large_huge.m_vars[p] = SSA_NAME_VAR (s);
7475 4930 : mark_addressable (SSA_NAME_VAR (s));
7476 : }
7477 : }
7478 : }
7479 5505 : tree atype = NULL_TREE;
7480 5505 : if (dump_file && (dump_flags & TDF_DETAILS))
7481 0 : fprintf (dump_file, "Mapping SSA_NAMEs to decls:\n");
7482 31645 : EXECUTE_IF_SET_IN_BITMAP (large_huge.m_names, 0, i, bi)
7483 : {
7484 26140 : tree s = ssa_name (i);
7485 26140 : int p = var_to_partition (large_huge.m_map, s);
7486 26140 : if (large_huge.m_vars[p] == NULL_TREE)
7487 : {
7488 18446 : if (atype == NULL_TREE
7489 32215 : || !tree_int_cst_equal (TYPE_SIZE (atype),
7490 13769 : TYPE_SIZE (TREE_TYPE (s))))
7491 : {
7492 7564 : unsigned HOST_WIDE_INT nelts
7493 7564 : = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (s))) / limb_prec;
7494 7564 : atype = build_array_type_nelts (large_huge.m_limb_type,
7495 7564 : nelts);
7496 : }
7497 18446 : large_huge.m_vars[p] = create_tmp_var (atype, "bitint");
7498 18446 : mark_addressable (large_huge.m_vars[p]);
7499 : }
7500 26140 : if (dump_file && (dump_flags & TDF_DETAILS))
7501 : {
7502 0 : print_generic_expr (dump_file, s, TDF_SLIM);
7503 0 : fprintf (dump_file, " -> ");
7504 0 : print_generic_expr (dump_file, large_huge.m_vars[p], TDF_SLIM);
7505 0 : fprintf (dump_file, "\n");
7506 : }
7507 : }
7508 : }
7509 :
7510 45679 : FOR_EACH_BB_REVERSE_FN (bb, cfun)
7511 : {
7512 38538 : gimple_stmt_iterator prev;
7513 193753 : for (gimple_stmt_iterator gsi = gsi_last_bb (bb); !gsi_end_p (gsi);
7514 116677 : gsi = prev)
7515 : {
7516 116677 : prev = gsi;
7517 116677 : gsi_prev (&prev);
7518 116677 : ssa_op_iter iter;
7519 116677 : gimple *stmt = gsi_stmt (gsi);
7520 116677 : if (is_gimple_debug (stmt))
7521 79642 : continue;
7522 111987 : bitint_prec_kind kind = bitint_prec_small;
7523 111987 : tree t;
7524 336516 : FOR_EACH_SSA_TREE_OPERAND (t, stmt, iter, SSA_OP_ALL_OPERANDS)
7525 224529 : if (TREE_CODE (TREE_TYPE (t)) == BITINT_TYPE)
7526 : {
7527 76876 : bitint_prec_kind this_kind
7528 76876 : = bitint_precision_kind (TREE_TYPE (t));
7529 224730 : kind = MAX (kind, this_kind);
7530 : }
7531 111987 : if (is_gimple_assign (stmt) && gimple_store_p (stmt))
7532 : {
7533 15939 : t = gimple_assign_rhs1 (stmt);
7534 15939 : if (TREE_CODE (TREE_TYPE (t)) == BITINT_TYPE)
7535 : {
7536 13752 : bitint_prec_kind this_kind
7537 13752 : = bitint_precision_kind (TREE_TYPE (t));
7538 13752 : kind = MAX (kind, this_kind);
7539 : }
7540 : }
7541 111987 : if (is_gimple_assign (stmt)
7542 111987 : && gimple_assign_rhs_code (stmt) == FLOAT_EXPR)
7543 : {
7544 179 : t = gimple_assign_rhs1 (stmt);
7545 179 : if (TREE_CODE (TREE_TYPE (t)) == BITINT_TYPE
7546 179 : && TREE_CODE (t) == INTEGER_CST)
7547 : {
7548 1 : bitint_prec_kind this_kind
7549 1 : = bitint_precision_kind (TREE_TYPE (t));
7550 1 : kind = MAX (kind, this_kind);
7551 : }
7552 : }
7553 111987 : if (is_gimple_call (stmt))
7554 : {
7555 25393 : t = gimple_call_lhs (stmt);
7556 25393 : if (t && TREE_CODE (TREE_TYPE (t)) == COMPLEX_TYPE)
7557 : {
7558 5167 : bitint_prec_kind this_kind = arith_overflow_arg_kind (stmt);
7559 5167 : kind = MAX (kind, this_kind);
7560 5167 : if (TREE_CODE (TREE_TYPE (TREE_TYPE (t))) == BITINT_TYPE)
7561 : {
7562 5047 : this_kind
7563 5047 : = bitint_precision_kind (TREE_TYPE (TREE_TYPE (t)));
7564 5047 : kind = MAX (kind, this_kind);
7565 : }
7566 : }
7567 : }
7568 111671 : if (kind == bitint_prec_small)
7569 43999 : continue;
7570 67988 : switch (gimple_code (stmt))
7571 : {
7572 10881 : case GIMPLE_CALL:
7573 : /* For now. We'll need to handle some internal functions and
7574 : perhaps some builtins. */
7575 10881 : if (kind == bitint_prec_middle)
7576 2277 : continue;
7577 : break;
7578 4 : case GIMPLE_ASM:
7579 4 : if (kind == bitint_prec_middle)
7580 1 : continue;
7581 : break;
7582 1117 : case GIMPLE_RETURN:
7583 1117 : continue;
7584 47413 : case GIMPLE_ASSIGN:
7585 47413 : if (gimple_clobber_p (stmt))
7586 3509 : continue;
7587 43904 : if (kind >= bitint_prec_large)
7588 : break;
7589 8729 : if (gimple_assign_single_p (stmt))
7590 : /* No need to lower copies, loads or stores. */
7591 5784 : continue;
7592 2945 : if (gimple_assign_cast_p (stmt))
7593 : {
7594 2379 : tree lhs = gimple_assign_lhs (stmt);
7595 2379 : tree rhs = gimple_assign_rhs1 (stmt);
7596 4758 : if (INTEGRAL_TYPE_P (TREE_TYPE (lhs))
7597 2379 : && INTEGRAL_TYPE_P (TREE_TYPE (rhs))
7598 4752 : && (TYPE_PRECISION (TREE_TYPE (lhs))
7599 2373 : == TYPE_PRECISION (TREE_TYPE (rhs))))
7600 : /* No need to lower casts to same precision. */
7601 28 : continue;
7602 : }
7603 : break;
7604 : default:
7605 : break;
7606 1117 : }
7607 :
7608 11490 : if (kind == bitint_prec_middle)
7609 : {
7610 5037 : tree type = NULL_TREE;
7611 : /* Middle _BitInt(N) is rewritten to casts to INTEGER_TYPEs
7612 : with the same precision and back. */
7613 5037 : unsigned int nops = gimple_num_ops (stmt);
7614 16906 : for (unsigned int i = is_gimple_assign (stmt) ? 1 : 0;
7615 16906 : i < nops; ++i)
7616 11869 : if (tree op = gimple_op (stmt, i))
7617 : {
7618 7637 : tree nop = maybe_cast_middle_bitint (&gsi, op, type);
7619 7637 : if (nop != op)
7620 6757 : gimple_set_op (stmt, i, nop);
7621 880 : else if (COMPARISON_CLASS_P (op))
7622 : {
7623 0 : TREE_OPERAND (op, 0)
7624 0 : = maybe_cast_middle_bitint (&gsi,
7625 0 : TREE_OPERAND (op, 0),
7626 : type);
7627 0 : TREE_OPERAND (op, 1)
7628 0 : = maybe_cast_middle_bitint (&gsi,
7629 0 : TREE_OPERAND (op, 1),
7630 : type);
7631 : }
7632 880 : else if (TREE_CODE (op) == CASE_LABEL_EXPR)
7633 : {
7634 24 : CASE_LOW (op)
7635 24 : = maybe_cast_middle_bitint (&gsi, CASE_LOW (op),
7636 : type);
7637 48 : CASE_HIGH (op)
7638 48 : = maybe_cast_middle_bitint (&gsi, CASE_HIGH (op),
7639 : type);
7640 : }
7641 : }
7642 5037 : if (tree lhs = gimple_get_lhs (stmt))
7643 2917 : if (TREE_CODE (TREE_TYPE (lhs)) == BITINT_TYPE
7644 2917 : && (bitint_precision_kind (TREE_TYPE (lhs))
7645 : == bitint_prec_middle))
7646 : {
7647 1368 : int prec = TYPE_PRECISION (TREE_TYPE (lhs));
7648 1368 : int uns = TYPE_UNSIGNED (TREE_TYPE (lhs));
7649 1368 : type = build_nonstandard_integer_type (prec, uns);
7650 1368 : tree lhs2 = make_ssa_name (type);
7651 1368 : gimple_set_lhs (stmt, lhs2);
7652 1368 : gimple *g = gimple_build_assign (lhs, NOP_EXPR, lhs2);
7653 1368 : if (stmt_ends_bb_p (stmt))
7654 : {
7655 4 : edge e = find_fallthru_edge (gsi_bb (gsi)->succs);
7656 4 : gsi_insert_on_edge (e, g);
7657 4 : edge_insertions = true;
7658 : }
7659 : else
7660 1364 : gsi_insert_after (&gsi, g, GSI_SAME_STMT);
7661 : }
7662 5037 : update_stmt (stmt);
7663 5037 : continue;
7664 5037 : }
7665 :
7666 50235 : if (tree lhs = gimple_get_lhs (stmt))
7667 43648 : if (TREE_CODE (lhs) == SSA_NAME)
7668 : {
7669 35008 : tree type = TREE_TYPE (lhs);
7670 35008 : if (TREE_CODE (type) == COMPLEX_TYPE)
7671 4043 : type = TREE_TYPE (type);
7672 43518 : if (TREE_CODE (type) == BITINT_TYPE
7673 29730 : && bitint_precision_kind (type) >= bitint_prec_large
7674 64549 : && (large_huge.m_names == NULL
7675 29388 : || !bitmap_bit_p (large_huge.m_names,
7676 29388 : SSA_NAME_VERSION (lhs))))
7677 8510 : continue;
7678 : }
7679 :
7680 41725 : large_huge.lower_stmt (stmt);
7681 : }
7682 :
7683 38538 : tree atype = NULL_TREE;
7684 47063 : for (gphi_iterator gsi = gsi_start_phis (bb); !gsi_end_p (gsi);
7685 8525 : gsi_next (&gsi))
7686 : {
7687 8525 : gphi *phi = gsi.phi ();
7688 8525 : tree lhs = gimple_phi_result (phi);
7689 8525 : if (TREE_CODE (TREE_TYPE (lhs)) != BITINT_TYPE
7690 8525 : || bitint_precision_kind (TREE_TYPE (lhs)) < bitint_prec_large)
7691 8382 : continue;
7692 143 : int p1 = var_to_partition (large_huge.m_map, lhs);
7693 143 : gcc_assert (large_huge.m_vars[p1] != NULL_TREE);
7694 : tree v1 = large_huge.m_vars[p1];
7695 544 : for (unsigned i = 0; i < gimple_phi_num_args (phi); ++i)
7696 : {
7697 401 : tree arg = gimple_phi_arg_def (phi, i);
7698 401 : edge e = gimple_phi_arg_edge (phi, i);
7699 401 : gimple *g;
7700 401 : switch (TREE_CODE (arg))
7701 : {
7702 74 : case INTEGER_CST:
7703 74 : if (integer_zerop (arg) && VAR_P (v1))
7704 : {
7705 45 : tree zero = build_zero_cst (TREE_TYPE (v1));
7706 45 : g = gimple_build_assign (v1, zero);
7707 45 : gsi_insert_on_edge (e, g);
7708 45 : edge_insertions = true;
7709 136 : break;
7710 : }
7711 29 : int ext;
7712 29 : unsigned int min_prec, prec, rem;
7713 29 : tree c;
7714 29 : prec = TYPE_PRECISION (TREE_TYPE (arg));
7715 29 : rem = prec % (2 * limb_prec);
7716 29 : min_prec = bitint_min_cst_precision (arg, ext);
7717 29 : if (min_prec > prec - rem - 2 * limb_prec
7718 12 : && min_prec > (unsigned) limb_prec)
7719 : /* Constant which has enough significant bits that it
7720 : isn't worth trying to save .rodata space by extending
7721 : from smaller number. */
7722 : min_prec = prec;
7723 : else
7724 : {
7725 20 : min_prec = CEIL (min_prec, limb_prec) * limb_prec;
7726 20 : if (min_prec > (unsigned) limb_prec
7727 3 : && abi_limb_prec > limb_prec)
7728 : {
7729 : /* For targets with ABI limb precision higher than
7730 : limb precision round to ABI limb precision,
7731 : otherwise c can contain padding bits. */
7732 0 : min_prec
7733 0 : = CEIL (min_prec, abi_limb_prec) * abi_limb_prec;
7734 0 : if (min_prec > prec - rem - 2 * limb_prec)
7735 9 : min_prec = prec;
7736 : }
7737 : }
7738 29 : if (min_prec == 0)
7739 : c = NULL_TREE;
7740 26 : else if (min_prec == prec)
7741 9 : c = tree_output_constant_def (arg);
7742 17 : else if (min_prec == (unsigned) limb_prec)
7743 14 : c = fold_convert (large_huge.m_limb_type, arg);
7744 : else
7745 : {
7746 3 : tree ctype = build_bitint_type (min_prec, 1);
7747 3 : c = tree_output_constant_def (fold_convert (ctype, arg));
7748 : }
7749 26 : if (c)
7750 : {
7751 26 : if (VAR_P (v1) && min_prec == prec)
7752 : {
7753 8 : tree v2 = build1 (VIEW_CONVERT_EXPR,
7754 8 : TREE_TYPE (v1), c);
7755 8 : g = gimple_build_assign (v1, v2);
7756 8 : gsi_insert_on_edge (e, g);
7757 8 : edge_insertions = true;
7758 8 : break;
7759 : }
7760 18 : if (TREE_CODE (TREE_TYPE (c)) == INTEGER_TYPE)
7761 : {
7762 14 : if (bitint_big_endian)
7763 : {
7764 0 : tree ptype = build_pointer_type (TREE_TYPE (v1));
7765 0 : tree sz1 = TYPE_SIZE_UNIT (TREE_TYPE (v1));
7766 0 : tree sz2 = TYPE_SIZE_UNIT (TREE_TYPE (c));
7767 0 : tree off = build_int_cst (ptype,
7768 0 : tree_to_uhwi (sz1)
7769 0 : - tree_to_uhwi (sz2));
7770 0 : tree vd = build2 (MEM_REF, TREE_TYPE (c),
7771 : build_fold_addr_expr (v1),
7772 : off);
7773 0 : g = gimple_build_assign (vd, c);
7774 : }
7775 : else
7776 14 : g = gimple_build_assign (build1 (VIEW_CONVERT_EXPR,
7777 14 : TREE_TYPE (c),
7778 : v1), c);
7779 : }
7780 : else
7781 : {
7782 4 : unsigned HOST_WIDE_INT nelts
7783 4 : = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (c)))
7784 4 : / limb_prec;
7785 4 : tree vtype
7786 8 : = build_array_type_nelts (large_huge.m_limb_type,
7787 4 : nelts);
7788 4 : tree vd;
7789 4 : if (bitint_big_endian)
7790 : {
7791 0 : tree ptype = build_pointer_type (TREE_TYPE (v1));
7792 0 : tree sz1 = TYPE_SIZE_UNIT (TREE_TYPE (v1));
7793 0 : tree sz2 = TYPE_SIZE_UNIT (vtype);
7794 0 : tree off = build_int_cst (ptype,
7795 0 : tree_to_uhwi (sz1)
7796 0 : - tree_to_uhwi (sz2));
7797 0 : vd = build2 (MEM_REF, vtype,
7798 : build_fold_addr_expr (v1), off);
7799 : }
7800 : else
7801 4 : vd = build1 (VIEW_CONVERT_EXPR, vtype, v1);
7802 4 : g = gimple_build_assign (vd,
7803 : build1 (VIEW_CONVERT_EXPR,
7804 : vtype, c));
7805 : }
7806 18 : gsi_insert_on_edge (e, g);
7807 18 : if (min_prec == prec)
7808 : {
7809 : edge_insertions = true;
7810 : break;
7811 : }
7812 : }
7813 20 : if (ext == 0)
7814 : {
7815 14 : unsigned HOST_WIDE_INT nelts
7816 14 : = (tree_to_uhwi (TYPE_SIZE (TREE_TYPE (v1)))
7817 14 : - min_prec) / limb_prec;
7818 14 : tree vtype
7819 28 : = build_array_type_nelts (large_huge.m_limb_type,
7820 14 : nelts);
7821 14 : tree ptype = build_pointer_type (TREE_TYPE (v1));
7822 14 : tree off;
7823 14 : if (c && !bitint_big_endian)
7824 13 : off = fold_convert (ptype,
7825 : TYPE_SIZE_UNIT (TREE_TYPE (c)));
7826 : else
7827 1 : off = build_zero_cst (ptype);
7828 14 : tree vd = build2 (MEM_REF, vtype,
7829 : build_fold_addr_expr (v1), off);
7830 14 : g = gimple_build_assign (vd, build_zero_cst (vtype));
7831 : }
7832 : else
7833 : {
7834 6 : tree vd = v1;
7835 6 : if (c && !bitint_big_endian)
7836 : {
7837 4 : tree ptype = build_pointer_type (TREE_TYPE (v1));
7838 4 : tree off
7839 4 : = fold_convert (ptype,
7840 : TYPE_SIZE_UNIT (TREE_TYPE (c)));
7841 4 : vd = build2 (MEM_REF, large_huge.m_limb_type,
7842 : build_fold_addr_expr (v1), off);
7843 : }
7844 6 : vd = build_fold_addr_expr (vd);
7845 6 : unsigned HOST_WIDE_INT nbytes
7846 6 : = tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (v1)));
7847 6 : if (c)
7848 4 : nbytes
7849 4 : -= tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (c)));
7850 6 : tree fn = builtin_decl_implicit (BUILT_IN_MEMSET);
7851 6 : g = gimple_build_call (fn, 3, vd,
7852 : integer_minus_one_node,
7853 : build_int_cst (sizetype,
7854 6 : nbytes));
7855 : }
7856 20 : gsi_insert_on_edge (e, g);
7857 20 : edge_insertions = true;
7858 20 : break;
7859 0 : default:
7860 0 : gcc_unreachable ();
7861 327 : case SSA_NAME:
7862 327 : if (gimple_code (SSA_NAME_DEF_STMT (arg)) == GIMPLE_NOP)
7863 : {
7864 9 : if (large_huge.m_names == NULL
7865 18 : || !bitmap_bit_p (large_huge.m_names,
7866 9 : SSA_NAME_VERSION (arg)))
7867 310 : continue;
7868 : }
7869 327 : int p2 = var_to_partition (large_huge.m_map, arg);
7870 327 : if (p1 == p2)
7871 310 : continue;
7872 17 : gcc_assert (large_huge.m_vars[p2] != NULL_TREE);
7873 17 : tree v2 = large_huge.m_vars[p2];
7874 17 : if (VAR_P (v1) && VAR_P (v2))
7875 17 : g = gimple_build_assign (v1, v2);
7876 0 : else if (VAR_P (v1))
7877 0 : g = gimple_build_assign (v1, build1 (VIEW_CONVERT_EXPR,
7878 0 : TREE_TYPE (v1), v2));
7879 0 : else if (VAR_P (v2))
7880 0 : g = gimple_build_assign (build1 (VIEW_CONVERT_EXPR,
7881 0 : TREE_TYPE (v2), v1), v2);
7882 : else
7883 : {
7884 0 : if (atype == NULL_TREE
7885 0 : || !tree_int_cst_equal (TYPE_SIZE (atype),
7886 0 : TYPE_SIZE (TREE_TYPE (lhs))))
7887 : {
7888 0 : unsigned HOST_WIDE_INT nelts
7889 0 : = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (lhs)))
7890 0 : / limb_prec;
7891 0 : atype
7892 0 : = build_array_type_nelts (large_huge.m_limb_type,
7893 0 : nelts);
7894 : }
7895 0 : g = gimple_build_assign (build1 (VIEW_CONVERT_EXPR,
7896 : atype, v1),
7897 : build1 (VIEW_CONVERT_EXPR,
7898 : atype, v2));
7899 : }
7900 17 : gsi_insert_on_edge (e, g);
7901 17 : edge_insertions = true;
7902 17 : break;
7903 : }
7904 : }
7905 : }
7906 : }
7907 :
7908 7141 : if (large_huge.m_names || has_large_huge)
7909 : {
7910 5676 : gimple *nop = NULL;
7911 372600 : for (i = 0; i < num_ssa_names; ++i)
7912 : {
7913 366924 : tree s = ssa_name (i);
7914 366924 : if (s == NULL_TREE)
7915 14884 : continue;
7916 352040 : tree type = TREE_TYPE (s);
7917 352040 : if (TREE_CODE (type) == COMPLEX_TYPE)
7918 16870 : type = TREE_TYPE (type);
7919 352040 : if (TREE_CODE (type) == BITINT_TYPE
7920 352040 : && bitint_precision_kind (type) >= bitint_prec_large)
7921 : {
7922 39331 : if (large_huge.m_preserved
7923 43805 : && bitmap_bit_p (large_huge.m_preserved,
7924 6809 : SSA_NAME_VERSION (s)))
7925 2335 : continue;
7926 34661 : gimple *g = SSA_NAME_DEF_STMT (s);
7927 34661 : if (gimple_code (g) == GIMPLE_NOP)
7928 : {
7929 9187 : if (SSA_NAME_VAR (s))
7930 4951 : set_ssa_default_def (cfun, SSA_NAME_VAR (s), NULL_TREE);
7931 9187 : release_ssa_name (s);
7932 9187 : continue;
7933 : }
7934 25474 : if (gimple_bb (g) == NULL)
7935 : {
7936 2 : release_ssa_name (s);
7937 2 : continue;
7938 : }
7939 25472 : if (gimple_code (g) != GIMPLE_ASM)
7940 : {
7941 25471 : gimple_stmt_iterator gsi = gsi_for_stmt (g);
7942 25471 : bool save_vta = flag_var_tracking_assignments;
7943 25471 : flag_var_tracking_assignments = false;
7944 25471 : gsi_remove (&gsi, true);
7945 25471 : flag_var_tracking_assignments = save_vta;
7946 : }
7947 25472 : if (nop == NULL)
7948 4753 : nop = gimple_build_nop ();
7949 25472 : SSA_NAME_DEF_STMT (s) = nop;
7950 25472 : release_ssa_name (s);
7951 : }
7952 : }
7953 5676 : if (optimize)
7954 2944 : disable_ranger (cfun);
7955 : }
7956 :
7957 7141 : if (edge_insertions)
7958 33 : gsi_commit_edge_inserts ();
7959 :
7960 : /* Fix up arguments of ECF_RETURNS_TWICE calls. Those were temporarily
7961 : inserted before the call, but that is invalid IL, so move them to the
7962 : right place and add corresponding PHIs. */
7963 7141 : if (!large_huge.m_returns_twice_calls.is_empty ())
7964 : {
7965 9 : auto_vec<gimple *, 16> arg_stmts;
7966 29 : while (!large_huge.m_returns_twice_calls.is_empty ())
7967 : {
7968 11 : gimple *stmt = large_huge.m_returns_twice_calls.pop ();
7969 11 : gimple_stmt_iterator gsi = gsi_after_labels (gimple_bb (stmt));
7970 36 : while (gsi_stmt (gsi) != stmt)
7971 : {
7972 25 : if (is_gimple_debug (gsi_stmt (gsi)))
7973 2 : gsi_next (&gsi);
7974 : else
7975 : {
7976 23 : arg_stmts.safe_push (gsi_stmt (gsi));
7977 23 : gsi_remove (&gsi, false);
7978 : }
7979 : }
7980 11 : gimple *g;
7981 11 : basic_block bb = NULL;
7982 11 : edge e = NULL, ead = NULL;
7983 34 : FOR_EACH_VEC_ELT (arg_stmts, i, g)
7984 : {
7985 23 : gsi_safe_insert_before (&gsi, g);
7986 23 : if (i == 0)
7987 : {
7988 11 : bb = gimple_bb (stmt);
7989 11 : gcc_checking_assert (EDGE_COUNT (bb->preds) == 2);
7990 11 : e = EDGE_PRED (bb, 0);
7991 11 : ead = EDGE_PRED (bb, 1);
7992 11 : if ((ead->flags & EDGE_ABNORMAL) == 0)
7993 0 : std::swap (e, ead);
7994 11 : gcc_checking_assert ((e->flags & EDGE_ABNORMAL) == 0
7995 : && (ead->flags & EDGE_ABNORMAL));
7996 : }
7997 23 : tree lhs = gimple_assign_lhs (g);
7998 23 : tree arg = lhs;
7999 23 : gphi *phi = create_phi_node (copy_ssa_name (arg), bb);
8000 23 : add_phi_arg (phi, arg, e, UNKNOWN_LOCATION);
8001 23 : tree var = create_tmp_reg (TREE_TYPE (arg));
8002 23 : suppress_warning (var, OPT_Wuninitialized);
8003 23 : arg = get_or_create_ssa_default_def (cfun, var);
8004 23 : SSA_NAME_OCCURS_IN_ABNORMAL_PHI (arg) = 1;
8005 23 : add_phi_arg (phi, arg, ead, UNKNOWN_LOCATION);
8006 23 : arg = gimple_phi_result (phi);
8007 23 : SSA_NAME_OCCURS_IN_ABNORMAL_PHI (arg) = 1;
8008 23 : imm_use_iterator iter;
8009 23 : gimple *use_stmt;
8010 92 : FOR_EACH_IMM_USE_STMT (use_stmt, iter, lhs)
8011 : {
8012 46 : if (use_stmt == phi)
8013 23 : continue;
8014 23 : gcc_checking_assert (use_stmt == stmt);
8015 23 : use_operand_p use_p;
8016 69 : FOR_EACH_IMM_USE_ON_STMT (use_p, iter)
8017 23 : SET_USE (use_p, arg);
8018 23 : }
8019 : }
8020 11 : update_stmt (stmt);
8021 11 : arg_stmts.truncate (0);
8022 : }
8023 9 : }
8024 :
8025 7141 : return ret;
8026 7141 : }
8027 :
8028 : namespace {
8029 :
/* Pass metadata for the "bitintlower" pass run inside the normal
   optimization pipeline.  It requires SSA form (PROP_ssa) and records
   PROP_gimple_lbitint so that the -O0 variant below can gate itself
   off once lowering has already happened.  */
const pass_data pass_data_lower_bitint =
{
  GIMPLE_PASS, /* type */
  "bitintlower", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  PROP_ssa, /* properties_required */
  PROP_gimple_lbitint, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};
8042 :
8043 : class pass_lower_bitint : public gimple_opt_pass
8044 : {
8045 : public:
8046 571444 : pass_lower_bitint (gcc::context *ctxt)
8047 1142888 : : gimple_opt_pass (pass_data_lower_bitint, ctxt)
8048 : {}
8049 :
8050 : /* opt_pass methods: */
8051 285722 : opt_pass * clone () final override { return new pass_lower_bitint (m_ctxt); }
8052 1044129 : unsigned int execute (function *) final override
8053 : {
8054 1044129 : return gimple_lower_bitint ();
8055 : }
8056 :
8057 : }; // class pass_lower_bitint
8058 :
8059 : } // anon namespace
8060 :
/* Factory returning a fresh instance of the bitintlower pass.  */
gimple_opt_pass *
make_pass_lower_bitint (gcc::context *ctxt)
{
  return new pass_lower_bitint (ctxt);
}
8066 :
8067 :
8068 : namespace {
8069 :
/* Pass metadata for the "bitintlower0" fallback pass.  Unlike the
   pipeline variant above it only requires a CFG (PROP_cfg), not SSA,
   because at -O0 or after errors the SSA-building optimization passes
   may not have run.  It provides the same PROP_gimple_lbitint.  */
const pass_data pass_data_lower_bitint_O0 =
{
  GIMPLE_PASS, /* type */
  "bitintlower0", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  PROP_cfg, /* properties_required */
  PROP_gimple_lbitint, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};
8082 :
8083 : class pass_lower_bitint_O0 : public gimple_opt_pass
8084 : {
8085 : public:
8086 285722 : pass_lower_bitint_O0 (gcc::context *ctxt)
8087 571444 : : gimple_opt_pass (pass_data_lower_bitint_O0, ctxt)
8088 : {}
8089 :
8090 : /* opt_pass methods: */
8091 1472150 : bool gate (function *fun) final override
8092 : {
8093 : /* With errors, normal optimization passes are not run. If we don't
8094 : lower bitint operations at all, rtl expansion will abort. */
8095 1472150 : return !(fun->curr_properties & PROP_gimple_lbitint);
8096 : }
8097 :
8098 428124 : unsigned int execute (function *) final override
8099 : {
8100 428124 : return gimple_lower_bitint ();
8101 : }
8102 :
8103 : }; // class pass_lower_bitint_O0
8104 :
8105 : } // anon namespace
8106 :
/* Factory returning a fresh instance of the bitintlower0 pass.  */
gimple_opt_pass *
make_pass_lower_bitint_O0 (gcc::context *ctxt)
{
  return new pass_lower_bitint_O0 (ctxt);
}
|