Line data Source code
1 : /* Lower _BitInt(N) operations to scalar operations.
2 : Copyright (C) 2023-2026 Free Software Foundation, Inc.
3 : Contributed by Jakub Jelinek <jakub@redhat.com>.
4 :
5 : This file is part of GCC.
6 :
7 : GCC is free software; you can redistribute it and/or modify it
8 : under the terms of the GNU General Public License as published by the
9 : Free Software Foundation; either version 3, or (at your option) any
10 : later version.
11 :
12 : GCC is distributed in the hope that it will be useful, but WITHOUT
13 : ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 : FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 : for more details.
16 :
17 : You should have received a copy of the GNU General Public License
18 : along with GCC; see the file COPYING3. If not see
19 : <http://www.gnu.org/licenses/>. */
20 :
21 : #include "config.h"
22 : #include "system.h"
23 : #include "coretypes.h"
24 : #include "backend.h"
25 : #include "rtl.h"
26 : #include "tree.h"
27 : #include "gimple.h"
28 : #include "cfghooks.h"
29 : #include "tree-pass.h"
30 : #include "ssa.h"
31 : #include "fold-const.h"
32 : #include "gimplify.h"
33 : #include "gimple-iterator.h"
34 : #include "tree-cfg.h"
35 : #include "tree-dfa.h"
36 : #include "cfgloop.h"
37 : #include "cfganal.h"
38 : #include "target.h"
39 : #include "tree-ssa-live.h"
40 : #include "tree-ssa-coalesce.h"
41 : #include "domwalk.h"
42 : #include "memmodel.h"
43 : #include "optabs.h"
44 : #include "varasm.h"
45 : #include "gimple-range.h"
46 : #include "value-range.h"
47 : #include "langhooks.h"
48 : #include "gimplify-me.h"
49 : #include "diagnostic-core.h"
50 : #include "tree-eh.h"
51 : #include "tree-pretty-print.h"
52 : #include "alloc-pool.h"
53 : #include "tree-into-ssa.h"
54 : #include "tree-cfgcleanup.h"
55 : #include "tree-switch-conversion.h"
56 : #include "ubsan.h"
57 : #include "stor-layout.h"
58 : #include "gimple-lower-bitint.h"
59 :
60 : /* Split BITINT_TYPE precisions in 4 categories. Small _BitInt, where
61 : target hook says it is a single limb, middle _BitInt which per ABI
62 : does not, but there is some INTEGER_TYPE in which arithmetics can be
63 : performed (operations on such _BitInt are lowered to casts to that
64 : arithmetic type and cast back; e.g. on x86_64 limb is DImode, but
65 : target supports TImode, so _BitInt(65) to _BitInt(128) are middle
66 : ones), large _BitInt which should be handled by straight line code and
67 : finally huge _BitInt which should be handled by loops over the limbs. */
68 :
enum bitint_prec_kind {
  /* Fits in a single limb per the target ABI.  */
  bitint_prec_small,
  /* More than one limb, but some INTEGER_TYPE exists in which the
     arithmetics can be performed directly (see comment above).  */
  bitint_prec_middle,
  /* Multiple limbs, lowered to straight line code.  */
  bitint_prec_large,
  /* Many limbs, lowered to loops over the limbs.  */
  bitint_prec_huge
};

/* Caches to speed up bitint_precision_kind.  */

/* Largest precision categorized as small so far and smallest precisions
   seen so far for the middle/large/huge categories (0 when unknown).  */
static int small_max_prec, mid_min_prec, large_min_prec, huge_min_prec;
/* Precision of the limb mode and of the ABI limb mode.  */
static int limb_prec, abi_limb_prec;
/* Whether the target orders limbs most significant first.  */
static bool bitint_big_endian;
/* How the ABI extends values above their precision (see bitint_ext).  */
static enum bitint_ext bitint_extended;
82 :
/* Categorize _BitInt(PREC) as small, middle, large or huge.
   The first call for a given category queries the target hook and fills
   in the boundary caches above; later calls take the fast paths.  */

static bitint_prec_kind
bitint_precision_kind (int prec)
{
  /* Fast paths using cached category boundaries.  */
  if (prec <= small_max_prec)
    return bitint_prec_small;
  if (huge_min_prec && prec >= huge_min_prec)
    return bitint_prec_huge;
  if (large_min_prec && prec >= large_min_prec)
    return bitint_prec_large;
  if (mid_min_prec && prec >= mid_min_prec)
    return bitint_prec_middle;

  /* Cache miss: ask the target about this precision and update the
     cached boundaries and limb parameters.  */
  struct bitint_info info;
  bool ok = targetm.c.bitint_type_info (prec, &info);
  gcc_assert (ok);
  scalar_int_mode limb_mode = as_a <scalar_int_mode> (info.limb_mode);
  if (prec <= GET_MODE_PRECISION (limb_mode))
    {
      small_max_prec = prec;
      return bitint_prec_small;
    }
  bitint_big_endian = info.big_endian;
  bitint_extended = info.extended;
  /* When limb and ABI limb modes agree, full extension degenerates to
     partial extension of the (single kind of) most significant limb.  */
  if (info.limb_mode == info.abi_limb_mode && bitint_extended == bitint_ext_full)
    bitint_extended = bitint_ext_partial;
  if (!large_min_prec
      && GET_MODE_PRECISION (limb_mode) <= MAX_FIXED_MODE_SIZE)
    large_min_prec = MAX_FIXED_MODE_SIZE + 1;
  if (!limb_prec)
    limb_prec = GET_MODE_PRECISION (limb_mode);
  if (!abi_limb_prec)
    abi_limb_prec
      = GET_MODE_PRECISION (as_a <scalar_int_mode> (info.abi_limb_mode));
  /* For bitint_ext_full with different limb_mode from abi_limb_mode we
     currently only support abi_limb_mode twice the precision of
     limb_mode, and don't support big endian in that case either.  */
  gcc_assert (bitint_extended != bitint_ext_full
	      || (abi_limb_prec == 2 * limb_prec
		  && !bitint_big_endian));
  if (!huge_min_prec)
    {
      /* NOTE(review): with this boundary a precision of exactly 4 limbs
	 is already categorized huge (handled by a loop rather than
	 straight line code) — confirm the boundary is intended to be
	 inclusive here.  */
      if (4 * limb_prec >= MAX_FIXED_MODE_SIZE)
	huge_min_prec = 4 * limb_prec;
      else
	huge_min_prec = MAX_FIXED_MODE_SIZE + 1;
    }
  if (prec <= MAX_FIXED_MODE_SIZE)
    {
      if (!mid_min_prec || prec < mid_min_prec)
	mid_min_prec = prec;
      return bitint_prec_middle;
    }
  if (huge_min_prec && prec >= huge_min_prec)
    return bitint_prec_huge;
  return bitint_prec_large;
}
141 :
142 : /* Same for a TYPE. */
143 :
144 : static bitint_prec_kind
145 478250 : bitint_precision_kind (tree type)
146 : {
147 478250 : return bitint_precision_kind (TYPE_PRECISION (type));
148 : }
149 :
150 : /* Return minimum precision needed to describe INTEGER_CST
151 : CST. All bits above that precision up to precision of
152 : TREE_TYPE (CST) are cleared if EXT is set to 0, or set
153 : if EXT is set to -1. */
154 :
155 : static unsigned
156 5313 : bitint_min_cst_precision (tree cst, int &ext)
157 : {
158 5313 : ext = tree_int_cst_sgn (cst) < 0 ? -1 : 0;
159 5313 : wide_int w = wi::to_wide (cst);
160 5313 : unsigned min_prec = wi::min_precision (w, TYPE_SIGN (TREE_TYPE (cst)));
161 : /* For signed values, we don't need to count the sign bit,
162 : we'll use constant 0 or -1 for the upper bits. */
163 5313 : if (!TYPE_UNSIGNED (TREE_TYPE (cst)))
164 3239 : --min_prec;
165 : else
166 : {
167 : /* For unsigned values, also try signed min_precision
168 : in case the constant has lots of most significant bits set. */
169 2074 : unsigned min_prec2 = wi::min_precision (w, SIGNED) - 1;
170 2074 : if (min_prec2 < min_prec)
171 : {
172 990 : ext = -1;
173 990 : return min_prec2;
174 : }
175 : }
176 : return min_prec;
177 5313 : }
178 :
179 : namespace {
180 :
181 : /* If OP is middle _BitInt, cast it to corresponding INTEGER_TYPE
182 : cached in TYPE and return it. */
183 :
184 : tree
185 7766 : maybe_cast_middle_bitint (gimple_stmt_iterator *gsi, tree op, tree &type)
186 : {
187 7766 : if (op == NULL_TREE
188 7738 : || TREE_CODE (TREE_TYPE (op)) != BITINT_TYPE
189 14627 : || bitint_precision_kind (TREE_TYPE (op)) != bitint_prec_middle)
190 908 : return op;
191 :
192 6858 : int prec = TYPE_PRECISION (TREE_TYPE (op));
193 6858 : int uns = TYPE_UNSIGNED (TREE_TYPE (op));
194 6858 : if (type == NULL_TREE
195 2537 : || TYPE_PRECISION (type) != prec
196 9395 : || TYPE_UNSIGNED (type) != uns)
197 4321 : type = build_nonstandard_integer_type (prec, uns);
198 :
199 6858 : if (TREE_CODE (op) != SSA_NAME)
200 : {
201 2349 : tree nop = fold_convert (type, op);
202 2349 : if (is_gimple_val (nop))
203 : return nop;
204 : }
205 :
206 4509 : tree nop = make_ssa_name (type);
207 4509 : gimple *g = gimple_build_assign (nop, NOP_EXPR, op);
208 4509 : gsi_insert_before (gsi, g, GSI_SAME_STMT);
209 4509 : return nop;
210 : }
211 :
212 : /* Return true if STMT can be handled in a loop from least to most
213 : significant limb together with its dependencies. */
214 :
215 : bool
216 47544 : mergeable_op (gimple *stmt)
217 : {
218 47544 : if (!is_gimple_assign (stmt))
219 : return false;
220 38926 : switch (gimple_assign_rhs_code (stmt))
221 : {
222 : case PLUS_EXPR:
223 : case MINUS_EXPR:
224 : case NEGATE_EXPR:
225 : case BIT_AND_EXPR:
226 : case BIT_IOR_EXPR:
227 : case BIT_XOR_EXPR:
228 : case BIT_NOT_EXPR:
229 : case SSA_NAME:
230 : case INTEGER_CST:
231 : case BIT_FIELD_REF:
232 : return true;
233 407 : case LSHIFT_EXPR:
234 407 : {
235 407 : tree cnt = gimple_assign_rhs2 (stmt);
236 407 : if (tree_fits_uhwi_p (cnt)
237 198 : && tree_to_uhwi (cnt) < (unsigned HOST_WIDE_INT) limb_prec)
238 : return true;
239 : }
240 : break;
241 6225 : CASE_CONVERT:
242 6225 : case VIEW_CONVERT_EXPR:
243 6225 : {
244 6225 : tree lhs_type = TREE_TYPE (gimple_assign_lhs (stmt));
245 6225 : tree rhs_type = TREE_TYPE (gimple_assign_rhs1 (stmt));
246 6225 : if (TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME
247 6187 : && TREE_CODE (lhs_type) == BITINT_TYPE
248 3642 : && TREE_CODE (rhs_type) == BITINT_TYPE
249 3295 : && bitint_precision_kind (lhs_type) >= bitint_prec_large
250 3259 : && bitint_precision_kind (rhs_type) >= bitint_prec_large
251 9339 : && (CEIL (TYPE_PRECISION (lhs_type), limb_prec)
252 3114 : == CEIL (TYPE_PRECISION (rhs_type), limb_prec)))
253 : {
254 2176 : if (TYPE_PRECISION (rhs_type) >= TYPE_PRECISION (lhs_type))
255 : return true;
256 168 : if ((unsigned) TYPE_PRECISION (lhs_type) % (2 * limb_prec) != 0)
257 : return true;
258 17 : if (bitint_precision_kind (lhs_type) == bitint_prec_large)
259 : return true;
260 : }
261 : break;
262 : }
263 : default:
264 : break;
265 : }
266 : return false;
267 : }
268 :
269 : /* Return non-zero if stmt is .{ADD,SUB,MUL}_OVERFLOW call with
270 : _Complex large/huge _BitInt lhs which has at most two immediate uses,
271 : at most one use in REALPART_EXPR stmt in the same bb and exactly one
272 : IMAGPART_EXPR use in the same bb with a single use which casts it to
273 : non-BITINT_TYPE integral type. If there is a REALPART_EXPR use,
274 : return 2. Such cases (most common uses of those builtins) can be
275 : optimized by marking their lhs and lhs of IMAGPART_EXPR and maybe lhs
276 : of REALPART_EXPR as not needed to be backed up by a stack variable.
277 : For .UBSAN_CHECK_{ADD,SUB,MUL} return 3. */
278 :
279 : int
280 21045 : optimizable_arith_overflow (gimple *stmt)
281 : {
282 21045 : bool is_ubsan = false;
283 21045 : if (!is_gimple_call (stmt) || !gimple_call_internal_p (stmt))
284 : return false;
285 5171 : switch (gimple_call_internal_fn (stmt))
286 : {
287 : case IFN_ADD_OVERFLOW:
288 : case IFN_SUB_OVERFLOW:
289 : case IFN_MUL_OVERFLOW:
290 : break;
291 48 : case IFN_UBSAN_CHECK_ADD:
292 48 : case IFN_UBSAN_CHECK_SUB:
293 48 : case IFN_UBSAN_CHECK_MUL:
294 48 : is_ubsan = true;
295 48 : break;
296 : default:
297 : return 0;
298 : }
299 5171 : tree lhs = gimple_call_lhs (stmt);
300 5171 : if (!lhs)
301 : return 0;
302 5171 : if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
303 : return 0;
304 5171 : tree type = is_ubsan ? TREE_TYPE (lhs) : TREE_TYPE (TREE_TYPE (lhs));
305 5171 : if (TREE_CODE (type) != BITINT_TYPE
306 5171 : || bitint_precision_kind (type) < bitint_prec_large)
307 0 : return 0;
308 :
309 5171 : if (is_ubsan)
310 : {
311 48 : use_operand_p use_p;
312 48 : gimple *use_stmt;
313 48 : if (!single_imm_use (lhs, &use_p, &use_stmt)
314 48 : || gimple_bb (use_stmt) != gimple_bb (stmt)
315 48 : || !gimple_store_p (use_stmt)
316 48 : || !is_gimple_assign (use_stmt)
317 48 : || gimple_has_volatile_ops (use_stmt)
318 96 : || stmt_ends_bb_p (use_stmt))
319 0 : return 0;
320 : return 3;
321 : }
322 :
323 5123 : imm_use_iterator ui;
324 5123 : use_operand_p use_p;
325 5123 : int seen = 0;
326 5123 : gimple *realpart = NULL, *cast = NULL;
327 19827 : FOR_EACH_IMM_USE_FAST (use_p, ui, lhs)
328 : {
329 9777 : gimple *g = USE_STMT (use_p);
330 9777 : if (is_gimple_debug (g))
331 0 : continue;
332 9777 : if (!is_gimple_assign (g) || gimple_bb (g) != gimple_bb (stmt))
333 : return 0;
334 9777 : if (gimple_assign_rhs_code (g) == REALPART_EXPR)
335 : {
336 4654 : if ((seen & 1) != 0)
337 : return 0;
338 4654 : seen |= 1;
339 4654 : realpart = g;
340 : }
341 5123 : else if (gimple_assign_rhs_code (g) == IMAGPART_EXPR)
342 : {
343 5123 : if ((seen & 2) != 0)
344 196 : return 0;
345 5123 : seen |= 2;
346 :
347 5123 : use_operand_p use2_p;
348 5123 : gimple *use_stmt;
349 5123 : tree lhs2 = gimple_assign_lhs (g);
350 5123 : if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs2))
351 : return 0;
352 5123 : if (!single_imm_use (lhs2, &use2_p, &use_stmt)
353 5123 : || gimple_bb (use_stmt) != gimple_bb (stmt)
354 10246 : || !gimple_assign_cast_p (use_stmt))
355 : return 0;
356 :
357 4931 : lhs2 = gimple_assign_lhs (use_stmt);
358 9862 : if (!INTEGRAL_TYPE_P (TREE_TYPE (lhs2))
359 9862 : || TREE_CODE (TREE_TYPE (lhs2)) == BITINT_TYPE)
360 : return 0;
361 4927 : cast = use_stmt;
362 : }
363 : else
364 : return 0;
365 196 : }
366 4927 : if ((seen & 2) == 0)
367 : return 0;
368 4927 : if (seen == 3)
369 : {
370 : /* Punt if the cast stmt appears before realpart stmt, because
371 : if both appear, the lowering wants to emit all the code
372 : at the location of realpart stmt. */
373 4654 : gimple_stmt_iterator gsi = gsi_for_stmt (realpart);
374 4654 : unsigned int cnt = 0;
375 4657 : do
376 : {
377 4657 : gsi_prev_nondebug (&gsi);
378 4657 : if (gsi_end_p (gsi) || gsi_stmt (gsi) == cast)
379 : return 0;
380 4654 : if (gsi_stmt (gsi) == stmt)
381 : return 2;
382 : /* If realpart is too far from stmt, punt as well.
383 : Usually it will appear right after it. */
384 3 : if (++cnt == 32)
385 : return 0;
386 : }
387 : while (1);
388 : }
389 : return 1;
390 : }
391 :
392 : /* If STMT is some kind of comparison (GIMPLE_COND, comparison assignment)
393 : comparing large/huge _BitInt types, return the comparison code and if
394 : non-NULL fill in the comparison operands to *POP1 and *POP2. */
395 :
396 : tree_code
397 35742 : comparison_op (gimple *stmt, tree *pop1, tree *pop2)
398 : {
399 35742 : tree op1 = NULL_TREE, op2 = NULL_TREE;
400 35742 : tree_code code = ERROR_MARK;
401 35742 : if (gimple_code (stmt) == GIMPLE_COND)
402 : {
403 6511 : code = gimple_cond_code (stmt);
404 6511 : op1 = gimple_cond_lhs (stmt);
405 6511 : op2 = gimple_cond_rhs (stmt);
406 : }
407 29231 : else if (is_gimple_assign (stmt))
408 : {
409 29216 : code = gimple_assign_rhs_code (stmt);
410 29216 : op1 = gimple_assign_rhs1 (stmt);
411 29216 : if (TREE_CODE_CLASS (code) == tcc_comparison
412 29216 : || TREE_CODE_CLASS (code) == tcc_binary)
413 2184 : op2 = gimple_assign_rhs2 (stmt);
414 : }
415 35742 : if (TREE_CODE_CLASS (code) != tcc_comparison)
416 : return ERROR_MARK;
417 7285 : tree type = TREE_TYPE (op1);
418 7285 : if (TREE_CODE (type) != BITINT_TYPE
419 7285 : || bitint_precision_kind (type) < bitint_prec_large)
420 0 : return ERROR_MARK;
421 7285 : if (pop1)
422 : {
423 7223 : *pop1 = op1;
424 7223 : *pop2 = op2;
425 : }
426 : return code;
427 : }
428 :
/* Class used during large/huge _BitInt lowering containing all the
   state for the methods.  */

struct bitint_large_huge
{
  bitint_large_huge ()
    : m_names (NULL), m_loads (NULL), m_preserved (NULL),
      m_single_use_names (NULL), m_map (NULL), m_vars (NULL),
      m_limb_type (NULL_TREE), m_data (vNULL),
      m_returns_twice_calls (vNULL) {}

  ~bitint_large_huge ();

  /* Code emission and CFG shaping helpers.  */
  void insert_before (gimple *);
  tree limb_access_type (tree, tree);
  tree limb_access (tree, tree, tree, bool, bool = false);
  tree build_bit_field_ref (tree, tree, unsigned HOST_WIDE_INT,
			    unsigned HOST_WIDE_INT);
  void if_then (gimple *, profile_probability, edge &, edge &);
  void if_then_else (gimple *, profile_probability, edge &, edge &);
  void if_then_if_then_else (gimple *g, gimple *,
			     profile_probability, profile_probability,
			     edge &, edge &, edge &);
  /* Per-limb lowering of operands and statements.  */
  tree handle_operand (tree, tree);
  tree prepare_data_in_out (tree, tree, tree *, tree = NULL_TREE);
  tree add_cast (tree, tree);
  tree handle_plus_minus (tree_code, tree, tree, tree);
  tree handle_lshift (tree, tree, tree);
  tree handle_cast (tree, tree, tree);
  tree handle_bit_field_ref (tree, tree);
  tree handle_load (gimple *, tree);
  tree handle_stmt (gimple *, tree);
  tree handle_operand_addr (tree, gimple *, int *, int *);
  tree create_loop (tree, tree *);
  /* Lowering entry points for whole statements.  */
  tree lower_mergeable_stmt (gimple *, tree_code &, tree, tree);
  tree lower_comparison_stmt (gimple *, tree_code &, tree, tree);
  void lower_shift_stmt (tree, gimple *);
  void lower_muldiv_stmt (tree, gimple *);
  void lower_float_conv_stmt (tree, gimple *);
  tree arith_overflow_extract_bits (unsigned int, unsigned int, tree,
				    unsigned int, bool);
  void finish_arith_overflow (tree, tree, tree, tree, tree, tree, gimple *,
			      unsigned, tree_code);
  void lower_addsub_overflow (tree, gimple *);
  void lower_mul_overflow (tree, gimple *);
  void lower_cplxpart_stmt (tree, gimple *);
  void lower_complexexpr_stmt (gimple *);
  void lower_bit_query (gimple *);
  void lower_call (tree, gimple *);
  void lower_asm (gimple *);
  void lower_stmt (gimple *);

  /* Bitmap of large/huge _BitInt SSA_NAMEs except those can be
     merged with their uses.  */
  bitmap m_names;
  /* Subset of those for lhs of load statements.  These will be
     cleared in m_names if the loads will be mergeable with all
     their uses.  */
  bitmap m_loads;
  /* Bitmap of large/huge _BitInt SSA_NAMEs that should survive
     to later passes (arguments or return values of calls).  */
  bitmap m_preserved;
  /* Subset of m_names which have a single use.  As the lowering
     can replace various original statements with their lowered
     form even before it is done iterating over all basic blocks,
     testing has_single_use for the purpose of emitting clobbers
     doesn't work properly.  */
  bitmap m_single_use_names;
  /* Used for coalescing/partitioning of large/huge _BitInt SSA_NAMEs
     set in m_names.  */
  var_map m_map;
  /* Mapping of the partitions to corresponding decls.  */
  tree *m_vars;
  /* Unsigned integer type with limb precision.  */
  tree m_limb_type;
  /* Its TYPE_SIZE_UNIT.  */
  unsigned HOST_WIDE_INT m_limb_size;
  /* Location of a gimple stmt which is being currently lowered.  */
  location_t m_loc;
  /* Current stmt iterator where code is being lowered currently.  */
  gimple_stmt_iterator m_gsi;
  /* Statement after which any clobbers should be added if non-NULL.  */
  gimple *m_after_stmt;
  /* Set when creating loops to the loop header bb and its preheader.  */
  basic_block m_bb, m_preheader_bb;
  /* Stmt iterator after which initialization statements should be emitted.  */
  gimple_stmt_iterator m_init_gsi;
  /* Decl into which a mergeable statement stores result.  */
  tree m_lhs;
  /* handle_operand/handle_stmt can be invoked in various ways.

     lower_mergeable_stmt for large _BitInt calls those with constant
     idx only, expanding to straight line code, for huge _BitInt
     emits a loop from least significant limb upwards, where each loop
     iteration handles 2 limbs, plus there can be up to one full limb
     and one partial limb processed after the loop, where handle_operand
     and/or handle_stmt are called with constant idx.  m_upwards_2limb
     is set for this case, zero otherwise.  m_upwards is true if it
     is either large or huge _BitInt handled by lower_mergeable_stmt,
     i.e. indexes always increase.

     Another way is used by lower_comparison_stmt, which walks limbs
     from most significant to least significant, partial limb if any
     processed first with constant idx and then loop processing a single
     limb per iteration with non-constant idx.

     Another way is used in lower_shift_stmt, where for LSHIFT_EXPR
     destination limbs are processed from most significant to least
     significant or for RSHIFT_EXPR the other way around, in loops or
     straight line code, but idx usually is non-constant (so from
     handle_operand/handle_stmt POV random access).  The LSHIFT_EXPR
     handling there can access even partial limbs using non-constant
     idx (then m_var_msb should be true, for all the other cases
     including lower_mergeable_stmt/lower_comparison_stmt that is
     not the case and so m_var_msb should be false).

     m_first should be set the first time handle_operand/handle_stmt
     is called and clear when it is called for some other limb with
     the same argument.  If the lowering of an operand (e.g. INTEGER_CST)
     or statement (e.g. +/-/<< with < limb_prec constant) needs some
     state between the different calls, when m_first is true it should
     push some trees to m_data vector and also make sure m_data_cnt is
     incremented by how many trees were pushed, and when m_first is
     false, it can use the m_data[m_data_cnt] etc. data or update them,
     just needs to bump m_data_cnt by the same amount as when it was
     called with m_first set.  The toplevel calls to
     handle_operand/handle_stmt should set m_data_cnt to 0 and truncate
     m_data vector when setting m_first to true.

     m_cast_conditional and m_bitfld_load are used when handling a
     bit-field load inside of a widening cast.  handle_cast sometimes
     needs to do runtime comparisons and handle_operand only conditionally
     or even in two separate conditional blocks for one idx (once with
     constant index after comparing the runtime one for equality with the
     constant).  In these cases, m_cast_conditional is set to true and
     the bit-field load then communicates its m_data_cnt to handle_cast
     using m_bitfld_load.  */
  bool m_first;
  bool m_var_msb;
  unsigned m_upwards_2limb;
  bool m_upwards;
  bool m_cast_conditional;
  unsigned m_bitfld_load;
  /* Per-operand state shared between the limb-wise calls; see the big
     comment above.  */
  vec<tree> m_data;
  unsigned int m_data_cnt;
  /* Calls to returns_twice functions seen during lowering — presumably
     needing special fixup afterwards; uses are outside this view.  */
  vec<gimple *> m_returns_twice_calls;
};
576 :
577 7222 : bitint_large_huge::~bitint_large_huge ()
578 : {
579 7222 : BITMAP_FREE (m_names);
580 7222 : BITMAP_FREE (m_loads);
581 7222 : BITMAP_FREE (m_preserved);
582 7222 : BITMAP_FREE (m_single_use_names);
583 7222 : if (m_map)
584 5576 : delete_var_map (m_map);
585 7222 : XDELETEVEC (m_vars);
586 7222 : m_data.release ();
587 7222 : m_returns_twice_calls.release ();
588 7222 : }
589 :
590 : /* Insert gimple statement G before current location
591 : and set its gimple_location. */
592 :
593 : void
594 360776 : bitint_large_huge::insert_before (gimple *g)
595 : {
596 360776 : gimple_set_location (g, m_loc);
597 360776 : gsi_insert_before (&m_gsi, g, GSI_SAME_STMT);
598 360776 : }
599 :
600 : /* Return type for accessing limb IDX of BITINT_TYPE TYPE.
601 : This is normally m_limb_type, except for a partial most
602 : significant limb if any. */
603 :
604 : tree
605 130157 : bitint_large_huge::limb_access_type (tree type, tree idx)
606 : {
607 130157 : if (type == NULL_TREE)
608 5868 : return m_limb_type;
609 124289 : unsigned HOST_WIDE_INT i = tree_to_uhwi (idx);
610 124289 : unsigned int prec = TYPE_PRECISION (type);
611 124289 : gcc_assert (i * limb_prec < prec
612 : || (bitint_extended == bitint_ext_full
613 : && abi_limb_prec > limb_prec
614 : && i * limb_prec
615 : < CEIL (prec, abi_limb_prec) * abi_limb_prec));
616 248578 : if (bitint_big_endian
617 124289 : ? (i != 0 || (prec % limb_prec) == 0)
618 124289 : : (i + 1) * limb_prec <= prec)
619 81261 : return m_limb_type;
620 : else
621 86056 : return build_nonstandard_integer_type (prec % limb_prec,
622 43028 : TYPE_UNSIGNED (type));
623 : }
624 :
/* Return a tree how to access limb IDX of VAR corresponding to BITINT_TYPE
   TYPE.  If WRITE_P is true, it will be a store, otherwise a read.
   ABI_LOAD_P selects the narrower partial-limb type for loads when the
   ABI extends values (see its use below).  */

tree
bitint_large_huge::limb_access (tree type, tree var, tree idx, bool write_p,
				bool abi_load_p)
{
  /* Type the caller will see for this limb: narrower for a constant
     index naming the partial most significant limb.  */
  tree atype = (tree_fits_uhwi_p (idx)
		? limb_access_type (type, idx) : m_limb_type);
  /* Type actually used for the memory access.  */
  tree ltype = (bitint_extended && abi_load_p) ? atype : m_limb_type;
  addr_space_t as = TYPE_ADDR_SPACE (TREE_TYPE (var));
  tree ret;
  if (DECL_P (var) && tree_fits_uhwi_p (idx))
    {
      /* Decl with constant index: build a MEM_REF at a constant byte
	 offset from &VAR.  */
      if (as != TYPE_ADDR_SPACE (ltype))
	ltype = build_qualified_type (ltype, TYPE_QUALS (ltype)
				      | ENCODE_QUAL_ADDR_SPACE (as));
      tree ptype = build_pointer_type (strip_array_types (TREE_TYPE (var)));
      unsigned HOST_WIDE_INT off = tree_to_uhwi (idx) * m_limb_size;
      if (bitint_big_endian)
	/* A narrower access sits at the end of the limb slot.  */
	off += m_limb_size - tree_to_uhwi (TYPE_SIZE_UNIT (ltype));
      ret = build2 (MEM_REF, ltype,
		    build_fold_addr_expr (var),
		    build_int_cst (ptype, off));
      TREE_THIS_VOLATILE (ret) = TREE_THIS_VOLATILE (var);
      TREE_SIDE_EFFECTS (ret) = TREE_SIDE_EFFECTS (var);
    }
  else if (TREE_CODE (var) == MEM_REF && tree_fits_uhwi_p (idx))
    {
      /* Existing MEM_REF with constant index: fold the limb offset into
	 the MEM_REF's constant offset operand.  */
      if (as != TYPE_ADDR_SPACE (ltype))
	ltype = build_qualified_type (ltype, TYPE_QUALS (ltype)
				      | ENCODE_QUAL_ADDR_SPACE (as));
      unsigned HOST_WIDE_INT off = tree_to_uhwi (idx) * m_limb_size;
      if (bitint_big_endian)
	off += m_limb_size - tree_to_uhwi (TYPE_SIZE_UNIT (ltype));
      ret
	= build2 (MEM_REF, ltype, unshare_expr (TREE_OPERAND (var, 0)),
		  size_binop (PLUS_EXPR, TREE_OPERAND (var, 1),
			      build_int_cst (TREE_TYPE (TREE_OPERAND (var, 1)),
					     off)));
      TREE_THIS_VOLATILE (ret) = TREE_THIS_VOLATILE (var);
      TREE_SIDE_EFFECTS (ret) = TREE_SIDE_EFFECTS (var);
      TREE_THIS_NOTRAP (ret) = TREE_THIS_NOTRAP (var);
    }
  else
    {
      /* Variable index: view VAR as an array of limbs and build an
	 ARRAY_REF indexed by IDX.  */
      ltype = m_limb_type;
      if (as != TYPE_ADDR_SPACE (ltype))
	ltype = build_qualified_type (ltype, TYPE_QUALS (ltype)
				      | ENCODE_QUAL_ADDR_SPACE (as));
      var = unshare_expr (var);
      if (TREE_CODE (TREE_TYPE (var)) != ARRAY_TYPE
	  || !useless_type_conversion_p (m_limb_type,
					 TREE_TYPE (TREE_TYPE (var))))
	{
	  unsigned HOST_WIDE_INT nelts
	    = CEIL (tree_to_uhwi (TYPE_SIZE (TREE_TYPE (var))), limb_prec);
	  tree atype = build_array_type_nelts (ltype, nelts);
	  var = build1 (VIEW_CONVERT_EXPR, atype, var);
	}
      ret = build4 (ARRAY_REF, ltype, var, idx, NULL_TREE, NULL_TREE);
    }
  if (!write_p && !useless_type_conversion_p (atype, ltype))
    {
      /* Reads of a partial limb through the full limb type: load the
	 full limb into an SSA name and cast it to the narrow type.  */
      gimple *g = gimple_build_assign (make_ssa_name (m_limb_type), ret);
      insert_before (g);
      ret = gimple_assign_lhs (g);
      ret = build1 (NOP_EXPR, atype, ret);
    }
  return ret;
}
698 :
699 : /* Build a BIT_FIELD_REF to access BITSIZE bits with FTYPE type at
700 : offset BITPOS inside of OBJ. */
701 :
702 : tree
703 265 : bitint_large_huge::build_bit_field_ref (tree ftype, tree obj,
704 : unsigned HOST_WIDE_INT bitsize,
705 : unsigned HOST_WIDE_INT bitpos)
706 : {
707 530 : if (INTEGRAL_TYPE_P (TREE_TYPE (obj))
708 274 : && !type_has_mode_precision_p (TREE_TYPE (obj)))
709 : {
710 9 : unsigned HOST_WIDE_INT nelts
711 9 : = CEIL (tree_to_uhwi (TYPE_SIZE (TREE_TYPE (obj))), limb_prec);
712 9 : tree ltype = m_limb_type;
713 9 : addr_space_t as = TYPE_ADDR_SPACE (TREE_TYPE (obj));
714 9 : if (as != TYPE_ADDR_SPACE (ltype))
715 0 : ltype = build_qualified_type (ltype, TYPE_QUALS (ltype)
716 0 : | ENCODE_QUAL_ADDR_SPACE (as));
717 9 : tree atype = build_array_type_nelts (ltype, nelts);
718 9 : obj = build1 (VIEW_CONVERT_EXPR, atype, obj);
719 : }
720 265 : return build3 (BIT_FIELD_REF, ftype, obj, bitsize_int (bitsize),
721 265 : bitsize_int (bitpos));
722 : }
723 :
/* Emit a half diamond,
   if (COND)
     |\
     | \
     |  \
     | new_bb1
     |  /
     | /
     |/
   or if (COND) new_bb1;
   PROB is the probability that the condition is true.
   Updates m_gsi to start of new_bb1.
   Sets EDGE_TRUE to edge from new_bb1 to successor and
   EDGE_FALSE to the EDGE_FALSE_VALUE edge from if (COND) bb.  */

void
bitint_large_huge::if_then (gimple *cond, profile_probability prob,
			    edge &edge_true, edge &edge_false)
{
  insert_before (cond);
  /* Split after COND; e1->dest becomes new_bb1.  */
  edge e1 = split_block (gsi_bb (m_gsi), cond);
  /* Split again to create the join block (e2->dest).  */
  edge e2 = split_block (e1->dest, (gimple *) NULL);
  /* The false edge bypasses new_bb1 straight to the join block.  */
  edge e3 = make_edge (e1->src, e2->dest, EDGE_FALSE_VALUE);
  e1->flags = EDGE_TRUE_VALUE;
  e1->probability = prob;
  e3->probability = prob.invert ();
  /* The join block is now dominated by the condition block.  */
  set_immediate_dominator (CDI_DOMINATORS, e2->dest, e1->src);
  edge_true = e2;
  edge_false = e3;
  m_gsi = gsi_after_labels (e1->dest);
}
755 :
/* Emit a full diamond,
   if (COND)
     /\
    /  \
   /    \
   new_bb1 new_bb2
   \    /
    \  /
     \/
   or if (COND) new_bb2; else new_bb1;
   PROB is the probability that the condition is true.
   Updates m_gsi to start of new_bb2.
   Sets EDGE_TRUE to edge from new_bb1 to successor and
   EDGE_FALSE to the EDGE_FALSE_VALUE edge from if (COND) bb.  */

void
bitint_large_huge::if_then_else (gimple *cond, profile_probability prob,
				 edge &edge_true, edge &edge_false)
{
  insert_before (cond);
  /* Split after COND; e1->dest becomes new_bb1 (the false arm).  */
  edge e1 = split_block (gsi_bb (m_gsi), cond);
  /* Split again to create the join block (e2->dest).  */
  edge e2 = split_block (e1->dest, (gimple *) NULL);
  /* new_bb2 is the true arm, freshly created.  */
  basic_block bb = create_empty_bb (e1->dest);
  add_bb_to_loop (bb, e1->dest->loop_father);
  edge e3 = make_edge (e1->src, bb, EDGE_TRUE_VALUE);
  e1->flags = EDGE_FALSE_VALUE;
  e3->probability = prob;
  e1->probability = prob.invert ();
  bb->count = e1->src->count.apply_probability (prob);
  /* Both arms and the join block are dominated by the condition.  */
  set_immediate_dominator (CDI_DOMINATORS, bb, e1->src);
  set_immediate_dominator (CDI_DOMINATORS, e2->dest, e1->src);
  edge_true = make_single_succ_edge (bb, e2->dest, EDGE_FALLTHRU);
  edge_false = e2;
  m_gsi = gsi_after_labels (bb);
}
791 :
/* Emit a half diamond with full diamond in it
   if (COND1)
     |\
     | \
     |  \
     | if (COND2)
     | /    \
     |/      \
   |new_bb1 new_bb2
     |  |    /
      \ |   /
       \|  /
	\ | /
	 \|/
   or if (COND1) { if (COND2) new_bb2; else new_bb1; }
   PROB1 is the probability that the condition 1 is true.
   PROB2 is the probability that the condition 2 is true.
   Updates m_gsi to start of new_bb1.
   Sets EDGE_TRUE_TRUE to edge from new_bb2 to successor,
   EDGE_TRUE_FALSE to edge from new_bb1 to successor and
   EDGE_FALSE to the EDGE_FALSE_VALUE edge from if (COND1) bb.
   If COND2 is NULL, this is equivalent to
   if_then (COND1, PROB1, EDGE_TRUE_FALSE, EDGE_FALSE);
   EDGE_TRUE_TRUE = NULL;  */

void
bitint_large_huge::if_then_if_then_else (gimple *cond1, gimple *cond2,
					 profile_probability prob1,
					 profile_probability prob2,
					 edge &edge_true_true,
					 edge &edge_true_false,
					 edge &edge_false)
{
  edge e2, e3, e4 = NULL;
  /* Outer half diamond for COND1.  */
  if_then (cond1, prob1, e2, e3);
  if (cond2 == NULL)
    {
      /* Degenerate case: behave exactly like if_then.  */
      edge_true_true = NULL;
      edge_true_false = e2;
      edge_false = e3;
      return;
    }
  /* Inner full diamond for COND2 inside the true arm of COND1.  */
  insert_before (cond2);
  e2 = split_block (gsi_bb (m_gsi), cond2);
  basic_block bb = create_empty_bb (e2->dest);
  add_bb_to_loop (bb, e2->dest->loop_father);
  e4 = make_edge (e2->src, bb, EDGE_TRUE_VALUE);
  set_immediate_dominator (CDI_DOMINATORS, bb, e2->src);
  e4->probability = prob2;
  e2->flags = EDGE_FALSE_VALUE;
  e2->probability = prob2.invert ();
  bb->count = e2->src->count.apply_probability (prob2);
  /* new_bb2 (true-true path) falls through to the common join.  */
  e4 = make_single_succ_edge (bb, e3->dest, EDGE_FALLTHRU);
  /* Locate the edge from new_bb1 (true-false path) to the join.  */
  e2 = find_edge (e2->dest, e3->dest);
  edge_true_true = e4;
  edge_true_false = e2;
  edge_false = e3;
  m_gsi = gsi_after_labels (e2->src);
}
851 :
852 : /* Emit code to access limb IDX from OP. */
853 :
tree
bitint_large_huge::handle_operand (tree op, tree idx)
{
  switch (TREE_CODE (op))
    {
    case SSA_NAME:
      /* SSA_NAMEs not tracked in m_names have no backing partition
	 variable; they are handled by recursing into their defining
	 statement instead.  */
      if (m_names == NULL
	  || !bitmap_bit_p (m_names, SSA_NAME_VERSION (op)))
	{
	  if (SSA_NAME_IS_DEFAULT_DEF (op))
	    {
	      /* Use of an uninitialized _BitInt: materialize (once, on
		 the first pass) a default definition of a single limb
		 temporary, keeping the original name and location for
		 the sake of diagnostics, and reuse it for every limb.  */
	      if (m_first)
		{
		  tree v = create_tmp_reg (m_limb_type);
		  if (SSA_NAME_VAR (op) && VAR_P (SSA_NAME_VAR (op)))
		    {
		      DECL_NAME (v) = DECL_NAME (SSA_NAME_VAR (op));
		      DECL_SOURCE_LOCATION (v)
			= DECL_SOURCE_LOCATION (SSA_NAME_VAR (op));
		    }
		  v = get_or_create_ssa_default_def (cfun, v);
		  m_data.safe_push (v);
		}
	      tree ret = m_data[m_data_cnt];
	      m_data_cnt++;
	      if (tree_fits_uhwi_p (idx))
		{
		  /* Constant index: cast to the access type of that
		     particular (possibly partial) limb.  */
		  tree type = limb_access_type (TREE_TYPE (op), idx);
		  ret = add_cast (type, ret);
		}
	      return ret;
	    }
	  /* Expand the defining statement inline, temporarily using its
	     location for the statements emitted for it.  */
	  location_t loc_save = m_loc;
	  m_loc = gimple_location (SSA_NAME_DEF_STMT (op));
	  tree ret = handle_stmt (SSA_NAME_DEF_STMT (op), idx);
	  m_loc = loc_save;
	  return ret;
	}
      int p;
      gimple *g;
      tree t;
      /* Otherwise load limb IDX from the variable backing OP's
	 coalesced partition.  */
      p = var_to_partition (m_map, op);
      gcc_assert (m_vars[p] != NULL_TREE);
      t = limb_access (TREE_TYPE (op), m_vars[p], idx, false);
      g = gimple_build_assign (make_ssa_name (TREE_TYPE (t)), t);
      insert_before (g);
      t = gimple_assign_lhs (g);
      if (m_first
	  && m_single_use_names
	  && m_vars[p] != m_lhs
	  && m_after_stmt
	  && bitmap_bit_p (m_single_use_names, SSA_NAME_VERSION (op)))
	{
	  /* This was the single use of OP; clobber the backing variable
	     after the current statement so its storage can be reused.  */
	  tree clobber = build_clobber (TREE_TYPE (m_vars[p]),
					CLOBBER_STORAGE_END);
	  g = gimple_build_assign (m_vars[p], clobber);
	  gimple_stmt_iterator gsi = gsi_for_stmt (m_after_stmt);
	  gsi_insert_after (&gsi, g, GSI_SAME_STMT);
	}
      return t;
    case INTEGER_CST:
      if (tree_fits_uhwi_p (idx))
	{
	  /* Constant index: extract the limb value at compile time.  */
	  tree c, type = limb_access_type (TREE_TYPE (op), idx);
	  unsigned HOST_WIDE_INT i = tree_to_uhwi (idx);
	  if (m_first)
	    {
	      m_data.safe_push (NULL_TREE);
	      m_data.safe_push (NULL_TREE);
	    }
	  if (bitint_big_endian)
	    i = CEIL (TYPE_PRECISION (TREE_TYPE (op)), limb_prec) - 1 - i;
	  if (limb_prec != HOST_BITS_PER_WIDE_INT)
	    {
	      /* Limb is not a whole HOST_WIDE_INT element; shift the
		 wanted bits into place and truncate.  */
	      wide_int w = wi::rshift (wi::to_wide (op), i * limb_prec,
				       TYPE_SIGN (TREE_TYPE (op)));
	      c = wide_int_to_tree (type,
				    wide_int::from (w, TYPE_PRECISION (type),
						    UNSIGNED));
	    }
	  else if (i >= TREE_INT_CST_EXT_NUNITS (op))
	    /* Limbs above the stored elements are all copies of the
	       sign bit.  */
	    c = build_int_cst (type,
			       tree_int_cst_sgn (op) < 0 ? -1 : 0);
	  else
	    c = build_int_cst (type, TREE_INT_CST_ELT (op, i));
	  m_data_cnt += 2;
	  return c;
	}
      /* Variable index: pick a representation for the constant once
	 (on the first pass) and stash it in a pair of m_data slots.  */
      if (m_first
	  || (m_data[m_data_cnt] == NULL_TREE
	      && m_data[m_data_cnt + 1] == NULL_TREE))
	{
	  unsigned int prec = TYPE_PRECISION (TREE_TYPE (op));
	  unsigned int rem = prec % ((m_upwards_2limb ? 2 : 1) * limb_prec);
	  int ext;
	  unsigned min_prec = bitint_min_cst_precision (op, ext);
	  if (m_first)
	    {
	      m_data.safe_push (NULL_TREE);
	      m_data.safe_push (NULL_TREE);
	    }
	  if (integer_zerop (op))
	    {
	      /* All limbs are 0.  */
	      tree c = build_zero_cst (m_limb_type);
	      m_data[m_data_cnt] = c;
	      m_data[m_data_cnt + 1] = c;
	    }
	  else if (integer_all_onesp (op))
	    {
	      /* All limbs are ~0.  */
	      tree c = build_all_ones_cst (m_limb_type);
	      m_data[m_data_cnt] = c;
	      m_data[m_data_cnt + 1] = c;
	    }
	  else if (m_upwards_2limb && min_prec <= (unsigned) limb_prec)
	    {
	      /* Single limb constant.  Use a phi with that limb from
		 the preheader edge and 0 or -1 constant from the other edge
		 and for the second limb in the loop.  */
	      tree out;
	      gcc_assert (m_first);
	      m_data.pop ();
	      m_data.pop ();
	      prepare_data_in_out (fold_convert (m_limb_type, op), idx, &out,
				   build_int_cst (m_limb_type, ext));
	    }
	  else if (min_prec > prec - rem - 2 * limb_prec)
	    {
	      /* Constant which has enough significant bits that it isn't
		 worth trying to save .rodata space by extending from smaller
		 number.  */
	      tree type;
	      if (m_var_msb)
		type = TREE_TYPE (op);
	      else
		/* If we have a guarantee the most significant partial limb
		   (if any) will be only accessed through handle_operand
		   with INTEGER_CST idx, we don't need to include the partial
		   limb in .rodata.  */
		type = build_bitint_type (prec - rem, 1);
	      tree c = tree_output_constant_def (fold_convert (type, op));
	      m_data[m_data_cnt] = c;
	      /* NULL_TREE in the second slot marks this "full .rodata
		 constant" representation.  */
	      m_data[m_data_cnt + 1] = NULL_TREE;
	    }
	  else if (m_upwards_2limb)
	    {
	      /* Constant with smaller number of bits.  Trade conditional
		 code for .rodata space by extending from smaller number.  */
	      min_prec = CEIL (min_prec, 2 * limb_prec) * (2 * limb_prec);
	      tree type = build_bitint_type (min_prec, 1);
	      tree c = tree_output_constant_def (fold_convert (type, op));
	      tree ridx = idx;
	      if (bitint_big_endian)
		{
		  /* Rebias the index for the narrower big-endian
		     constant.  */
		  ridx = make_ssa_name (sizetype);
		  g = gimple_build_assign (ridx, PLUS_EXPR, idx,
					   size_int (min_prec / limb_prec
						     - ((HOST_WIDE_INT)
							CEIL (prec,
							      limb_prec))));
		  insert_before (g);
		}
	      /* Index of the second of the two limbs handled per
		 iteration.  */
	      tree ridx2 = make_ssa_name (sizetype);
	      g = gimple_build_assign (ridx2, PLUS_EXPR, ridx,
				       bitint_big_endian
				       ? size_int (-1) : size_one_node);
	      insert_before (g);
	      /* Runtime test whether IDX is still within the shortened
		 constant; above it every limb is the extension value.  */
	      if (bitint_big_endian)
		g = gimple_build_cond (GE_EXPR, idx,
				       size_int (CEIL (prec, limb_prec)
						 - min_prec / limb_prec),
				       NULL_TREE, NULL_TREE);
	      else
		g = gimple_build_cond (LT_EXPR, idx,
				       size_int (min_prec / limb_prec),
				       NULL_TREE, NULL_TREE);
	      edge edge_true, edge_false;
	      if_then (g, (min_prec >= (prec - rem) / 2
			   ? profile_probability::likely ()
			   : profile_probability::unlikely ()),
		       edge_true, edge_false);
	      tree c1 = limb_access (TREE_TYPE (op), c, ridx, false);
	      g = gimple_build_assign (make_ssa_name (TREE_TYPE (c1)), c1);
	      insert_before (g);
	      c1 = gimple_assign_lhs (g);
	      tree c2 = limb_access (TREE_TYPE (op), c, ridx2, false);
	      g = gimple_build_assign (make_ssa_name (TREE_TYPE (c2)), c2);
	      insert_before (g);
	      c2 = gimple_assign_lhs (g);
	      tree c3 = build_int_cst (m_limb_type, ext);
	      m_gsi = gsi_after_labels (edge_true->dest);
	      /* Merge loaded limb vs. extension value with PHIs, one
	         per limb of the pair.  */
	      m_data[m_data_cnt] = make_ssa_name (m_limb_type);
	      m_data[m_data_cnt + 1] = make_ssa_name (m_limb_type);
	      gphi *phi = create_phi_node (m_data[m_data_cnt],
					   edge_true->dest);
	      add_phi_arg (phi, c1, edge_true, UNKNOWN_LOCATION);
	      add_phi_arg (phi, c3, edge_false, UNKNOWN_LOCATION);
	      phi = create_phi_node (m_data[m_data_cnt + 1], edge_true->dest);
	      add_phi_arg (phi, c2, edge_true, UNKNOWN_LOCATION);
	      add_phi_arg (phi, c3, edge_false, UNKNOWN_LOCATION);
	    }
	  else
	    {
	      /* Constant with smaller number of bits.  Trade conditional
		 code for .rodata space by extending from smaller number.
		 Version for loops with random access to the limbs or
		 downwards loops.  */
	      min_prec = CEIL (min_prec, limb_prec) * limb_prec;
	      tree c;
	      if (min_prec <= (unsigned) limb_prec)
		c = fold_convert (m_limb_type, op);
	      else
		{
		  tree type = build_bitint_type (min_prec, 1);
		  c = tree_output_constant_def (fold_convert (type, op));
		}
	      m_data[m_data_cnt] = c;
	      /* integer_type_node in the second slot marks this
		 "shortened constant, runtime index check" variant.  */
	      m_data[m_data_cnt + 1] = integer_type_node;
	    }
	  t = m_data[m_data_cnt];
	}
      else
	t = m_data[m_data_cnt + 1];
      if (m_data[m_data_cnt + 1] == NULL_TREE)
	{
	  /* Full .rodata constant: just load limb IDX from it.  */
	  tree ridx = idx;
	  unsigned int prec = TYPE_PRECISION (TREE_TYPE (op));
	  tree c = m_data[m_data_cnt];
	  unsigned int min_prec = TYPE_PRECISION (TREE_TYPE (c));
	  if (bitint_big_endian
	      && CEIL (min_prec, limb_prec) != CEIL (prec, limb_prec))
	    {
	      /* The emitted constant has fewer limbs than OP's type;
		 rebias the big-endian index accordingly.  */
	      ridx = make_ssa_name (sizetype);
	      g = gimple_build_assign (ridx, PLUS_EXPR, idx,
				       size_int (CEIL (min_prec, limb_prec)
						 - ((HOST_WIDE_INT)
						    CEIL (prec, limb_prec))));
	      insert_before (g);
	    }
	  t = limb_access (TREE_TYPE (op), c, ridx, false);
	  g = gimple_build_assign (make_ssa_name (TREE_TYPE (t)), t);
	  insert_before (g);
	  t = gimple_assign_lhs (g);
	}
      else if (m_data[m_data_cnt + 1] == integer_type_node)
	{
	  /* Shortened constant: test at runtime whether IDX is within
	     the stored limbs and otherwise use the extension value.  */
	  unsigned int prec = TYPE_PRECISION (TREE_TYPE (op));
	  unsigned rem = prec % ((m_upwards_2limb ? 2 : 1) * limb_prec);
	  int ext = wi::neg_p (wi::to_wide (op)) ? -1 : 0;
	  tree c = m_data[m_data_cnt];
	  unsigned min_prec = TYPE_PRECISION (TREE_TYPE (c));
	  if (bitint_big_endian)
	    g = gimple_build_cond (GE_EXPR, idx,
				   size_int (CEIL (prec, limb_prec)
					     - min_prec / limb_prec),
				   NULL_TREE, NULL_TREE);
	  else
	    g = gimple_build_cond (LT_EXPR, idx,
				   size_int (min_prec / limb_prec),
				   NULL_TREE, NULL_TREE);
	  edge edge_true, edge_false;
	  if_then (g, (min_prec >= (prec - rem) / 2
		       ? profile_probability::likely ()
		       : profile_probability::unlikely ()),
		   edge_true, edge_false);
	  if (min_prec > (unsigned) limb_prec)
	    {
	      /* Multi-limb shortened constant lives in memory; load
		 limb RIDX from it.  Single limb constants are used
		 directly.  */
	      tree ridx = idx;
	      if (bitint_big_endian)
		{
		  ridx = make_ssa_name (sizetype);
		  g = gimple_build_assign (ridx, PLUS_EXPR, idx,
					   size_int (min_prec / limb_prec
						     - ((HOST_WIDE_INT)
							CEIL (prec,
							      limb_prec))));
		  insert_before (g);
		}
	      c = limb_access (TREE_TYPE (op), c, ridx, false);
	      g = gimple_build_assign (make_ssa_name (TREE_TYPE (c)), c);
	      insert_before (g);
	      c = gimple_assign_lhs (g);
	    }
	  tree c2 = build_int_cst (m_limb_type, ext);
	  m_gsi = gsi_after_labels (edge_true->dest);
	  t = make_ssa_name (m_limb_type);
	  gphi *phi = create_phi_node (t, edge_true->dest);
	  add_phi_arg (phi, c, edge_true, UNKNOWN_LOCATION);
	  add_phi_arg (phi, c2, edge_false, UNKNOWN_LOCATION);
	}
      m_data_cnt += 2;
      return t;
    default:
      gcc_unreachable ();
    }
}
1149 :
1150 : /* Helper method, add a PHI node with VAL from preheader edge if
1151 : inside of a loop and m_first. Keep state in a pair of m_data
1152 : elements. If VAL_OUT is non-NULL, use that as PHI argument from
1153 : the latch edge, otherwise create a new SSA_NAME for it and let
1154 : caller initialize it. */
1155 :
1156 : tree
1157 15616 : bitint_large_huge::prepare_data_in_out (tree val, tree idx, tree *data_out,
1158 : tree val_out)
1159 : {
1160 15616 : if (!m_first)
1161 : {
1162 9313 : *data_out = tree_fits_uhwi_p (idx) ? NULL_TREE : m_data[m_data_cnt + 1];
1163 9313 : return m_data[m_data_cnt];
1164 : }
1165 :
1166 6303 : *data_out = NULL_TREE;
1167 6303 : if (tree_fits_uhwi_p (idx))
1168 : {
1169 1986 : m_data.safe_push (val);
1170 1986 : m_data.safe_push (NULL_TREE);
1171 1986 : return val;
1172 : }
1173 :
1174 4317 : tree in = make_ssa_name (TREE_TYPE (val));
1175 4317 : gphi *phi = create_phi_node (in, m_bb);
1176 4317 : edge e1 = find_edge (m_preheader_bb, m_bb);
1177 4317 : edge e2 = EDGE_PRED (m_bb, 0);
1178 4317 : if (e1 == e2)
1179 4317 : e2 = EDGE_PRED (m_bb, 1);
1180 4317 : add_phi_arg (phi, val, e1, UNKNOWN_LOCATION);
1181 4317 : tree out = val_out ? val_out : make_ssa_name (TREE_TYPE (val));
1182 4317 : add_phi_arg (phi, out, e2, UNKNOWN_LOCATION);
1183 4317 : m_data.safe_push (in);
1184 4317 : m_data.safe_push (out);
1185 4317 : return in;
1186 : }
1187 :
1188 : /* Return VAL cast to TYPE. If VAL is INTEGER_CST, just
1189 : convert it without emitting any code, otherwise emit
1190 : the conversion statement before the current location. */
1191 :
1192 : tree
1193 38215 : bitint_large_huge::add_cast (tree type, tree val)
1194 : {
1195 38215 : if (TREE_CODE (val) == INTEGER_CST)
1196 4500 : return fold_convert (type, val);
1197 :
1198 33715 : tree lhs = make_ssa_name (type);
1199 33715 : gimple *g = gimple_build_assign (lhs, NOP_EXPR, val);
1200 33715 : insert_before (g);
1201 33715 : return lhs;
1202 : }
1203 :
1204 : /* Helper of handle_stmt method, handle PLUS_EXPR or MINUS_EXPR. */
1205 :
tree
bitint_large_huge::handle_plus_minus (tree_code code, tree rhs1, tree rhs2,
				      tree idx)
{
  tree lhs, data_out, ctype;
  tree rhs1_type = TREE_TYPE (rhs1);
  gimple *g;
  /* The carry/borrow between limbs is carried in a pair of m_data
     slots; DATA_IN is the incoming value (zero before the first limb)
     and DATA_OUT receives the outgoing one.  */
  tree data_in = prepare_data_in_out (build_zero_cst (m_limb_type), idx,
				      &data_out);

  if (optab_handler (code == PLUS_EXPR ? uaddc5_optab : usubc5_optab,
		     TYPE_MODE (m_limb_type)) != CODE_FOR_nothing)
    {
      /* Target has an add-with-carry resp. subtract-with-borrow
	 pattern; emit a single .UADDC/.USUBC internal call whose
	 complex result has the limb in the real part and the
	 carry/borrow in the imaginary part.  */
      ctype = build_complex_type (m_limb_type);
      if (!types_compatible_p (rhs1_type, m_limb_type))
	{
	  /* Partial limb: go through the unsigned variant first if
	     RHS1_TYPE is signed, then widen to the limb type.  */
	  if (!TYPE_UNSIGNED (rhs1_type))
	    {
	      tree type = unsigned_type_for (rhs1_type);
	      rhs1 = add_cast (type, rhs1);
	      rhs2 = add_cast (type, rhs2);
	    }
	  rhs1 = add_cast (m_limb_type, rhs1);
	  rhs2 = add_cast (m_limb_type, rhs2);
	}
      lhs = make_ssa_name (ctype);
      g = gimple_build_call_internal (code == PLUS_EXPR
				      ? IFN_UADDC : IFN_USUBC,
				      3, rhs1, rhs2, data_in);
      gimple_call_set_lhs (g, lhs);
      insert_before (g);
      if (data_out == NULL_TREE)
	data_out = make_ssa_name (m_limb_type);
      g = gimple_build_assign (data_out, IMAGPART_EXPR,
			       build1 (IMAGPART_EXPR, m_limb_type, lhs));
      insert_before (g);
    }
  else if (types_compatible_p (rhs1_type, m_limb_type))
    {
      /* No carry pattern: fall back to .ADD_OVERFLOW/.SUB_OVERFLOW.
	 If there is an incoming carry, a second overflow call adds it
	 in and the two carry bits are combined with PLUS_EXPR.  */
      ctype = build_complex_type (m_limb_type);
      lhs = make_ssa_name (ctype);
      g = gimple_build_call_internal (code == PLUS_EXPR
				      ? IFN_ADD_OVERFLOW : IFN_SUB_OVERFLOW,
				      2, rhs1, rhs2);
      gimple_call_set_lhs (g, lhs);
      insert_before (g);
      if (data_out == NULL_TREE)
	data_out = make_ssa_name (m_limb_type);
      if (!integer_zerop (data_in))
	{
	  rhs1 = make_ssa_name (m_limb_type);
	  g = gimple_build_assign (rhs1, REALPART_EXPR,
				   build1 (REALPART_EXPR, m_limb_type, lhs));
	  insert_before (g);
	  rhs2 = make_ssa_name (m_limb_type);
	  g = gimple_build_assign (rhs2, IMAGPART_EXPR,
				   build1 (IMAGPART_EXPR, m_limb_type, lhs));
	  insert_before (g);
	  lhs = make_ssa_name (ctype);
	  g = gimple_build_call_internal (code == PLUS_EXPR
					  ? IFN_ADD_OVERFLOW
					  : IFN_SUB_OVERFLOW,
					  2, rhs1, data_in);
	  gimple_call_set_lhs (g, lhs);
	  insert_before (g);
	  data_in = make_ssa_name (m_limb_type);
	  g = gimple_build_assign (data_in, IMAGPART_EXPR,
				   build1 (IMAGPART_EXPR, m_limb_type, lhs));
	  insert_before (g);
	  g = gimple_build_assign (data_out, PLUS_EXPR, rhs2, data_in);
	  insert_before (g);
	}
      else
	{
	  /* First limb (known zero carry in): a single overflow call
	     suffices, its imaginary part is the carry out.  */
	  g = gimple_build_assign (data_out, IMAGPART_EXPR,
				   build1 (IMAGPART_EXPR, m_limb_type, lhs));
	  insert_before (g);
	}
    }
  else
    {
      /* Most significant partial limb without a usable optab: perform
	 the arithmetic directly in RHS1_TYPE; no carry out is needed
	 (NULL_TREE is stored in m_data instead).  */
      tree in = add_cast (rhs1_type, data_in);
      lhs = make_ssa_name (rhs1_type);
      g = gimple_build_assign (lhs, code, rhs1, rhs2);
      insert_before (g);
      rhs1 = make_ssa_name (rhs1_type);
      g = gimple_build_assign (rhs1, code, lhs, in);
      insert_before (g);
      m_data[m_data_cnt] = NULL_TREE;
      m_data_cnt += 2;
      return rhs1;
    }
  /* Extract the result limb from the complex value and narrow it back
     to RHS1_TYPE for a partial limb.  */
  rhs1 = make_ssa_name (m_limb_type);
  g = gimple_build_assign (rhs1, REALPART_EXPR,
			   build1 (REALPART_EXPR, m_limb_type, lhs));
  insert_before (g);
  if (!types_compatible_p (rhs1_type, m_limb_type))
    rhs1 = add_cast (rhs1_type, rhs1);
  m_data[m_data_cnt] = data_out;
  m_data_cnt += 2;
  return rhs1;
}
1308 :
1309 : /* Helper function for handle_stmt method, handle LSHIFT_EXPR by
1310 : count in [0, limb_prec - 1] range. */
1311 :
1312 : tree
1313 152 : bitint_large_huge::handle_lshift (tree rhs1, tree rhs2, tree idx)
1314 : {
1315 152 : unsigned HOST_WIDE_INT cnt = tree_to_uhwi (rhs2);
1316 152 : gcc_checking_assert (cnt < (unsigned) limb_prec);
1317 152 : if (cnt == 0)
1318 : return rhs1;
1319 :
1320 152 : tree lhs, data_out, rhs1_type = TREE_TYPE (rhs1);
1321 152 : gimple *g;
1322 152 : tree data_in = prepare_data_in_out (build_zero_cst (m_limb_type), idx,
1323 : &data_out);
1324 :
1325 152 : if (!integer_zerop (data_in))
1326 : {
1327 136 : lhs = make_ssa_name (m_limb_type);
1328 136 : g = gimple_build_assign (lhs, RSHIFT_EXPR, data_in,
1329 : build_int_cst (unsigned_type_node,
1330 136 : limb_prec - cnt));
1331 136 : insert_before (g);
1332 136 : if (!types_compatible_p (rhs1_type, m_limb_type))
1333 35 : lhs = add_cast (rhs1_type, lhs);
1334 : data_in = lhs;
1335 : }
1336 152 : if (types_compatible_p (rhs1_type, m_limb_type))
1337 : {
1338 117 : if (data_out == NULL_TREE)
1339 82 : data_out = make_ssa_name (m_limb_type);
1340 117 : g = gimple_build_assign (data_out, rhs1);
1341 117 : insert_before (g);
1342 : }
1343 152 : if (cnt < (unsigned) TYPE_PRECISION (rhs1_type))
1344 : {
1345 137 : lhs = make_ssa_name (rhs1_type);
1346 137 : g = gimple_build_assign (lhs, LSHIFT_EXPR, rhs1, rhs2);
1347 137 : insert_before (g);
1348 137 : if (!integer_zerop (data_in))
1349 : {
1350 121 : rhs1 = lhs;
1351 121 : lhs = make_ssa_name (rhs1_type);
1352 121 : g = gimple_build_assign (lhs, BIT_IOR_EXPR, rhs1, data_in);
1353 121 : insert_before (g);
1354 : }
1355 : }
1356 : else
1357 : lhs = data_in;
1358 152 : m_data[m_data_cnt] = data_out;
1359 152 : m_data_cnt += 2;
1360 152 : return lhs;
1361 : }
1362 :
1363 : /* Helper function for handle_stmt method, handle an integral
1364 : to integral conversion. */
1365 :
1366 : tree
1367 7457 : bitint_large_huge::handle_cast (tree lhs_type, tree rhs1, tree idx)
1368 : {
1369 7457 : tree rhs_type = TREE_TYPE (rhs1);
1370 7457 : gimple *g;
1371 7457 : if ((TREE_CODE (rhs1) == SSA_NAME || TREE_CODE (rhs1) == INTEGER_CST)
1372 7457 : && TREE_CODE (lhs_type) == BITINT_TYPE
1373 7457 : && TREE_CODE (rhs_type) == BITINT_TYPE
1374 6468 : && bitint_precision_kind (lhs_type) >= bitint_prec_large
1375 13925 : && bitint_precision_kind (rhs_type) >= bitint_prec_large)
1376 : {
1377 5877 : if (TYPE_PRECISION (rhs_type) >= TYPE_PRECISION (lhs_type)
1378 : /* If lhs has bigger precision than rhs, we can use
1379 : the simple case only if there is a guarantee that
1380 : the most significant limb is handled in straight
1381 : line code. If m_var_msb (on left shifts) or
1382 : if m_upwards_2limb * limb_prec is equal to
1383 : lhs precision or if not m_upwards_2limb and lhs_type
1384 : has precision which is multiple of limb_prec that is
1385 : not the case. */
1386 5877 : || (!m_var_msb
1387 1481 : && (CEIL (TYPE_PRECISION (lhs_type), limb_prec)
1388 1481 : == CEIL (TYPE_PRECISION (rhs_type), limb_prec))
1389 346 : && ((!m_upwards_2limb
1390 182 : && (TYPE_PRECISION (lhs_type) % limb_prec != 0))
1391 243 : || (m_upwards_2limb
1392 328 : && (m_upwards_2limb * limb_prec
1393 164 : < TYPE_PRECISION (lhs_type))))))
1394 : {
1395 4637 : tree ridx = idx;
1396 4637 : if (bitint_big_endian
1397 4637 : && (CEIL (TYPE_PRECISION (lhs_type), limb_prec)
1398 0 : != CEIL (TYPE_PRECISION (rhs_type), limb_prec)))
1399 : {
1400 0 : HOST_WIDE_INT diff = CEIL (TYPE_PRECISION (rhs_type), limb_prec);
1401 0 : diff -= CEIL (TYPE_PRECISION (lhs_type), limb_prec);
1402 0 : if (tree_fits_uhwi_p (idx))
1403 0 : ridx = size_int (tree_to_uhwi (idx) + diff);
1404 : else
1405 : {
1406 0 : tree t = make_ssa_name (sizetype);
1407 0 : g = gimple_build_assign (t, PLUS_EXPR, idx, size_int (diff));
1408 0 : insert_before (g);
1409 0 : ridx = t;
1410 : }
1411 : }
1412 4637 : rhs1 = handle_operand (rhs1, ridx);
1413 4637 : if (tree_fits_uhwi_p (idx))
1414 : {
1415 2372 : tree type = limb_access_type (lhs_type, idx);
1416 2372 : if (!types_compatible_p (type, TREE_TYPE (rhs1)))
1417 1241 : rhs1 = add_cast (type, rhs1);
1418 : }
1419 4637 : return rhs1;
1420 : }
1421 1240 : tree t;
1422 : /* Indexes lower than this don't need any special processing. */
1423 1240 : unsigned low = ((unsigned) TYPE_PRECISION (rhs_type)
1424 1240 : - !TYPE_UNSIGNED (rhs_type)) / limb_prec;
1425 : /* Indexes >= than this always contain an extension. */
1426 1240 : unsigned high = CEIL ((unsigned) TYPE_PRECISION (rhs_type), limb_prec);
1427 1240 : unsigned lcnt = CEIL ((unsigned) TYPE_PRECISION (lhs_type), limb_prec);
1428 1240 : unsigned lowe = bitint_big_endian ? lcnt - 1 - low : low;
1429 1240 : bool save_first = m_first;
1430 1240 : if (m_first)
1431 : {
1432 405 : m_data.safe_push (NULL_TREE);
1433 405 : m_data.safe_push (NULL_TREE);
1434 405 : m_data.safe_push (NULL_TREE);
1435 405 : if (TYPE_UNSIGNED (rhs_type))
1436 : /* No need to keep state between iterations. */
1437 : ;
1438 192 : else if (m_upwards && !m_upwards_2limb)
1439 : /* We need to keep state between iterations, but
1440 : not within any loop, everything is straight line
1441 : code with only increasing indexes. */
1442 : ;
1443 152 : else if (!m_upwards_2limb)
1444 : {
1445 3 : unsigned save_data_cnt = m_data_cnt;
1446 3 : gimple_stmt_iterator save_gsi = m_gsi;
1447 3 : m_gsi = m_init_gsi;
1448 3 : if (gsi_end_p (m_gsi))
1449 0 : m_gsi = gsi_after_labels (gsi_bb (m_gsi));
1450 : else
1451 3 : gsi_next (&m_gsi);
1452 3 : m_data_cnt = save_data_cnt + 3;
1453 3 : t = handle_operand (rhs1, size_int (bitint_big_endian
1454 : ? high - 1 - low : low));
1455 3 : m_first = false;
1456 3 : m_data[save_data_cnt + 2]
1457 3 : = build_int_cst (NULL_TREE, m_data_cnt);
1458 3 : m_data_cnt = save_data_cnt;
1459 3 : t = add_cast (signed_type_for (m_limb_type), t);
1460 3 : tree lpm1 = build_int_cst (unsigned_type_node, limb_prec - 1);
1461 3 : tree n = make_ssa_name (TREE_TYPE (t));
1462 3 : g = gimple_build_assign (n, RSHIFT_EXPR, t, lpm1);
1463 3 : insert_before (g);
1464 3 : m_data[save_data_cnt + 1] = add_cast (m_limb_type, n);
1465 3 : m_init_gsi = m_gsi;
1466 3 : if (gsi_end_p (m_init_gsi))
1467 0 : m_init_gsi = gsi_last_bb (gsi_bb (m_init_gsi));
1468 : else
1469 3 : gsi_prev (&m_init_gsi);
1470 3 : m_gsi = save_gsi;
1471 : }
1472 149 : else if (m_upwards_2limb * limb_prec < TYPE_PRECISION (rhs_type))
1473 : /* We need to keep state between iterations, but
1474 : fortunately not within the loop, only afterwards. */
1475 : ;
1476 : else
1477 : {
1478 145 : tree out;
1479 145 : m_data.truncate (m_data_cnt);
1480 145 : prepare_data_in_out (build_zero_cst (m_limb_type), idx, &out);
1481 145 : m_data.safe_push (NULL_TREE);
1482 : }
1483 : }
1484 :
1485 1240 : unsigned save_data_cnt = m_data_cnt;
1486 1240 : m_data_cnt += 3;
1487 1240 : if (!tree_fits_uhwi_p (idx))
1488 : {
1489 670 : if (m_upwards_2limb
1490 650 : && low >= m_upwards_2limb - m_first)
1491 : {
1492 158 : if (bitint_big_endian
1493 158 : && (CEIL (TYPE_PRECISION (lhs_type), limb_prec)
1494 0 : != CEIL (TYPE_PRECISION (rhs_type), limb_prec)))
1495 : {
1496 0 : HOST_WIDE_INT diff
1497 0 : = CEIL (TYPE_PRECISION (rhs_type), limb_prec);
1498 0 : diff -= CEIL (TYPE_PRECISION (lhs_type), limb_prec);
1499 0 : tree t = make_ssa_name (sizetype);
1500 0 : g = gimple_build_assign (t, PLUS_EXPR, idx, size_int (diff));
1501 0 : insert_before (g);
1502 0 : idx = t;
1503 : }
1504 158 : rhs1 = handle_operand (rhs1, idx);
1505 158 : if (m_first)
1506 131 : m_data[save_data_cnt + 2]
1507 262 : = build_int_cst (NULL_TREE, m_data_cnt);
1508 158 : m_first = save_first;
1509 158 : return rhs1;
1510 : }
1511 1289 : bool single_comparison
1512 512 : = low == high || (m_upwards_2limb && (low & 1) == m_first);
1513 265 : tree idxc = idx;
1514 265 : if (!single_comparison
1515 265 : && m_upwards_2limb
1516 245 : && !m_first
1517 112 : && low + 1 == m_upwards_2limb)
1518 : /* In this case we know that idx <= low always,
1519 : so effectively we just needs a single comparison,
1520 : idx < low or idx == low, but we'd need to emit different
1521 : code for the 2 branches than single_comparison normally
1522 : emits. So, instead of special-casing that, emit a
1523 : low <= low comparison which cfg cleanup will clean up
1524 : at the end of the pass. */
1525 89 : idxc = size_int (lowe);
1526 512 : if (bitint_big_endian)
1527 0 : g = gimple_build_cond (single_comparison ? GT_EXPR : GE_EXPR,
1528 0 : idxc, size_int (lowe),
1529 : NULL_TREE, NULL_TREE);
1530 : else
1531 777 : g = gimple_build_cond (single_comparison ? LT_EXPR : LE_EXPR,
1532 512 : idxc, size_int (low), NULL_TREE, NULL_TREE);
1533 512 : edge edge_true_true, edge_true_false, edge_false;
1534 777 : if_then_if_then_else (g, (single_comparison ? NULL
1535 265 : : gimple_build_cond (EQ_EXPR, idx,
1536 265 : size_int (lowe),
1537 : NULL_TREE,
1538 : NULL_TREE)),
1539 : profile_probability::likely (),
1540 : profile_probability::unlikely (),
1541 : edge_true_true, edge_true_false, edge_false);
1542 512 : bool save_cast_conditional = m_cast_conditional;
1543 512 : m_cast_conditional = true;
1544 512 : m_bitfld_load = 0;
1545 512 : tree t1 = idx, t2 = NULL_TREE;
1546 512 : if (bitint_big_endian
1547 512 : && (CEIL (TYPE_PRECISION (lhs_type), limb_prec)
1548 0 : != CEIL (TYPE_PRECISION (rhs_type), limb_prec)))
1549 : {
1550 0 : HOST_WIDE_INT diff = CEIL (TYPE_PRECISION (rhs_type), limb_prec);
1551 0 : diff -= CEIL (TYPE_PRECISION (lhs_type), limb_prec);
1552 0 : t1 = make_ssa_name (sizetype);
1553 0 : g = gimple_build_assign (t1, PLUS_EXPR, idx, size_int (diff));
1554 0 : insert_before (g);
1555 : }
1556 512 : t1 = handle_operand (rhs1, t1);
1557 512 : if (m_first)
1558 199 : m_data[save_data_cnt + 2]
1559 398 : = build_int_cst (NULL_TREE, m_data_cnt);
1560 512 : tree ext = NULL_TREE;
1561 512 : tree bitfld = NULL_TREE;
1562 512 : if (!single_comparison)
1563 : {
1564 265 : m_gsi = gsi_after_labels (edge_true_true->src);
1565 265 : m_first = false;
1566 265 : m_data_cnt = save_data_cnt + 3;
1567 265 : if (m_bitfld_load)
1568 : {
1569 4 : bitfld = m_data[m_bitfld_load];
1570 4 : m_data[m_bitfld_load] = m_data[m_bitfld_load + 2];
1571 4 : m_bitfld_load = 0;
1572 : }
1573 265 : t2 = handle_operand (rhs1, size_int (bitint_big_endian
1574 : ? high - 1 - low : low));
1575 265 : if (!useless_type_conversion_p (m_limb_type, TREE_TYPE (t2)))
1576 220 : t2 = add_cast (m_limb_type, t2);
1577 265 : if (!TYPE_UNSIGNED (rhs_type) && m_upwards_2limb)
1578 : {
1579 145 : ext = add_cast (signed_type_for (m_limb_type), t2);
1580 290 : tree lpm1 = build_int_cst (unsigned_type_node,
1581 145 : limb_prec - 1);
1582 145 : tree n = make_ssa_name (TREE_TYPE (ext));
1583 145 : g = gimple_build_assign (n, RSHIFT_EXPR, ext, lpm1);
1584 145 : insert_before (g);
1585 145 : ext = add_cast (m_limb_type, n);
1586 : }
1587 : }
1588 512 : tree t3;
1589 512 : if (TYPE_UNSIGNED (rhs_type))
1590 262 : t3 = build_zero_cst (m_limb_type);
1591 250 : else if (m_upwards_2limb && (save_first || ext != NULL_TREE))
1592 167 : t3 = m_data[save_data_cnt];
1593 : else
1594 83 : t3 = m_data[save_data_cnt + 1];
1595 512 : m_gsi = gsi_after_labels (edge_true_false->dest);
1596 512 : t = make_ssa_name (m_limb_type);
1597 512 : gphi *phi = create_phi_node (t, edge_true_false->dest);
1598 512 : add_phi_arg (phi, t1, edge_true_false, UNKNOWN_LOCATION);
1599 512 : add_phi_arg (phi, t3, edge_false, UNKNOWN_LOCATION);
1600 512 : if (edge_true_true)
1601 265 : add_phi_arg (phi, t2, edge_true_true, UNKNOWN_LOCATION);
1602 512 : if (ext)
1603 : {
1604 145 : tree t4 = make_ssa_name (m_limb_type);
1605 145 : phi = create_phi_node (t4, edge_true_false->dest);
1606 145 : add_phi_arg (phi, build_zero_cst (m_limb_type), edge_true_false,
1607 : UNKNOWN_LOCATION);
1608 145 : add_phi_arg (phi, m_data[save_data_cnt], edge_false,
1609 : UNKNOWN_LOCATION);
1610 145 : add_phi_arg (phi, ext, edge_true_true, UNKNOWN_LOCATION);
1611 145 : if (!save_cast_conditional)
1612 : {
1613 135 : g = gimple_build_assign (m_data[save_data_cnt + 1], t4);
1614 135 : insert_before (g);
1615 : }
1616 : else
1617 10 : for (basic_block bb = gsi_bb (m_gsi);;)
1618 : {
1619 10 : edge e1 = single_succ_edge (bb);
1620 10 : edge e2 = find_edge (e1->dest, m_bb), e3;
1621 10 : tree t5 = (e2 ? m_data[save_data_cnt + 1]
1622 10 : : make_ssa_name (m_limb_type));
1623 10 : phi = create_phi_node (t5, e1->dest);
1624 10 : edge_iterator ei;
1625 30 : FOR_EACH_EDGE (e3, ei, e1->dest->preds)
1626 30 : add_phi_arg (phi, (e3 == e1 ? t4
1627 10 : : build_zero_cst (m_limb_type)),
1628 : e3, UNKNOWN_LOCATION);
1629 10 : if (e2)
1630 : break;
1631 0 : t4 = t5;
1632 0 : bb = e1->dest;
1633 0 : }
1634 : }
1635 512 : if (m_bitfld_load)
1636 : {
1637 8 : tree t4;
1638 8 : if (!save_first && !save_cast_conditional)
1639 2 : t4 = m_data[m_bitfld_load + 1];
1640 : else
1641 6 : t4 = make_ssa_name (m_limb_type);
1642 8 : phi = create_phi_node (t4, edge_true_false->dest);
1643 12 : add_phi_arg (phi,
1644 4 : edge_true_true ? bitfld : m_data[m_bitfld_load],
1645 : edge_true_false, UNKNOWN_LOCATION);
1646 8 : add_phi_arg (phi, m_data[m_bitfld_load + 2],
1647 : edge_false, UNKNOWN_LOCATION);
1648 8 : if (edge_true_true)
1649 4 : add_phi_arg (phi, m_data[m_bitfld_load], edge_true_true,
1650 : UNKNOWN_LOCATION);
1651 8 : if (save_cast_conditional)
1652 4 : for (basic_block bb = gsi_bb (m_gsi);;)
1653 : {
1654 4 : edge e1 = single_succ_edge (bb);
1655 4 : edge e2 = find_edge (e1->dest, m_bb), e3;
1656 4 : tree t5 = ((e2 && !save_first) ? m_data[m_bitfld_load + 1]
1657 4 : : make_ssa_name (m_limb_type));
1658 4 : phi = create_phi_node (t5, e1->dest);
1659 4 : edge_iterator ei;
1660 14 : FOR_EACH_EDGE (e3, ei, e1->dest->preds)
1661 16 : add_phi_arg (phi, (e3 == e1 ? t4
1662 6 : : build_zero_cst (m_limb_type)),
1663 : e3, UNKNOWN_LOCATION);
1664 4 : t4 = t5;
1665 4 : if (e2)
1666 : break;
1667 0 : bb = e1->dest;
1668 0 : }
1669 8 : m_data[m_bitfld_load] = t4;
1670 8 : m_data[m_bitfld_load + 2] = t4;
1671 8 : m_bitfld_load = 0;
1672 : }
1673 512 : m_cast_conditional = save_cast_conditional;
1674 512 : m_first = save_first;
1675 512 : return t;
1676 : }
1677 : else
1678 : {
1679 570 : unsigned tidx = tree_to_uhwi (idx);
1680 570 : if (bitint_big_endian)
1681 0 : tidx = lcnt - 1 - tidx;
1682 570 : if (tidx < low)
1683 : {
1684 152 : t = handle_operand (rhs1, (bitint_big_endian
1685 0 : ? size_int (high - 1 - tidx) : idx));
1686 152 : if (m_first)
1687 71 : m_data[save_data_cnt + 2]
1688 142 : = build_int_cst (NULL_TREE, m_data_cnt);
1689 : }
1690 418 : else if (tidx < high)
1691 : {
1692 68 : t = handle_operand (rhs1, size_int (bitint_big_endian
1693 : ? high - 1 - low : low));
1694 68 : if (m_first)
1695 1 : m_data[save_data_cnt + 2]
1696 2 : = build_int_cst (NULL_TREE, m_data_cnt);
1697 68 : if (!useless_type_conversion_p (m_limb_type, TREE_TYPE (t)))
1698 60 : t = add_cast (m_limb_type, t);
1699 68 : tree ext = NULL_TREE;
1700 68 : if (!TYPE_UNSIGNED (rhs_type) && m_upwards)
1701 : {
1702 44 : ext = add_cast (signed_type_for (m_limb_type), t);
1703 88 : tree lpm1 = build_int_cst (unsigned_type_node,
1704 44 : limb_prec - 1);
1705 44 : tree n = make_ssa_name (TREE_TYPE (ext));
1706 44 : g = gimple_build_assign (n, RSHIFT_EXPR, ext, lpm1);
1707 44 : insert_before (g);
1708 44 : ext = add_cast (m_limb_type, n);
1709 44 : m_data[save_data_cnt + 1] = ext;
1710 : }
1711 : }
1712 : else
1713 : {
1714 350 : if (TYPE_UNSIGNED (rhs_type) && m_first)
1715 : {
1716 0 : handle_operand (rhs1, (bitint_big_endian
1717 0 : ? size_int (high - 1)
1718 : : size_zero_node));
1719 0 : m_data[save_data_cnt + 2]
1720 0 : = build_int_cst (NULL_TREE, m_data_cnt);
1721 : }
1722 : else
1723 350 : m_data_cnt = tree_to_uhwi (m_data[save_data_cnt + 2]);
1724 350 : if (TYPE_UNSIGNED (rhs_type))
1725 184 : t = build_zero_cst (m_limb_type);
1726 166 : else if (m_bb
1727 16 : && m_data[save_data_cnt]
1728 179 : && ((tidx & 1) == 0 || tidx != low + 1))
1729 : t = m_data[save_data_cnt];
1730 : else
1731 160 : t = m_data[save_data_cnt + 1];
1732 : }
1733 570 : tree type = limb_access_type (lhs_type, idx);
1734 570 : if (!useless_type_conversion_p (type, m_limb_type))
1735 291 : t = add_cast (type, t);
1736 570 : m_first = save_first;
1737 570 : return t;
1738 : }
1739 : }
1740 1580 : else if (TREE_CODE (lhs_type) == BITINT_TYPE
1741 1580 : && bitint_precision_kind (lhs_type) >= bitint_prec_large
1742 3160 : && INTEGRAL_TYPE_P (rhs_type))
1743 : {
1744 : /* Add support for 3 or more limbs filled in from normal integral
1745 : type if this assert fails. If no target chooses limb mode smaller
1746 : than half of largest supported normal integral type, this will not
1747 : be needed. */
1748 1580 : gcc_assert (TYPE_PRECISION (rhs_type) <= 2 * limb_prec);
1749 1580 : tree r1 = NULL_TREE, r2 = NULL_TREE, rext = NULL_TREE;
1750 1580 : if (m_first)
1751 : {
1752 577 : gimple_stmt_iterator save_gsi = m_gsi;
1753 577 : m_gsi = m_init_gsi;
1754 577 : if (gsi_end_p (m_gsi))
1755 57 : m_gsi = gsi_after_labels (gsi_bb (m_gsi));
1756 : else
1757 520 : gsi_next (&m_gsi);
1758 577 : if (TREE_CODE (rhs_type) == BITINT_TYPE
1759 577 : && bitint_precision_kind (rhs_type) == bitint_prec_middle)
1760 : {
1761 63 : tree type = NULL_TREE;
1762 63 : rhs1 = maybe_cast_middle_bitint (&m_gsi, rhs1, type);
1763 63 : rhs_type = TREE_TYPE (rhs1);
1764 : }
1765 577 : r1 = rhs1;
1766 577 : if (!useless_type_conversion_p (m_limb_type, TREE_TYPE (rhs1)))
1767 509 : r1 = add_cast (m_limb_type, rhs1);
1768 577 : if (TYPE_PRECISION (rhs_type) > limb_prec)
1769 : {
1770 109 : g = gimple_build_assign (make_ssa_name (rhs_type),
1771 : RSHIFT_EXPR, rhs1,
1772 : build_int_cst (unsigned_type_node,
1773 109 : limb_prec));
1774 109 : insert_before (g);
1775 109 : r2 = add_cast (m_limb_type, gimple_assign_lhs (g));
1776 : }
1777 577 : if (TYPE_UNSIGNED (rhs_type))
1778 279 : rext = build_zero_cst (m_limb_type);
1779 : else
1780 : {
1781 298 : rext = add_cast (signed_type_for (m_limb_type), r2 ? r2 : r1);
1782 298 : g = gimple_build_assign (make_ssa_name (TREE_TYPE (rext)),
1783 : RSHIFT_EXPR, rext,
1784 : build_int_cst (unsigned_type_node,
1785 298 : limb_prec - 1));
1786 298 : insert_before (g);
1787 298 : rext = add_cast (m_limb_type, gimple_assign_lhs (g));
1788 : }
1789 577 : m_init_gsi = m_gsi;
1790 577 : if (gsi_end_p (m_init_gsi))
1791 564 : m_init_gsi = gsi_last_bb (gsi_bb (m_init_gsi));
1792 : else
1793 295 : gsi_prev (&m_init_gsi);
1794 577 : m_gsi = save_gsi;
1795 : }
1796 1580 : tree t;
1797 1580 : if (m_upwards_2limb)
1798 : {
1799 724 : if (m_first)
1800 : {
1801 280 : tree out1, out2;
1802 280 : prepare_data_in_out (r1, idx, &out1, rext);
1803 280 : if (TYPE_PRECISION (rhs_type) > limb_prec)
1804 : {
1805 70 : prepare_data_in_out (r2, idx, &out2, rext);
1806 70 : m_data.pop ();
1807 70 : t = m_data.pop ();
1808 70 : m_data[m_data_cnt + 1] = t;
1809 : }
1810 : else
1811 210 : m_data[m_data_cnt + 1] = rext;
1812 280 : m_data.safe_push (rext);
1813 280 : t = m_data[m_data_cnt];
1814 : }
1815 444 : else if (!tree_fits_uhwi_p (idx))
1816 280 : t = m_data[m_data_cnt + 1];
1817 : else
1818 : {
1819 164 : tree type = limb_access_type (lhs_type, idx);
1820 164 : t = m_data[m_data_cnt + 2];
1821 164 : if (!useless_type_conversion_p (type, m_limb_type))
1822 136 : t = add_cast (type, t);
1823 : }
1824 724 : m_data_cnt += 3;
1825 724 : return t;
1826 : }
1827 856 : else if (m_first)
1828 : {
1829 297 : m_data.safe_push (r1);
1830 297 : m_data.safe_push (r2);
1831 297 : m_data.safe_push (rext);
1832 : }
1833 856 : unsigned lcnt = CEIL ((unsigned) TYPE_PRECISION (lhs_type), limb_prec);
1834 856 : if (tree_fits_uhwi_p (idx))
1835 : {
1836 812 : tree type = limb_access_type (lhs_type, idx);
1837 812 : if (bitint_big_endian
1838 812 : ? tree_to_uhwi (idx) == lcnt - 1 : integer_zerop (idx))
1839 269 : t = m_data[m_data_cnt];
1840 543 : else if (TYPE_PRECISION (rhs_type) > limb_prec
1841 543 : && (bitint_big_endian
1842 72 : ? tree_to_uhwi (idx) == lcnt - 2
1843 72 : : integer_onep (idx)))
1844 33 : t = m_data[m_data_cnt + 1];
1845 : else
1846 510 : t = m_data[m_data_cnt + 2];
1847 812 : if (!useless_type_conversion_p (type, m_limb_type))
1848 250 : t = add_cast (type, t);
1849 812 : m_data_cnt += 3;
1850 812 : return t;
1851 : }
1852 44 : g = gimple_build_cond (NE_EXPR, idx,
1853 : bitint_big_endian
1854 0 : ? size_int (lcnt - 1) : size_zero_node,
1855 : NULL_TREE, NULL_TREE);
1856 44 : edge e2, e3, e4 = NULL;
1857 44 : if_then (g, profile_probability::likely (), e2, e3);
1858 44 : if (m_data[m_data_cnt + 1])
1859 : {
1860 14 : g = gimple_build_cond (EQ_EXPR, idx,
1861 : bitint_big_endian
1862 0 : ? size_int (lcnt - 2) : size_one_node,
1863 : NULL_TREE, NULL_TREE);
1864 14 : insert_before (g);
1865 14 : edge e5 = split_block (gsi_bb (m_gsi), g);
1866 14 : e4 = make_edge (e5->src, e2->dest, EDGE_TRUE_VALUE);
1867 14 : e2 = find_edge (e5->dest, e2->dest);
1868 14 : e4->probability = profile_probability::unlikely ();
1869 14 : e5->flags = EDGE_FALSE_VALUE;
1870 14 : e5->probability = e4->probability.invert ();
1871 : }
1872 44 : m_gsi = gsi_after_labels (e2->dest);
1873 44 : t = make_ssa_name (m_limb_type);
1874 44 : gphi *phi = create_phi_node (t, e2->dest);
1875 44 : add_phi_arg (phi, m_data[m_data_cnt + 2], e2, UNKNOWN_LOCATION);
1876 44 : add_phi_arg (phi, m_data[m_data_cnt], e3, UNKNOWN_LOCATION);
1877 44 : if (e4)
1878 14 : add_phi_arg (phi, m_data[m_data_cnt + 1], e4, UNKNOWN_LOCATION);
1879 44 : m_data_cnt += 3;
1880 44 : return t;
1881 : }
1882 : return NULL_TREE;
1883 : }
1884 :
/* Helper function for handle_stmt method, handle a BIT_FIELD_REF.
   OP is the BIT_FIELD_REF tree and IDX the limb index to extract,
   either an INTEGER_CST or an SSA_NAME (loop induction variable).
   Returns an SSA_NAME holding the requested limb.  */

tree
bitint_large_huge::handle_bit_field_ref (tree op, tree idx)
{
  if (tree_fits_uhwi_p (idx))
    {
      /* Constant limb index: emit a single BIT_FIELD_REF reading the
	 IDX-th limb directly out of the underlying object.  */
      if (m_first)
	m_data.safe_push (NULL);
      ++m_data_cnt;
      unsigned HOST_WIDE_INT sz = tree_to_uhwi (TYPE_SIZE (m_limb_type));
      unsigned i = tree_to_uhwi (idx);
      if (bitint_big_endian)
	/* Map the semantic (least-significant-first) limb index to the
	   reversed storage order used for big-endian limbs.  */
	i = CEIL (TYPE_PRECISION (TREE_TYPE (op)), limb_prec) - 1 - i;
      tree bfr = build3 (BIT_FIELD_REF, m_limb_type,
			 TREE_OPERAND (op, 0),
			 TYPE_SIZE (m_limb_type),
			 size_binop (PLUS_EXPR, TREE_OPERAND (op, 2),
				     bitsize_int (i * sz)));
      tree r = make_ssa_name (m_limb_type);
      gimple *g = gimple_build_assign (r, bfr);
      insert_before (g);
      /* The partial most significant limb may need a narrower type.  */
      tree type = limb_access_type (TREE_TYPE (op), idx);
      if (!useless_type_conversion_p (type, m_limb_type))
	r = add_cast (type, r);
      return r;
    }
  /* Variable limb index: BIT_FIELD_REF needs a constant bit position,
     so on the first iteration copy the referenced bits into an
     addressable temporary VAR and access its limbs like an array on
     every iteration.  */
  tree var;
  if (m_first)
    {
      unsigned HOST_WIDE_INT sz = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (op)));
      machine_mode mode;
      tree type, bfr;
      /* If some bitwise mode covers exactly the referenced bits, the
	 bits can be copied out with a single BIT_FIELD_REF; otherwise
	 copy the whole base object and index into it later.  */
      if (bitwise_mode_for_size (sz).exists (&mode)
	  && known_eq (GET_MODE_BITSIZE (mode), sz))
	type = bitwise_type_for_mode (mode);
      else
	{
	  mode = VOIDmode;
	  type = TYPE_MAIN_VARIANT (TREE_TYPE (TREE_OPERAND (op, 0)));
	}
      if (TYPE_ALIGN (type) < TYPE_ALIGN (TREE_TYPE (op)))
	type = build_aligned_type (type, TYPE_ALIGN (TREE_TYPE (op)));
      var = create_tmp_var (type);
      TREE_ADDRESSABLE (var) = 1;
      gimple *g;
      if (mode != VOIDmode)
	{
	  bfr = build3 (BIT_FIELD_REF, type, TREE_OPERAND (op, 0),
			TYPE_SIZE (type), TREE_OPERAND (op, 2));
	  g = gimple_build_assign (make_ssa_name (type),
				   BIT_FIELD_REF, bfr);
	  /* The copy is emitted once, in the initialization sequence
	     before the lowering loop.  */
	  gimple_set_location (g, m_loc);
	  gsi_insert_after (&m_init_gsi, g, GSI_NEW_STMT);
	  bfr = gimple_assign_lhs (g);
	}
      else
	bfr = TREE_OPERAND (op, 0);
      g = gimple_build_assign (var, bfr);
      gimple_set_location (g, m_loc);
      gsi_insert_after (&m_init_gsi, g, GSI_NEW_STMT);
      if (mode == VOIDmode)
	{
	  /* Whole object was copied: view VAR at OP's bit offset as an
	     array of limbs.  NOTE(review): the offset is divided by
	     BITS_PER_UNIT, so it is presumably byte-aligned here in
	     this path — verify against the callers.  */
	  unsigned HOST_WIDE_INT nelts
	    = CEIL (tree_to_uhwi (TYPE_SIZE (TREE_TYPE (op))), limb_prec);
	  tree atype = build_array_type_nelts (m_limb_type, nelts);
	  var = build2 (MEM_REF, atype, build_fold_addr_expr (var),
			build_int_cst (build_pointer_type (type),
				       tree_to_uhwi (TREE_OPERAND (op, 2))
				       / BITS_PER_UNIT));
	}
      /* Remember the limb view for the subsequent iterations.  */
      m_data.safe_push (var);
    }
  else
    var = unshare_expr (m_data[m_data_cnt]);
  ++m_data_cnt;
  var = limb_access (TREE_TYPE (op), var, idx, false);
  tree r = make_ssa_name (m_limb_type);
  gimple *g = gimple_build_assign (r, var);
  insert_before (g);
  return r;
}
1967 :
1968 : /* Add a new EH edge from SRC to EH_EDGE->dest, where EH_EDGE
1969 : is an older EH edge, and except for virtual PHIs duplicate the
1970 : PHI argument from the EH_EDGE to the new EH edge. */
1971 :
1972 : static void
1973 20 : add_eh_edge (basic_block src, edge eh_edge)
1974 : {
1975 20 : edge e = make_edge (src, eh_edge->dest, EDGE_EH);
1976 20 : e->probability = profile_probability::very_unlikely ();
1977 20 : for (gphi_iterator gsi = gsi_start_phis (eh_edge->dest);
1978 27 : !gsi_end_p (gsi); gsi_next (&gsi))
1979 : {
1980 7 : gphi *phi = gsi.phi ();
1981 7 : tree lhs = gimple_phi_result (phi);
1982 14 : if (virtual_operand_p (lhs))
1983 4 : continue;
1984 3 : const phi_arg_d *arg = gimple_phi_arg (phi, eh_edge->dest_idx);
1985 3 : add_phi_arg (phi, arg->def, e, arg->locus);
1986 : }
1987 20 : }
1988 :
/* Helper function for handle_stmt method, handle a load from memory.
   STMT is the load statement and IDX the limb index to read, either
   an INTEGER_CST or an SSA_NAME.  Returns an SSA_NAME holding the
   requested limb.  */

tree
bitint_large_huge::handle_load (gimple *stmt, tree idx)
{
  tree rhs1 = gimple_assign_rhs1 (stmt);
  tree rhs_type = TREE_TYPE (rhs1);
  bool eh = stmt_ends_bb_p (stmt);
  bool load_bitfield_p = false;
  edge eh_edge = NULL;
  gimple *g;

  /* A BIT_FIELD_REF at bit offset 0 reads the underlying object
     directly.  */
  if (TREE_CODE (rhs1) == BIT_FIELD_REF
      && integer_zerop (TREE_OPERAND (rhs1, 2)))
    rhs1 = TREE_OPERAND (rhs1, 0);

  /* If the load can throw, find its EH edge so the per-limb loads
     emitted below can be given the same EH semantics.  */
  if (eh)
    {
      edge_iterator ei;
      basic_block bb = gimple_bb (stmt);

      FOR_EACH_EDGE (eh_edge, ei, bb->succs)
	if (eh_edge->flags & EDGE_EH)
	  break;
    }

  if (TREE_CODE (rhs1) == COMPONENT_REF
      && DECL_BIT_FIELD_TYPE (TREE_OPERAND (rhs1, 1)))
    {
      tree fld = TREE_OPERAND (rhs1, 1);
      /* For little-endian, we can allow as inputs bit-fields
	 which start at a limb boundary.  */
      gcc_assert (tree_fits_uhwi_p (DECL_FIELD_BIT_OFFSET (fld)));
      if (!bitint_big_endian
	  && DECL_OFFSET_ALIGN (fld) >= TYPE_ALIGN (TREE_TYPE (rhs1))
	  && (tree_to_uhwi (DECL_FIELD_BIT_OFFSET (fld)) % limb_prec) == 0)
	{
	  load_bitfield_p = true;
	  goto normal_load;
	}
      /* Even if DECL_FIELD_BIT_OFFSET (fld) is a multiple of BITS_PER_UNIT,
	 handle it normally for now.  */
      if (!bitint_big_endian
	  && (tree_to_uhwi (DECL_FIELD_BIT_OFFSET (fld)) % BITS_PER_UNIT) == 0)
	{
	  load_bitfield_p = true;
	  goto normal_load;
	}
      /* Otherwise the bit-field is not even byte aligned.  Access the
	 underlying DECL_BIT_FIELD_REPRESENTATIVE instead and compute
	 the bit offset of the field within it.  */
      tree repr = DECL_BIT_FIELD_REPRESENTATIVE (fld);
      poly_int64 bitoffset;
      poly_uint64 field_offset, repr_offset;
      bool var_field_off = false;
      if (poly_int_tree_p (DECL_FIELD_OFFSET (fld), &field_offset)
	  && poly_int_tree_p (DECL_FIELD_OFFSET (repr), &repr_offset))
	bitoffset = (field_offset - repr_offset) * BITS_PER_UNIT;
      else
	{
	  bitoffset = 0;
	  var_field_off = true;
	}
      bitoffset += (tree_to_uhwi (DECL_FIELD_BIT_OFFSET (fld))
		    - tree_to_uhwi (DECL_FIELD_BIT_OFFSET (repr)));
      tree nrhs1 = build3 (COMPONENT_REF, TREE_TYPE (repr),
			   TREE_OPERAND (rhs1, 0), repr,
			   var_field_off ? TREE_OPERAND (rhs1, 2) : NULL_TREE);
      /* bo_idx is the index of the first limb of the representative
	 containing the bit-field, bo_bit the bit position within that
	 limb and bo_shift the right-shift amount needed to realign a
	 loaded limb.  For big-endian limb order bo_last is the storage
	 index of the last limb touched and the shift is recomputed for
	 the reversed layout.  */
      HOST_WIDE_INT bo = bitoffset.to_constant ();
      unsigned bo_idx = (unsigned HOST_WIDE_INT) bo / limb_prec;
      unsigned bo_bit = (unsigned HOST_WIDE_INT) bo % limb_prec;
      unsigned bo_last = 0;
      unsigned bo_shift = bo_bit;
      unsigned nelts = CEIL (TYPE_PRECISION (rhs_type), limb_prec);
      if (bitint_big_endian)
	{
	  bo_last = CEIL (TYPE_PRECISION (rhs_type) + bo_bit, limb_prec) - 1;
	  bo_shift = (TYPE_PRECISION (rhs_type) + bo_bit) % limb_prec;
	  if (bo_shift)
	    bo_shift = limb_prec - bo_shift;
	}
      if (m_first)
	{
	  /* First iteration.  When processing limbs upwards and the
	     field is not limb-aligned, preload the initial limb in the
	     initialization block and set up a PHI (prepare_data_in_out)
	     so each loop iteration can reuse the high limb loaded by
	     the previous one instead of loading it twice.  */
	  if (m_upwards && bo_shift)
	    {
	      gimple_stmt_iterator save_gsi = m_gsi;
	      m_gsi = m_init_gsi;
	      if (gsi_end_p (m_gsi))
		m_gsi = gsi_after_labels (gsi_bb (m_gsi));
	      else
		gsi_next (&m_gsi);
	      tree t = limb_access (NULL_TREE, nrhs1,
				    size_int (bo_idx + bo_last), true);
	      tree iv = make_ssa_name (m_limb_type);
	      g = gimple_build_assign (iv, t);
	      insert_before (g);
	      if (eh)
		{
		  /* The preloaded limb access can throw as well; split
		     the block and attach a matching EH edge, fixing up
		     any iterators pointing into the split block.  */
		  maybe_duplicate_eh_stmt (g, stmt);
		  if (eh_edge)
		    {
		      edge e = split_block (gsi_bb (m_gsi), g);
		      add_eh_edge (e->src, eh_edge);
		      m_gsi = gsi_after_labels (e->dest);
		      if (gsi_bb (save_gsi) == e->src)
			{
			  if (gsi_end_p (save_gsi))
			    save_gsi = gsi_end_bb (e->dest);
			  else
			    save_gsi = gsi_for_stmt (gsi_stmt (save_gsi));
			}
		      if (m_preheader_bb == e->src)
			m_preheader_bb = e->dest;
		    }
		}
	      m_init_gsi = m_gsi;
	      if (gsi_end_p (m_init_gsi))
		m_init_gsi = gsi_last_bb (gsi_bb (m_init_gsi));
	      else
		gsi_prev (&m_init_gsi);
	      m_gsi = save_gsi;
	      tree out;
	      prepare_data_in_out (iv, idx, &out);
	      out = m_data[m_data_cnt];
	      m_data.safe_push (out);
	    }
	  else
	    {
	      /* No carried limb; still reserve the 3 m_data slots this
		 load uses so m_data_cnt bookkeeping stays uniform.  */
	      m_data.safe_push (NULL_TREE);
	      m_data.safe_push (NULL_TREE);
	      m_data.safe_push (NULL_TREE);
	    }
	}

      /* nidx0 is the index of the low limb to load (only when it is
	 not already carried in IV from the previous iteration), nidx1
	 the index of the following limb needed when the field crosses
	 a limb boundary.  */
      tree nidx0 = NULL_TREE, nidx1 = NULL_TREE;
      tree iv = m_data[m_data_cnt];
      if (m_cast_conditional && iv)
	{
	  /* Record this load so a conditional cast (handle_cast) can
	     later merge the carried limb over its PHIs.  */
	  gcc_assert (!m_bitfld_load);
	  m_bitfld_load = m_data_cnt;
	}
      if (tree_fits_uhwi_p (idx))
	{
	  unsigned prec = TYPE_PRECISION (rhs_type);
	  unsigned HOST_WIDE_INT i = tree_to_uhwi (idx);
	  if (bitint_big_endian)
	    i = nelts - 1 - i;
	  gcc_assert (i * limb_prec < prec);
	  if (bo_shift)
	    nidx1 = size_int (bo_idx + (bitint_big_endian
					? bo_last - i - 1 : i + 1));
	  if ((i + 1) * limb_prec > prec)
	    {
	      /* Partial most significant limb: if all its bits fit in
		 the first loaded limb, no second load is needed.  */
	      prec %= limb_prec;
	      if (prec + bo_bit <= (unsigned) limb_prec)
		nidx1 = NULL_TREE;
	    }
	  if (!iv)
	    nidx0 = size_int (bo_idx + (bitint_big_endian ? bo_last - i : i));
	}
      else
	{
	  /* Variable index: compute the adjusted indexes with PLUS_EXPR
	     statements where the adjustment is non-zero.  */
	  HOST_WIDE_INT adj = bo_idx;
	  if (bitint_big_endian)
	    adj += (HOST_WIDE_INT) bo_last + 1 - nelts;
	  if (!iv)
	    {
	      if (adj == 0)
		nidx0 = idx;
	      else
		{
		  nidx0 = make_ssa_name (sizetype);
		  g = gimple_build_assign (nidx0, PLUS_EXPR, idx,
					   size_int (adj));
		  insert_before (g);
		}
	    }
	  if (bo_shift)
	    {
	      if (bitint_big_endian && adj == 1)
		nidx1 = idx;
	      else
		{
		  nidx1 = make_ssa_name (sizetype);
		  g = gimple_build_assign (nidx1, PLUS_EXPR, idx,
					   size_int (adj + (bitint_big_endian
							    ? -1 : 1)));
		  insert_before (g);
		}
	    }
	}

      tree iv2 = NULL_TREE;
      if (nidx0)
	{
	  /* Load the low limb; propagate EH if needed.  */
	  tree t = limb_access (NULL_TREE, nrhs1, nidx0, true);
	  iv = make_ssa_name (m_limb_type);
	  g = gimple_build_assign (iv, t);
	  insert_before (g);
	  if (eh)
	    {
	      maybe_duplicate_eh_stmt (g, stmt);
	      if (eh_edge)
		{
		  edge e = split_block (gsi_bb (m_gsi), g);
		  m_gsi = gsi_after_labels (e->dest);
		  add_eh_edge (e->src, eh_edge);
		}
	    }
	}
      if (nidx1)
	{
	  /* Load the next limb.  When the most significant limb is
	     accessed through a variable index (m_var_msb), the limb
	     past the precision might not exist, so guard the load and
	     substitute zero on the out-of-range path.  */
	  bool conditional = m_var_msb && !tree_fits_uhwi_p (idx);
	  unsigned prec = TYPE_PRECISION (rhs_type);
	  if (conditional)
	    {
	      if ((prec % limb_prec) == 0
		  || ((prec % limb_prec) + bo_bit > (unsigned) limb_prec))
		conditional = false;
	    }
	  edge edge_true = NULL, edge_false = NULL;
	  if (conditional)
	    {
	      g = gimple_build_cond (NE_EXPR, idx,
				     bitint_big_endian
				     ? size_zero_node
				     : size_int (prec / limb_prec),
				     NULL_TREE, NULL_TREE);
	      if_then (g, profile_probability::likely (),
		       edge_true, edge_false);
	    }
	  tree t = limb_access (NULL_TREE, nrhs1, nidx1, true);
	  /* In an upwards 2-limb loop the second limb becomes the first
	     limb of the next iteration, so reuse the PHI result slot.  */
	  if (m_upwards_2limb
	      && !m_first
	      && !m_bitfld_load
	      && !tree_fits_uhwi_p (idx))
	    iv2 = m_data[m_data_cnt + 1];
	  else
	    iv2 = make_ssa_name (m_limb_type);
	  g = gimple_build_assign (iv2, t);
	  insert_before (g);
	  if (eh)
	    {
	      maybe_duplicate_eh_stmt (g, stmt);
	      if (eh_edge)
		{
		  edge e = split_block (gsi_bb (m_gsi), g);
		  m_gsi = gsi_after_labels (e->dest);
		  add_eh_edge (e->src, eh_edge);
		}
	    }
	  if (conditional)
	    {
	      /* Merge the guarded load with zero from the skipped
		 path.  */
	      tree iv3 = make_ssa_name (m_limb_type);
	      if (eh)
		edge_true = find_edge (gsi_bb (m_gsi), edge_false->dest);
	      gphi *phi = create_phi_node (iv3, edge_true->dest);
	      add_phi_arg (phi, iv2, edge_true, UNKNOWN_LOCATION);
	      add_phi_arg (phi, build_zero_cst (m_limb_type),
			   edge_false, UNKNOWN_LOCATION);
	      m_gsi = gsi_after_labels (edge_true->dest);
	      iv2 = iv3;
	    }
	}
      /* Realign: low limb shifted right by bo_shift supplies the low
	 bits, the next limb shifted left supplies the high bits.  */
      if (bo_shift)
	{
	  g = gimple_build_assign (make_ssa_name (m_limb_type), RSHIFT_EXPR,
				   iv, build_int_cst (unsigned_type_node,
						      bo_shift));
	  insert_before (g);
	  iv = gimple_assign_lhs (g);
	}
      if (iv2)
	{
	  g = gimple_build_assign (make_ssa_name (m_limb_type), LSHIFT_EXPR,
				   iv2, build_int_cst (unsigned_type_node,
						       limb_prec - bo_shift));
	  insert_before (g);
	  g = gimple_build_assign (make_ssa_name (m_limb_type), BIT_IOR_EXPR,
				   gimple_assign_lhs (g), iv);
	  insert_before (g);
	  iv = gimple_assign_lhs (g);
	  /* Carry the freshly loaded high limb into the next
	     iteration.  */
	  if (m_data[m_data_cnt])
	    m_data[m_data_cnt] = iv2;
	}
      if (tree_fits_uhwi_p (idx))
	{
	  tree atype = limb_access_type (rhs_type, idx);
	  if (!useless_type_conversion_p (atype, TREE_TYPE (iv)))
	    iv = add_cast (atype, iv);
	}
      /* Each load consumes 3 m_data slots.  */
      m_data_cnt += 3;
      return iv;
    }

normal_load:
  /* Use write_p = true for loads with EH edges to make
     sure limb_access doesn't add a cast as separate
     statement after it.  */
  rhs1 = limb_access (rhs_type, rhs1, idx, eh, !load_bitfield_p);
  tree ret = make_ssa_name (TREE_TYPE (rhs1));
  g = gimple_build_assign (ret, rhs1);
  insert_before (g);
  if (eh)
    {
      /* Duplicate STMT's EH semantics onto the emitted load and give
	 the new block its own EH edge.  */
      maybe_duplicate_eh_stmt (g, stmt);
      if (eh_edge)
	{
	  edge e = split_block (gsi_bb (m_gsi), g);
	  m_gsi = gsi_after_labels (e->dest);
	  add_eh_edge (e->src, eh_edge);
	}
      if (tree_fits_uhwi_p (idx))
	{
	  /* Add the cast here (limb_access was told not to).  */
	  tree atype = limb_access_type (rhs_type, idx);
	  if (!useless_type_conversion_p (atype, TREE_TYPE (rhs1)))
	    ret = add_cast (atype, ret);
	}
    }
  return ret;
}
2307 :
2308 : /* Return a limb IDX from a mergeable statement STMT. */
2309 :
2310 : tree
2311 38808 : bitint_large_huge::handle_stmt (gimple *stmt, tree idx)
2312 : {
2313 38808 : tree lhs, rhs1, rhs2 = NULL_TREE;
2314 38808 : gimple *g;
2315 38808 : switch (gimple_code (stmt))
2316 : {
2317 38808 : case GIMPLE_ASSIGN:
2318 38808 : if (gimple_assign_load_p (stmt))
2319 21290 : return handle_load (stmt, idx);
2320 17518 : switch (gimple_assign_rhs_code (stmt))
2321 : {
2322 964 : case BIT_AND_EXPR:
2323 964 : case BIT_IOR_EXPR:
2324 964 : case BIT_XOR_EXPR:
2325 964 : rhs2 = handle_operand (gimple_assign_rhs2 (stmt), idx);
2326 : /* FALLTHRU */
2327 1360 : case BIT_NOT_EXPR:
2328 1360 : rhs1 = handle_operand (gimple_assign_rhs1 (stmt), idx);
2329 1360 : lhs = make_ssa_name (TREE_TYPE (rhs1));
2330 1360 : g = gimple_build_assign (lhs, gimple_assign_rhs_code (stmt),
2331 : rhs1, rhs2);
2332 1360 : insert_before (g);
2333 1360 : return lhs;
2334 3944 : case PLUS_EXPR:
2335 3944 : case MINUS_EXPR:
2336 3944 : rhs1 = handle_operand (gimple_assign_rhs1 (stmt), idx);
2337 3944 : rhs2 = handle_operand (gimple_assign_rhs2 (stmt), idx);
2338 3944 : return handle_plus_minus (gimple_assign_rhs_code (stmt),
2339 3944 : rhs1, rhs2, idx);
2340 123 : case NEGATE_EXPR:
2341 123 : rhs2 = handle_operand (gimple_assign_rhs1 (stmt), idx);
2342 123 : rhs1 = build_zero_cst (TREE_TYPE (rhs2));
2343 123 : return handle_plus_minus (MINUS_EXPR, rhs1, rhs2, idx);
2344 152 : case LSHIFT_EXPR:
2345 152 : return handle_lshift (handle_operand (gimple_assign_rhs1 (stmt),
2346 : idx),
2347 152 : gimple_assign_rhs2 (stmt), idx);
2348 5033 : case SSA_NAME:
2349 5033 : case PAREN_EXPR:
2350 5033 : case INTEGER_CST:
2351 5033 : return handle_operand (gimple_assign_rhs1 (stmt), idx);
2352 6865 : CASE_CONVERT:
2353 6865 : return handle_cast (TREE_TYPE (gimple_assign_lhs (stmt)),
2354 6865 : gimple_assign_rhs1 (stmt), idx);
2355 8 : case VIEW_CONVERT_EXPR:
2356 8 : return handle_cast (TREE_TYPE (gimple_assign_lhs (stmt)),
2357 8 : TREE_OPERAND (gimple_assign_rhs1 (stmt), 0),
2358 8 : idx);
2359 33 : case BIT_FIELD_REF:
2360 33 : return handle_bit_field_ref (gimple_assign_rhs1 (stmt), idx);
2361 : default:
2362 : break;
2363 : }
2364 : break;
2365 : default:
2366 : break;
2367 : }
2368 0 : gcc_unreachable ();
2369 : }
2370 :
2371 : /* Return minimum precision of OP at STMT.
2372 : Positive value is minimum precision above which all bits
2373 : are zero, negative means all bits above negation of the
2374 : value are copies of the sign bit. */
2375 :
2376 : static int
2377 8374 : range_to_prec (tree op, gimple *stmt)
2378 : {
2379 8374 : int_range_max r;
2380 8374 : wide_int w;
2381 8374 : tree type = TREE_TYPE (op);
2382 8374 : unsigned int prec = TYPE_PRECISION (type);
2383 :
2384 8374 : if (!optimize
2385 7392 : || !get_range_query (cfun)->range_of_expr (r, op, stmt)
2386 12070 : || r.undefined_p ())
2387 : {
2388 4679 : if (TYPE_UNSIGNED (type))
2389 1897 : return prec;
2390 : else
2391 2782 : return MIN ((int) -prec, -2);
2392 : }
2393 :
2394 3695 : if (!TYPE_UNSIGNED (TREE_TYPE (op)))
2395 : {
2396 2297 : w = r.lower_bound ();
2397 2297 : if (wi::neg_p (w))
2398 : {
2399 1897 : int min_prec1 = wi::min_precision (w, SIGNED);
2400 1897 : w = r.upper_bound ();
2401 1897 : int min_prec2 = wi::min_precision (w, SIGNED);
2402 1897 : int min_prec = MAX (min_prec1, min_prec2);
2403 1897 : return MIN (-min_prec, -2);
2404 : }
2405 : }
2406 :
2407 1798 : w = r.upper_bound ();
2408 1798 : int min_prec = wi::min_precision (w, UNSIGNED);
2409 1798 : return MAX (min_prec, 1);
2410 8374 : }
2411 :
2412 : /* Return address of the first limb of OP and write into *PREC
2413 : its precision. If positive, the operand is zero extended
2414 : from that precision, if it is negative, the operand is sign-extended
2415 : from -*PREC. If PREC_STORED is NULL, it is the toplevel call,
2416 : otherwise *PREC_STORED is prec from the innermost call without
2417 : range optimizations (0 for uninitialized SSA_NAME). */
2418 :
2419 : tree
2420 3782 : bitint_large_huge::handle_operand_addr (tree op, gimple *stmt,
2421 : int *prec_stored, int *prec)
2422 : {
2423 3782 : wide_int w;
2424 3782 : location_t loc_save = m_loc;
2425 3782 : tree ret = NULL_TREE;
2426 3782 : int precs = 0;
2427 3782 : if ((TREE_CODE (TREE_TYPE (op)) != BITINT_TYPE
2428 3768 : || bitint_precision_kind (TREE_TYPE (op)) < bitint_prec_large)
2429 3877 : && TREE_CODE (op) != INTEGER_CST)
2430 : {
2431 115 : do_int:
2432 115 : *prec = range_to_prec (op, stmt);
2433 115 : bitint_prec_kind kind = bitint_prec_small;
2434 115 : gcc_assert (INTEGRAL_TYPE_P (TREE_TYPE (op)));
2435 115 : if (TREE_CODE (TREE_TYPE (op)) == BITINT_TYPE)
2436 104 : kind = bitint_precision_kind (TREE_TYPE (op));
2437 104 : if (kind == bitint_prec_middle)
2438 : {
2439 12 : tree type = NULL_TREE;
2440 12 : op = maybe_cast_middle_bitint (&m_gsi, op, type);
2441 : }
2442 115 : tree op_type = TREE_TYPE (op);
2443 115 : unsigned HOST_WIDE_INT nelts
2444 115 : = CEIL (TYPE_PRECISION (op_type), limb_prec);
2445 : /* Add support for 3 or more limbs filled in from normal
2446 : integral type if this assert fails. If no target chooses
2447 : limb mode smaller than half of largest supported normal
2448 : integral type, this will not be needed. */
2449 115 : gcc_assert (nelts <= 2);
2450 115 : precs = (TYPE_UNSIGNED (op_type)
2451 115 : ? TYPE_PRECISION (op_type) : -TYPE_PRECISION (op_type));
2452 115 : if (*prec <= limb_prec && *prec >= -limb_prec)
2453 : {
2454 102 : nelts = 1;
2455 102 : if (TYPE_UNSIGNED (op_type))
2456 : {
2457 26 : if (precs > limb_prec)
2458 115 : precs = limb_prec;
2459 : }
2460 76 : else if (precs < -limb_prec)
2461 115 : precs = -limb_prec;
2462 : }
2463 115 : if (prec_stored)
2464 0 : *prec_stored = precs;
2465 115 : tree atype = build_array_type_nelts (m_limb_type, nelts);
2466 115 : tree var = create_tmp_var (atype);
2467 115 : tree t1 = op;
2468 115 : if (!useless_type_conversion_p (m_limb_type, op_type))
2469 115 : t1 = add_cast (m_limb_type, t1);
2470 115 : tree v = build4 (ARRAY_REF, m_limb_type, var,
2471 0 : bitint_big_endian && nelts > 1
2472 : ? size_one_node : size_zero_node,
2473 : NULL_TREE, NULL_TREE);
2474 115 : gimple *g = gimple_build_assign (v, t1);
2475 115 : insert_before (g);
2476 115 : if (nelts > 1)
2477 : {
2478 13 : tree lp = build_int_cst (unsigned_type_node, limb_prec);
2479 13 : g = gimple_build_assign (make_ssa_name (op_type),
2480 : RSHIFT_EXPR, op, lp);
2481 13 : insert_before (g);
2482 13 : tree t2 = gimple_assign_lhs (g);
2483 13 : t2 = add_cast (m_limb_type, t2);
2484 13 : v = build4 (ARRAY_REF, m_limb_type, var,
2485 : bitint_big_endian ? size_zero_node : size_one_node,
2486 : NULL_TREE, NULL_TREE);
2487 13 : g = gimple_build_assign (v, t2);
2488 13 : insert_before (g);
2489 : }
2490 115 : ret = build_fold_addr_expr (var);
2491 115 : if (!stmt_ends_bb_p (gsi_stmt (m_gsi)))
2492 : {
2493 114 : tree clobber = build_clobber (atype, CLOBBER_STORAGE_END);
2494 114 : g = gimple_build_assign (var, clobber);
2495 114 : gsi_insert_after (&m_gsi, g, GSI_SAME_STMT);
2496 : }
2497 115 : m_loc = loc_save;
2498 115 : goto do_ret;
2499 : }
2500 3709 : switch (TREE_CODE (op))
2501 : {
2502 2877 : case SSA_NAME:
2503 2877 : if (m_names == NULL
2504 2877 : || !bitmap_bit_p (m_names, SSA_NAME_VERSION (op)))
2505 : {
2506 96 : gimple *g = SSA_NAME_DEF_STMT (op);
2507 96 : m_loc = gimple_location (g);
2508 96 : if (gimple_assign_load_p (g))
2509 : {
2510 36 : *prec = range_to_prec (op, NULL);
2511 36 : precs = (TYPE_UNSIGNED (TREE_TYPE (op))
2512 36 : ? TYPE_PRECISION (TREE_TYPE (op))
2513 25 : : -TYPE_PRECISION (TREE_TYPE (op)));
2514 36 : if (prec_stored)
2515 6 : *prec_stored = precs;
2516 36 : ret = build_fold_addr_expr (gimple_assign_rhs1 (g));
2517 36 : ret = force_gimple_operand_gsi (&m_gsi, ret, true,
2518 : NULL_TREE, true, GSI_SAME_STMT);
2519 : }
2520 60 : else if (gimple_code (g) == GIMPLE_NOP)
2521 : {
2522 2 : *prec = TYPE_UNSIGNED (TREE_TYPE (op)) ? limb_prec : -limb_prec;
2523 2 : precs = *prec;
2524 2 : if (prec_stored)
2525 1 : *prec_stored = 0;
2526 2 : tree var = create_tmp_var (m_limb_type);
2527 2 : TREE_ADDRESSABLE (var) = 1;
2528 2 : ret = build_fold_addr_expr (var);
2529 2 : if (!stmt_ends_bb_p (gsi_stmt (m_gsi)))
2530 : {
2531 2 : tree clobber = build_clobber (m_limb_type,
2532 : CLOBBER_STORAGE_END);
2533 2 : g = gimple_build_assign (var, clobber);
2534 2 : gsi_insert_after (&m_gsi, g, GSI_SAME_STMT);
2535 : }
2536 : }
2537 : else
2538 : {
2539 58 : gcc_assert (gimple_assign_cast_p (g));
2540 58 : tree rhs1 = gimple_assign_rhs1 (g);
2541 58 : bitint_prec_kind kind = bitint_prec_small;
2542 58 : if (TREE_CODE (rhs1) == VIEW_CONVERT_EXPR)
2543 1 : rhs1 = TREE_OPERAND (rhs1, 0);
2544 58 : gcc_assert (INTEGRAL_TYPE_P (TREE_TYPE (rhs1)));
2545 58 : if (TREE_CODE (TREE_TYPE (rhs1)) == BITINT_TYPE)
2546 49 : kind = bitint_precision_kind (TREE_TYPE (rhs1));
2547 49 : if (kind >= bitint_prec_large)
2548 : {
2549 16 : tree lhs_type = TREE_TYPE (op);
2550 16 : tree rhs_type = TREE_TYPE (rhs1);
2551 16 : int prec_stored_val = 0;
2552 16 : ret = handle_operand_addr (rhs1, g, &prec_stored_val, prec);
2553 16 : precs = prec_stored_val;
2554 16 : if (prec_stored)
2555 0 : *prec_stored = prec_stored_val;
2556 16 : if (precs == 0)
2557 : {
2558 1 : gcc_assert (*prec == limb_prec || *prec == -limb_prec);
2559 : precs = *prec;
2560 : }
2561 16 : if (TYPE_PRECISION (lhs_type) > TYPE_PRECISION (rhs_type))
2562 : {
2563 4 : if (TYPE_UNSIGNED (lhs_type)
2564 4 : && !TYPE_UNSIGNED (rhs_type))
2565 1 : gcc_assert (*prec >= 0 || prec_stored == NULL);
2566 : }
2567 : else
2568 : {
2569 12 : if (prec_stored_val == 0)
2570 : /* Non-widening cast of uninitialized value. */;
2571 11 : else if (*prec > 0 && *prec < TYPE_PRECISION (lhs_type))
2572 : ;
2573 11 : else if (TYPE_UNSIGNED (lhs_type))
2574 : {
2575 8 : gcc_assert (*prec > 0
2576 : || prec_stored_val > 0
2577 : || (-prec_stored_val
2578 : >= TYPE_PRECISION (lhs_type)));
2579 8 : *prec = TYPE_PRECISION (lhs_type);
2580 : }
2581 3 : else if (*prec < 0 && -*prec < TYPE_PRECISION (lhs_type))
2582 : ;
2583 : else
2584 3 : *prec = -TYPE_PRECISION (lhs_type);
2585 : }
2586 : }
2587 : else
2588 : {
2589 42 : op = rhs1;
2590 42 : stmt = g;
2591 42 : goto do_int;
2592 : }
2593 : }
2594 54 : m_loc = loc_save;
2595 54 : goto do_ret;
2596 : }
2597 : else
2598 : {
2599 2781 : int p = var_to_partition (m_map, op);
2600 2781 : gcc_assert (m_vars[p] != NULL_TREE);
2601 2781 : *prec = range_to_prec (op, stmt);
2602 2781 : precs = (TYPE_UNSIGNED (TREE_TYPE (op))
2603 2781 : ? TYPE_PRECISION (TREE_TYPE (op))
2604 1652 : : -TYPE_PRECISION (TREE_TYPE (op)));
2605 2781 : if (prec_stored)
2606 9 : *prec_stored = precs;
2607 2781 : ret = build_fold_addr_expr (m_vars[p]);
2608 2781 : goto do_ret;
2609 : }
2610 832 : case INTEGER_CST:
2611 832 : unsigned int min_prec, mp;
2612 832 : tree type;
2613 832 : w = wi::to_wide (op);
2614 832 : if (tree_int_cst_sgn (op) >= 0)
2615 : {
2616 617 : min_prec = wi::min_precision (w, UNSIGNED);
2617 617 : *prec = MAX (min_prec, 1);
2618 : }
2619 : else
2620 : {
2621 215 : min_prec = wi::min_precision (w, SIGNED);
2622 215 : *prec = MIN ((int) -min_prec, -2);
2623 : }
2624 832 : mp = CEIL (min_prec, limb_prec) * limb_prec;
2625 832 : if (mp == 0)
2626 : mp = 1;
2627 832 : if (mp >= (unsigned) TYPE_PRECISION (TREE_TYPE (op))
2628 832 : && (TREE_CODE (TREE_TYPE (op)) == BITINT_TYPE
2629 12 : || TYPE_PRECISION (TREE_TYPE (op)) <= limb_prec))
2630 295 : type = TREE_TYPE (op);
2631 : else
2632 537 : type = build_bitint_type (mp, 1);
2633 832 : if (TREE_CODE (type) != BITINT_TYPE
2634 832 : || bitint_precision_kind (type) == bitint_prec_small)
2635 : {
2636 562 : if (TYPE_PRECISION (type) <= limb_prec)
2637 562 : type = m_limb_type;
2638 : else
2639 : {
2640 0 : while (bitint_precision_kind (mp) == bitint_prec_small)
2641 0 : mp += limb_prec;
2642 : /* This case is for targets which e.g. have 64-bit
2643 : limb but categorize up to 128-bits _BitInts as
2644 : small. We could use type of m_limb_type[2] and
2645 : similar instead to save space. */
2646 0 : type = build_bitint_type (mp, 1);
2647 : }
2648 : }
2649 832 : if (tree_int_cst_sgn (op) >= 0)
2650 617 : precs = MAX (TYPE_PRECISION (type), 1);
2651 : else
2652 215 : precs = MIN ((int) -TYPE_PRECISION (type), -2);
2653 832 : if (prec_stored)
2654 0 : *prec_stored = precs;
2655 832 : op = tree_output_constant_def (fold_convert (type, op));
2656 832 : ret = build_fold_addr_expr (op);
2657 832 : goto do_ret;
2658 0 : default:
2659 0 : gcc_unreachable ();
2660 : }
2661 3782 : do_ret:
2662 3782 : if (bitint_big_endian && prec_stored == NULL)
2663 : {
2664 0 : int p1 = *prec < 0 ? -*prec : *prec;
2665 0 : int p2 = precs < 0 ? -precs : precs;
2666 0 : int c1 = CEIL (p1, limb_prec);
2667 0 : int c2 = CEIL (p2, limb_prec);
2668 0 : gcc_assert (c1 <= c2);
2669 0 : if (c1 != c2)
2670 : {
2671 0 : gimple *g
2672 0 : = gimple_build_assign (make_ssa_name (TREE_TYPE (ret)),
2673 : POINTER_PLUS_EXPR, ret,
2674 0 : size_int ((c2 - c1) * m_limb_size));
2675 0 : insert_before (g);
2676 0 : ret = gimple_assign_lhs (g);
2677 : }
2678 : }
2679 3782 : return ret;
2680 3782 : }
2681 :
2682 : /* Helper function, create a loop before the current location,
2683 : start with sizetype INIT value from the preheader edge. Return
2684 : a PHI result and set *IDX_NEXT to SSA_NAME it creates and uses
2685 : from the latch edge. */
2686 :
2687 : tree
2688 14497 : bitint_large_huge::create_loop (tree init, tree *idx_next)
2689 : {
2690 14497 : if (!gsi_end_p (m_gsi))
2691 12339 : gsi_prev (&m_gsi);
2692 : else
2693 4316 : m_gsi = gsi_last_bb (gsi_bb (m_gsi));
2694 14497 : edge e1 = split_block (gsi_bb (m_gsi), gsi_stmt (m_gsi));
2695 14497 : edge e2 = split_block (e1->dest, (gimple *) NULL);
2696 14497 : edge e3 = make_edge (e1->dest, e1->dest, EDGE_TRUE_VALUE);
2697 14497 : e3->probability = profile_probability::very_unlikely ();
2698 14497 : e2->flags = EDGE_FALSE_VALUE;
2699 14497 : e2->probability = e3->probability.invert ();
2700 14497 : tree idx = make_ssa_name (sizetype);
2701 14497 : gphi *phi = create_phi_node (idx, e1->dest);
2702 14497 : add_phi_arg (phi, init, e1, UNKNOWN_LOCATION);
2703 14497 : *idx_next = make_ssa_name (sizetype);
2704 14497 : add_phi_arg (phi, *idx_next, e3, UNKNOWN_LOCATION);
2705 14497 : m_gsi = gsi_after_labels (e1->dest);
2706 14497 : m_bb = e1->dest;
2707 14497 : m_preheader_bb = e1->src;
2708 14497 : class loop *loop = alloc_loop ();
2709 14497 : loop->header = e1->dest;
2710 14497 : add_loop (loop, e1->src->loop_father);
2711 14497 : return idx;
2712 : }
2713 :
2714 : /* Lower large/huge _BitInt statement mergeable or similar STMT which can be
2715 : lowered using iteration from the least significant limb up to the most
2716 : significant limb. For large _BitInt it is emitted as straight line code
2717 : before current location, for huge _BitInt as a loop handling two limbs
   at once, followed by handling up to two limbs in straight line code (at most
2719 : one full and one partial limb). It can also handle EQ_EXPR/NE_EXPR
2720 : comparisons, in that case CMP_CODE should be the comparison code and
2721 : CMP_OP1/CMP_OP2 the comparison operands. */
2722 :
tree
bitint_large_huge::lower_mergeable_stmt (gimple *stmt, tree_code &cmp_code,
					 tree cmp_op1, tree cmp_op2)
{
  /* eq_p is true when lowering an EQ_EXPR/NE_EXPR comparison rather
     than a mergeable arithmetic statement or store.  */
  bool eq_p = cmp_code != ERROR_MARK;
  tree type;
  if (eq_p)
    type = TREE_TYPE (cmp_op1);
  else
    type = TREE_TYPE (gimple_assign_lhs (stmt));
  gcc_assert (TREE_CODE (type) == BITINT_TYPE);
  bitint_prec_kind kind = bitint_precision_kind (type);
  gcc_assert (kind >= bitint_prec_large);
  gimple *g;
  tree lhs = gimple_get_lhs (stmt);
  tree rhs1, lhs_type = lhs ? TREE_TYPE (lhs) : NULL_TREE;
  /* If the lhs is a large/huge _BitInt SSA_NAME, redirect the stores
     into the VAR_DECL backing its coalesced partition.  */
  if (lhs
      && TREE_CODE (lhs) == SSA_NAME
      && TREE_CODE (TREE_TYPE (lhs)) == BITINT_TYPE
      && bitint_precision_kind (TREE_TYPE (lhs)) >= bitint_prec_large)
    {
      int p = var_to_partition (m_map, lhs);
      gcc_assert (m_vars[p] != NULL_TREE);
      m_lhs = lhs = m_vars[p];
    }
  unsigned cnt, rem = 0, end = 0, prec = TYPE_PRECISION (type);
  bool sext = false;
  tree ext = NULL_TREE, store_operand = NULL_TREE;
  bool eh = false;
  basic_block eh_pad = NULL;
  tree nlhs = NULL_TREE;
  unsigned HOST_WIDE_INT bo_idx = 0;
  unsigned HOST_WIDE_INT bo_bit = 0;
  unsigned bo_shift = 0;
  unsigned bo_last = 0;
  bool bo_be_p = false;
  tree bf_cur = NULL_TREE, bf_next = NULL_TREE;
  /* For stores, find the EH landing pad if the store can throw and,
     for stores into bit-fields, precompute the limb index (bo_idx),
     bit position within that limb (bo_bit) and shift amount (bo_shift)
     relative to the bit-field representative (nlhs).  */
  if (gimple_store_p (stmt))
    {
      store_operand = gimple_assign_rhs1 (stmt);
      eh = stmt_ends_bb_p (stmt);
      if (eh)
	{
	  edge e;
	  edge_iterator ei;
	  basic_block bb = gimple_bb (stmt);

	  FOR_EACH_EDGE (e, ei, bb->succs)
	    if (e->flags & EDGE_EH)
	      {
		eh_pad = e->dest;
		break;
	      }
	}
      if (TREE_CODE (lhs) == COMPONENT_REF
	  && DECL_BIT_FIELD_TYPE (TREE_OPERAND (lhs, 1)))
	{
	  tree fld = TREE_OPERAND (lhs, 1);
	  gcc_assert (tree_fits_uhwi_p (DECL_FIELD_BIT_OFFSET (fld)));
	  tree repr = DECL_BIT_FIELD_REPRESENTATIVE (fld);
	  poly_int64 bitoffset;
	  poly_uint64 field_offset, repr_offset;
	  /* A little-endian bit-field starting at a byte boundary can
	     be stored to directly through lhs itself.  */
	  if (!bitint_big_endian
	      && (tree_to_uhwi (DECL_FIELD_BIT_OFFSET (fld))
		  % BITS_PER_UNIT) == 0)
	    nlhs = lhs;
	  else
	    {
	      /* Otherwise access it through the representative field
		 and compute the bit offset of fld within it.  */
	      bool var_field_off = false;
	      if (poly_int_tree_p (DECL_FIELD_OFFSET (fld), &field_offset)
		  && poly_int_tree_p (DECL_FIELD_OFFSET (repr), &repr_offset))
		bitoffset = (field_offset - repr_offset) * BITS_PER_UNIT;
	      else
		{
		  bitoffset = 0;
		  var_field_off = true;
		}
	      bitoffset += (tree_to_uhwi (DECL_FIELD_BIT_OFFSET (fld))
			    - tree_to_uhwi (DECL_FIELD_BIT_OFFSET (repr)));
	      nlhs = build3 (COMPONENT_REF, TREE_TYPE (repr),
			     TREE_OPERAND (lhs, 0), repr,
			     var_field_off
			     ? TREE_OPERAND (lhs, 2) : NULL_TREE);
	      HOST_WIDE_INT bo = bitoffset.to_constant ();
	      bo_idx = (unsigned HOST_WIDE_INT) bo / limb_prec;
	      bo_bit = (unsigned HOST_WIDE_INT) bo % limb_prec;
	      bo_shift = bo_bit;
	      if (bitint_big_endian)
		{
		  bo_last = CEIL (prec + bo_bit, limb_prec) - 1;
		  bo_shift = (prec + bo_bit) % limb_prec;
		  bo_be_p = true;
		  if (bo_shift)
		    bo_shift = limb_prec - bo_shift;
		}
	    }
	}
    }
  if ((store_operand
       && TREE_CODE (store_operand) == SSA_NAME
       && (m_names == NULL
	   || !bitmap_bit_p (m_names, SSA_NAME_VERSION (store_operand)))
       && gimple_assign_cast_p (SSA_NAME_DEF_STMT (store_operand)))
      || gimple_assign_cast_p (stmt))
    {
      rhs1 = gimple_assign_rhs1 (store_operand
				 ? SSA_NAME_DEF_STMT (store_operand)
				 : stmt);
      if (TREE_CODE (rhs1) == VIEW_CONVERT_EXPR)
	rhs1 = TREE_OPERAND (rhs1, 0);
      /* Optimize mergeable ops ending with widening cast to _BitInt
	 (or followed by store).  We can lower just the limbs of the
	 cast operand and widen afterwards.  */
      if (TREE_CODE (rhs1) == SSA_NAME
	  && (m_names == NULL
	      || !bitmap_bit_p (m_names, SSA_NAME_VERSION (rhs1)))
	  && TREE_CODE (TREE_TYPE (rhs1)) == BITINT_TYPE
	  && bitint_precision_kind (TREE_TYPE (rhs1)) >= bitint_prec_large
	  && (CEIL ((unsigned) TYPE_PRECISION (TREE_TYPE (rhs1)),
		    limb_prec) < CEIL (prec, limb_prec)
	      || (kind == bitint_prec_huge
		  && TYPE_PRECISION (TREE_TYPE (rhs1)) < prec)))
	{
	  store_operand = rhs1;
	  prec = TYPE_PRECISION (TREE_TYPE (rhs1));
	  kind = bitint_precision_kind (TREE_TYPE (rhs1));
	  if (!TYPE_UNSIGNED (TREE_TYPE (rhs1)))
	    sext = true;
	}
    }
  tree idx = NULL_TREE, idx_first = NULL_TREE, idx_next = NULL_TREE;
  /* Large _BitInt: straight line code, one iteration per limb.
     Huge _BitInt: a loop handling two limbs per iteration followed by
     up to two (cnt - 2) straight line limbs.  */
  if (kind == bitint_prec_large)
    cnt = CEIL (prec, limb_prec);
  else
    {
      rem = (prec % (2 * limb_prec));
      end = (prec - rem) / limb_prec;
      cnt = 2 + CEIL (rem, limb_prec);
      idx = idx_first = create_loop (bitint_big_endian
				     ? size_int (cnt - 2 + end - 1)
				     : size_zero_node, &idx_next);
    }

  /* For EQ_EXPR/NE_EXPR split before STMT so that each per-limb
     inequality can branch directly to STMT's block.  */
  basic_block edge_bb = NULL;
  if (eq_p)
    {
      gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
      gsi_prev (&gsi);
      edge e = split_block (gsi_bb (gsi), gsi_stmt (gsi));
      edge_bb = e->src;
      if (kind == bitint_prec_large)
	m_gsi = gsi_end_bb (edge_bb);
    }
  else
    m_after_stmt = stmt;
  if (kind != bitint_prec_large)
    m_upwards_2limb = end;
  m_upwards = true;

  /* True when the most significant limbs of the destination need to be
     filled with a separate sign/zero extension after the (narrower)
     lowered operand's limbs have been stored.  */
  bool separate_ext
    = (prec != (unsigned) TYPE_PRECISION (type)
       && (CEIL ((unsigned) TYPE_PRECISION (type), limb_prec)
	   > CEIL (prec, limb_prec)));
  bool zero_ms_limb = false;
  if (bitint_extended == bitint_ext_full
      && !eq_p
      && !nlhs
      && abi_limb_prec > limb_prec
      && ((CEIL ((unsigned) TYPE_PRECISION (type), abi_limb_prec)
	   * abi_limb_prec / limb_prec)
	  > CEIL ((unsigned) TYPE_PRECISION (type), limb_prec)))
    {
      if (prec == (unsigned) TYPE_PRECISION (type))
	{
	  sext = !TYPE_UNSIGNED (type);
	  separate_ext = true;
	}
      else if (TYPE_UNSIGNED (type) && sext)
	zero_ms_limb = true;
      else
	separate_ext = true;
    }
  unsigned dst_idx_off = 0;
  if (separate_ext && bitint_big_endian)
    dst_idx_off = (CEIL ((unsigned) TYPE_PRECISION (type), limb_prec)
		   - CEIL (prec, limb_prec));

  /* Main per-limb lowering.  For bitint_prec_huge the first two
     iterations are emitted inside the loop created above, iterations
     i >= 2 after it.  */
  for (unsigned i = 0; i < cnt; i++)
    {
      m_data_cnt = 0;
      if (kind == bitint_prec_large)
	idx = size_int (bitint_big_endian ? cnt - 1 - i : i);
      else if (i >= 2)
	idx = size_int (bitint_big_endian ? cnt - 1 - i : end + (i > 2));
      if (eq_p)
	{
	  /* Compare one pair of limbs; on inequality jump straight to
	     STMT's block.  */
	  rhs1 = handle_operand (cmp_op1, idx);
	  tree rhs2 = handle_operand (cmp_op2, idx);
	  g = gimple_build_cond (NE_EXPR, rhs1, rhs2, NULL_TREE, NULL_TREE);
	  insert_before (g);
	  edge e1 = split_block (gsi_bb (m_gsi), g);
	  e1->flags = EDGE_FALSE_VALUE;
	  edge e2 = make_edge (e1->src, gimple_bb (stmt), EDGE_TRUE_VALUE);
	  e1->probability = profile_probability::unlikely ();
	  e2->probability = e1->probability.invert ();
	  if (i == 0)
	    set_immediate_dominator (CDI_DOMINATORS, e2->dest, e2->src);
	  m_gsi = gsi_after_labels (e1->dest);
	}
      else
	{
	  /* Compute one limb of the result (or of the stored operand)
	     and store it.  */
	  if (store_operand)
	    rhs1 = handle_operand (store_operand, idx);
	  else
	    rhs1 = handle_stmt (stmt, idx);
	  if (!useless_type_conversion_p (m_limb_type, TREE_TYPE (rhs1)))
	    rhs1 = add_cast (m_limb_type, rhs1);
	  if (sext && i == cnt - 1)
	    ext = rhs1;
	  /* Adjust the destination limb index for bit-field offset
	     and/or big-endian extension offset.  */
	  tree nidx = idx;
	  HOST_WIDE_INT adj = bo_idx;
	  if (bo_be_p)
	    adj += bo_last - (CEIL (prec, limb_prec) - 1);
	  else
	    adj += dst_idx_off;
	  if (adj)
	    {
	      if (tree_fits_uhwi_p (idx))
		nidx = size_int (tree_to_uhwi (idx) + adj);
	      else
		{
		  nidx = make_ssa_name (sizetype);
		  g = gimple_build_assign (nidx, PLUS_EXPR, idx,
					   size_int (adj));
		  insert_before (g);
		}
	    }
	  bool done = false;
	  basic_block new_bb = NULL;
	  /* Handle stores into bit-fields.  */
	  if (bo_shift)
	    {
	      if (i == 0)
		{
		  /* The first limb only partially fits; store the low
		     limb_prec - bo_shift bits directly and keep the
		     rest in bf_cur for the next iteration.  */
		  edge e2 = NULL;
		  if (kind != bitint_prec_large)
		    {
		      prepare_data_in_out (build_zero_cst (m_limb_type),
					   idx, &bf_next);
		      bf_next = m_data.pop ();
		      bf_cur = m_data.pop ();
		      g = gimple_build_cond (EQ_EXPR, idx,
					     bitint_big_endian
					     ? size_int (CEIL (prec,
							       limb_prec) - 1)
					     : size_zero_node,
					     NULL_TREE, NULL_TREE);
		      edge edge_true;
		      if_then_else (g, profile_probability::unlikely (),
				    edge_true, e2);
		      new_bb = e2->dest;
		    }
		  tree ftype
		    = build_nonstandard_integer_type (limb_prec - bo_shift, 1);
		  tree bfr = build_bit_field_ref (ftype, unshare_expr (nlhs),
						  limb_prec - bo_shift,
						  bitint_big_endian
						  ? (bo_idx + bo_last)
						    * limb_prec
						  : bo_idx * limb_prec
						    + bo_bit);
		  tree t = add_cast (ftype, rhs1);
		  g = gimple_build_assign (bfr, t);
		  insert_before (g);
		  if (eh)
		    {
		      maybe_duplicate_eh_stmt (g, stmt);
		      if (eh_pad)
			{
			  edge e = split_block (gsi_bb (m_gsi), g);
			  m_gsi = gsi_after_labels (e->dest);
			  add_eh_edge (e->src,
				       find_edge (gimple_bb (stmt), eh_pad));
			}
		    }
		  if (kind == bitint_prec_large)
		    {
		      bf_cur = rhs1;
		      done = true;
		    }
		  else if (e2)
		    m_gsi = gsi_after_labels (e2->src);
		}
	      if (!done)
		{
		  /* Combine the leftover bits of the previous limb
		     (bf_cur) with the low bits of this one.  */
		  tree t1 = make_ssa_name (m_limb_type);
		  tree t2 = make_ssa_name (m_limb_type);
		  tree t3 = make_ssa_name (m_limb_type);
		  g = gimple_build_assign (t1, RSHIFT_EXPR, bf_cur,
					   build_int_cst (unsigned_type_node,
							  limb_prec
							  - bo_shift));
		  insert_before (g);
		  g = gimple_build_assign (t2, LSHIFT_EXPR, rhs1,
					   build_int_cst (unsigned_type_node,
							  bo_shift));
		  insert_before (g);
		  bf_cur = rhs1;
		  g = gimple_build_assign (t3, BIT_IOR_EXPR, t1, t2);
		  insert_before (g);
		  rhs1 = t3;
		  if (bf_next && i == 1)
		    {
		      g = gimple_build_assign (bf_next, bf_cur);
		      insert_before (g);
		    }
		}
	    }
	  if (!done)
	    {
	      /* Handle bit-field access to partial last limb if needed.  */
	      if (nlhs
		  && i == cnt - 1
		  && !separate_ext
		  && tree_fits_uhwi_p (idx))
		{
		  unsigned int tprec = TYPE_PRECISION (type);
		  unsigned int rprec = (tprec - 1) % limb_prec + 1;
		  if (rprec + bo_shift < (unsigned) limb_prec)
		    {
		      tree ftype
			= build_nonstandard_integer_type (rprec + bo_shift, 1);
		      tree bfr
			= build_bit_field_ref (ftype, unshare_expr (nlhs),
					       rprec + bo_shift,
					       bitint_big_endian
					       ? bo_idx * limb_prec + bo_bit
					       : (bo_idx + tprec / limb_prec)
						 * limb_prec);
		      tree t = add_cast (ftype, rhs1);
		      g = gimple_build_assign (bfr, t);
		      done = true;
		      bf_cur = NULL_TREE;
		    }
		  else if (rprec + bo_shift == (unsigned) limb_prec)
		    bf_cur = NULL_TREE;
		}
	      /* Otherwise, stores to any other lhs.  */
	      if (!done)
		{
		  tree l = limb_access (nlhs ? NULL_TREE : lhs_type,
					nlhs ? nlhs : lhs, nidx, true);
		  g = gimple_build_assign (l, rhs1);
		}
	      insert_before (g);
	      if (eh)
		{
		  maybe_duplicate_eh_stmt (g, stmt);
		  if (eh_pad)
		    {
		      edge e = split_block (gsi_bb (m_gsi), g);
		      m_gsi = gsi_after_labels (e->dest);
		      add_eh_edge (e->src,
				   find_edge (gimple_bb (stmt), eh_pad));
		    }
		}
	      if (new_bb)
		m_gsi = gsi_after_labels (new_bb);
	    }
	}
      m_first = false;
      /* For the huge kind, finish the loop body after the second
	 iteration: advance the IV, emit the loop back-edge condition
	 and continue after the loop.  */
      if (kind == bitint_prec_huge && i <= 1)
	{
	  if (i == 0)
	    {
	      idx = make_ssa_name (sizetype);
	      g = gimple_build_assign (idx, PLUS_EXPR, idx_first,
				       bitint_big_endian
				       ? size_int (-1) : size_one_node);
	      insert_before (g);
	    }
	  else
	    {
	      g = gimple_build_assign (idx_next, PLUS_EXPR, idx_first,
				       size_int (bitint_big_endian ? -2 : 2));
	      insert_before (g);
	      if (bitint_big_endian)
		g = gimple_build_cond (NE_EXPR, idx_first, size_int (cnt - 1),
				       NULL_TREE, NULL_TREE);
	      else
		g = gimple_build_cond (NE_EXPR, idx_next, size_int (end),
				       NULL_TREE, NULL_TREE);
	      insert_before (g);
	      if (eq_p)
		m_gsi = gsi_after_labels (edge_bb);
	      else
		m_gsi = gsi_for_stmt (stmt);
	      m_bb = NULL;
	    }
	}
    }

  /* Emit the separate sign/zero extension limbs above the lowered
     operand's limbs when the destination is wider.  */
  if (separate_ext)
    {
      if (sext)
	{
	  /* Sign extension limb: arithmetic shift of the most
	     significant computed limb by limb_prec - 1.  */
	  ext = add_cast (signed_type_for (m_limb_type), ext);
	  tree lpm1 = build_int_cst (unsigned_type_node,
				     limb_prec - 1);
	  tree n = make_ssa_name (TREE_TYPE (ext));
	  g = gimple_build_assign (n, RSHIFT_EXPR, ext, lpm1);
	  insert_before (g);
	  ext = add_cast (m_limb_type, n);
	}
      else
	ext = build_zero_cst (m_limb_type);
      kind = bitint_precision_kind (type);
      unsigned start = CEIL (prec, limb_prec);
      prec = TYPE_PRECISION (type);
      if (bitint_extended == bitint_ext_full
	  && !nlhs
	  && !zero_ms_limb
	  && abi_limb_prec > limb_prec)
	{
	  prec = CEIL (prec, abi_limb_prec) * abi_limb_prec;
	  kind = bitint_precision_kind (prec);
	}
      unsigned total = CEIL (prec, limb_prec);
      idx = idx_first = idx_next = NULL_TREE;
      /* Use straight line code when only a few extension limbs are
	 needed, otherwise a loop.  */
      if (prec <= (start + 2 + (bo_shift != 0)) * limb_prec)
	kind = bitint_prec_large;
      if (kind == bitint_prec_large)
	cnt = total - start;
      else
	{
	  rem = prec % limb_prec;
	  end = (prec - rem) / limb_prec;
	  cnt = (bo_shift != 0) + 1 + (rem != 0);
	}
      if (bitint_big_endian && bo_shift != 0 && (prec % limb_prec) == 0)
	++total;
      for (unsigned i = 0; i < cnt; i++)
	{
	  if (kind == bitint_prec_large || (i == 0 && bo_shift != 0))
	    idx = size_int (bo_idx
			    + (bitint_big_endian
			       ? total - 1 - start - i : start + i));
	  else if (i == cnt - 1 && rem != 0)
	    idx = size_int (bo_idx + (bitint_big_endian ? 0 : end));
	  else if (i == (bo_shift != 0))
	    idx = create_loop (size_int (bo_idx
					 + (bitint_big_endian
					    ? total - 1 - start - i
					    : start + i)), &idx_next);
	  rhs1 = ext;
	  /* For bit-field stores, fold the leftover bits kept in
	     bf_cur into the first extension limb.  */
	  if (bf_cur != NULL_TREE && bf_cur != ext)
	    {
	      tree t1 = make_ssa_name (m_limb_type);
	      g = gimple_build_assign (t1, RSHIFT_EXPR, bf_cur,
				       build_int_cst (unsigned_type_node,
						      limb_prec - bo_shift));
	      insert_before (g);
	      if (integer_zerop (ext))
		rhs1 = t1;
	      else
		{
		  tree t2 = make_ssa_name (m_limb_type);
		  rhs1 = make_ssa_name (m_limb_type);
		  g = gimple_build_assign (t2, LSHIFT_EXPR, ext,
					   build_int_cst (unsigned_type_node,
							  bo_shift));
		  insert_before (g);
		  g = gimple_build_assign (rhs1, BIT_IOR_EXPR, t1, t2);
		  insert_before (g);
		}
	      bf_cur = ext;
	    }
	  bool done = false;
	  /* Handle bit-field access to partial last limb if needed.  */
	  if (nlhs && i == cnt - 1)
	    {
	      unsigned int tprec = TYPE_PRECISION (type);
	      unsigned int rprec = (tprec - 1) % limb_prec + 1;
	      if (rprec + bo_shift < (unsigned) limb_prec)
		{
		  tree ftype
		    = build_nonstandard_integer_type (rprec + bo_shift, 1);
		  tree bfr
		    = build_bit_field_ref (ftype, unshare_expr (nlhs),
					   rprec + bo_shift,
					   bitint_big_endian
					   ? bo_idx * limb_prec + bo_bit
					   : (bo_idx + tprec / limb_prec)
					     * limb_prec);
		  tree t = add_cast (ftype, rhs1);
		  g = gimple_build_assign (bfr, t);
		  done = true;
		  bf_cur = NULL_TREE;
		}
	      else if (rprec + bo_shift == (unsigned) limb_prec)
		bf_cur = NULL_TREE;
	    }
	  /* Otherwise, stores to any other lhs.  */
	  if (!done)
	    {
	      tree l = limb_access (nlhs ? NULL_TREE : lhs_type,
				    nlhs ? nlhs : lhs, idx, true);

	      if (bitint_extended
		  && sext
		  && TYPE_UNSIGNED (lhs_type)
		  && tree_fits_uhwi_p (idx)
		  && !nlhs)
		{
		  rhs1 = add_cast (limb_access_type (lhs_type, idx), rhs1);
		  rhs1 = add_cast (TREE_TYPE (l), rhs1);
		}

	      g = gimple_build_assign (l, rhs1);
	    }
	  insert_before (g);
	  if (eh)
	    {
	      maybe_duplicate_eh_stmt (g, stmt);
	      if (eh_pad)
		{
		  edge e = split_block (gsi_bb (m_gsi), g);
		  m_gsi = gsi_after_labels (e->dest);
		  add_eh_edge (e->src, find_edge (gimple_bb (stmt), eh_pad));
		}
	    }
	  if (kind == bitint_prec_huge && i == (bo_shift != 0))
	    {
	      g = gimple_build_assign (idx_next, PLUS_EXPR, idx,
				       bitint_big_endian
				       ? size_int (-1) : size_one_node);
	      insert_before (g);
	      if (bitint_big_endian && rem != 0)
		g = gimple_build_cond (NE_EXPR, idx,
				       size_int (bo_idx + 1),
				       NULL_TREE, NULL_TREE);
	      else
		g = gimple_build_cond (NE_EXPR, idx_next,
				       size_int (bo_idx
						 + (bitint_big_endian
						    ? 0 : end)),
				       NULL_TREE, NULL_TREE);
	      insert_before (g);
	      m_gsi = gsi_for_stmt (stmt);
	      m_bb = NULL;
	    }
	}
    }
  /* Flush the remaining partial limb of a bit-field store.  */
  if (bf_cur != NULL_TREE)
    {
      unsigned int tprec = TYPE_PRECISION (type);
      unsigned int rprec = (tprec + bo_shift) % limb_prec;
      tree ftype = build_nonstandard_integer_type (rprec, 1);
      tree bfr = build_bit_field_ref (ftype, unshare_expr (nlhs),
				      rprec,
				      bitint_big_endian
				      ? bo_idx * limb_prec + bo_bit
				      : (bo_idx + (tprec + bo_bit) / limb_prec)
					* limb_prec);
      rhs1 = bf_cur;
      if (bf_cur != ext)
	{
	  rhs1 = make_ssa_name (TREE_TYPE (rhs1));
	  g = gimple_build_assign (rhs1, RSHIFT_EXPR, bf_cur,
				   build_int_cst (unsigned_type_node,
						  limb_prec - bo_shift));
	  insert_before (g);
	}
      rhs1 = add_cast (ftype, rhs1);
      g = gimple_build_assign (bfr, rhs1);
      insert_before (g);
      if (eh)
	{
	  maybe_duplicate_eh_stmt (g, stmt);
	  if (eh_pad)
	    {
	      edge e = split_block (gsi_bb (m_gsi), g);
	      m_gsi = gsi_after_labels (e->dest);
	      add_eh_edge (e->src, find_edge (gimple_bb (stmt), eh_pad));
	    }
	}
    }
  /* Store zero into the most significant padding limb if needed.  */
  if (zero_ms_limb)
    {
      tree p2 = build_int_cst (sizetype,
			       CEIL ((unsigned) TYPE_PRECISION (type),
				     abi_limb_prec)
			       * abi_limb_prec / limb_prec - 1);
      tree l = limb_access (lhs_type, lhs, p2, true);
      g = gimple_build_assign (l, build_zero_cst (m_limb_type));
      insert_before (g);
      if (eh)
	{
	  maybe_duplicate_eh_stmt (g, stmt);
	  if (eh_pad)
	    {
	      edge e = split_block (gsi_bb (m_gsi), g);
	      m_gsi = gsi_after_labels (e->dest);
	      add_eh_edge (e->src, find_edge (gimple_bb (stmt), eh_pad));
	    }
	}
    }

  /* The original store has been replaced by per-limb stores above.  */
  if (gimple_store_p (stmt))
    {
      unlink_stmt_vdef (stmt);
      release_ssa_name (gimple_vdef (stmt));
      gsi_remove (&m_gsi, true);
    }
  /* For EQ_EXPR/NE_EXPR build a PHI which is true only on the
     fall-through edge reached when all limb pairs compared equal, and
     flip CMP_CODE accordingly for the caller's final test.  */
  if (eq_p)
    {
      lhs = make_ssa_name (boolean_type_node);
      basic_block bb = gimple_bb (stmt);
      gphi *phi = create_phi_node (lhs, bb);
      edge e = find_edge (gsi_bb (m_gsi), bb);
      unsigned int n = EDGE_COUNT (bb->preds);
      for (unsigned int i = 0; i < n; i++)
	{
	  edge e2 = EDGE_PRED (bb, i);
	  add_phi_arg (phi, e == e2 ? boolean_true_node : boolean_false_node,
		       e2, UNKNOWN_LOCATION);
	}
      cmp_code = cmp_code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
      return lhs;
    }
  else
    return NULL_TREE;
}
3356 :
3357 : /* Handle a large/huge _BitInt comparison statement STMT other than
3358 : EQ_EXPR/NE_EXPR. CMP_CODE, CMP_OP1 and CMP_OP2 meaning is like in
3359 : lower_mergeable_stmt. The {GT,GE,LT,LE}_EXPR comparisons are
3360 : lowered by iteration from the most significant limb downwards to
3361 : the least significant one, for large _BitInt in straight line code,
3362 : otherwise with most significant limb handled in
3363 : straight line code followed by a loop handling one limb at a time.
3364 : Comparisons with unsigned huge _BitInt with precisions which are
3365 : multiples of limb precision can use just the loop and don't need to
3366 : handle most significant limb before the loop. The loop or straight
3367 : line code jumps to final basic block if a particular pair of limbs
3368 : is not equal. */
3369 :
tree
bitint_large_huge::lower_comparison_stmt (gimple *stmt, tree_code &cmp_code,
					  tree cmp_op1, tree cmp_op2)
{
  tree type = TREE_TYPE (cmp_op1);
  gcc_assert (TREE_CODE (type) == BITINT_TYPE);
  bitint_prec_kind kind = bitint_precision_kind (type);
  gcc_assert (kind >= bitint_prec_large);
  gimple *g;
  /* Special case signed x >= 0 / x < 0: only the sign bit matters, so
     test just the most significant limb as a signed value.  */
  if (!TYPE_UNSIGNED (type)
      && integer_zerop (cmp_op2)
      && (cmp_code == GE_EXPR || cmp_code == LT_EXPR))
    {
      unsigned end = CEIL ((unsigned) TYPE_PRECISION (type), limb_prec) - 1;
      tree idx = size_int (bitint_big_endian ? 0 : end);
      m_data_cnt = 0;
      tree rhs1 = handle_operand (cmp_op1, idx);
      if (TYPE_UNSIGNED (TREE_TYPE (rhs1)))
	{
	  tree stype = signed_type_for (TREE_TYPE (rhs1));
	  rhs1 = add_cast (stype, rhs1);
	}
      tree lhs = make_ssa_name (boolean_type_node);
      g = gimple_build_assign (lhs, cmp_code, rhs1,
			       build_zero_cst (TREE_TYPE (rhs1)));
      insert_before (g);
      cmp_code = NE_EXPR;
      return lhs;
    }

  /* Number of straight line steps; for huge _BitInt the last step is a
     loop over the end least significant limbs, preceded by a straight
     line step for the partial most significant limb when rem != 0.  */
  unsigned cnt, rem = 0, end = 0;
  tree idx = NULL_TREE, idx_next = NULL_TREE;
  if (kind == bitint_prec_large)
    cnt = CEIL ((unsigned) TYPE_PRECISION (type), limb_prec);
  else
    {
      rem = ((unsigned) TYPE_PRECISION (type) % limb_prec);
      /* For signed types the most significant limb always needs a
	 separate signed comparison, even when it is a full limb.  */
      if (rem == 0 && !TYPE_UNSIGNED (type))
	rem = limb_prec;
      end = ((unsigned) TYPE_PRECISION (type) - rem) / limb_prec;
      cnt = 1 + (rem != 0);
    }

  /* Split before STMT so the per-limb decisions can branch directly to
     STMT's block.  */
  basic_block edge_bb = NULL;
  gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
  gsi_prev (&gsi);
  edge e = split_block (gsi_bb (gsi), gsi_stmt (gsi));
  edge_bb = e->src;
  m_gsi = gsi_end_bb (edge_bb);

  /* Two edges into STMT's block per handled limb: one taken when the
     limb pair compares greater, one when it compares less.  */
  edge *edges = XALLOCAVEC (edge, cnt * 2);
  for (unsigned i = 0; i < cnt; i++)
    {
      m_data_cnt = 0;
      /* Iterate from the most significant limb downwards.  */
      if (kind == bitint_prec_large)
	idx = size_int (bitint_big_endian ? i : cnt - i - 1);
      else if (i == cnt - 1)
	idx = create_loop (size_int (bitint_big_endian ? cnt - 1 : end - 1),
			   &idx_next);
      else
	idx = size_int (bitint_big_endian ? 0 : end);
      tree rhs1 = handle_operand (cmp_op1, idx);
      tree rhs2 = handle_operand (cmp_op2, idx);
      /* The most significant limb of a signed type is compared
	 signed; all other limbs unsigned.  */
      if (i == 0
	  && !TYPE_UNSIGNED (type)
	  && TYPE_UNSIGNED (TREE_TYPE (rhs1)))
	{
	  tree stype = signed_type_for (TREE_TYPE (rhs1));
	  rhs1 = add_cast (stype, rhs1);
	  rhs2 = add_cast (stype, rhs2);
	}
      g = gimple_build_cond (GT_EXPR, rhs1, rhs2, NULL_TREE, NULL_TREE);
      insert_before (g);
      edge e1 = split_block (gsi_bb (m_gsi), g);
      e1->flags = EDGE_FALSE_VALUE;
      edge e2 = make_edge (e1->src, gimple_bb (stmt), EDGE_TRUE_VALUE);
      e1->probability = profile_probability::likely ();
      e2->probability = e1->probability.invert ();
      if (i == 0)
	set_immediate_dominator (CDI_DOMINATORS, e2->dest, e2->src);
      m_gsi = gsi_after_labels (e1->dest);
      edges[2 * i] = e2;
      g = gimple_build_cond (LT_EXPR, rhs1, rhs2, NULL_TREE, NULL_TREE);
      insert_before (g);
      e1 = split_block (gsi_bb (m_gsi), g);
      e1->flags = EDGE_FALSE_VALUE;
      e2 = make_edge (e1->src, gimple_bb (stmt), EDGE_TRUE_VALUE);
      e1->probability = profile_probability::unlikely ();
      e2->probability = e1->probability.invert ();
      m_gsi = gsi_after_labels (e1->dest);
      edges[2 * i + 1] = e2;
      m_first = false;
      /* Finish the loop over the remaining limbs: decrement the IV
	 and loop while it has not reached the last index.  */
      if (kind == bitint_prec_huge && i == cnt - 1)
	{
	  g = gimple_build_assign (idx_next, PLUS_EXPR, idx,
				   bitint_big_endian ? size_one_node
				   : size_int (-1));
	  insert_before (g);
	  g = gimple_build_cond (NE_EXPR, idx,
				 bitint_big_endian
				 ? size_int (end - 1 + (cnt != 1))
				 : size_zero_node,
				 NULL_TREE, NULL_TREE);
	  insert_before (g);
	  edge true_edge, false_edge;
	  extract_true_false_edges_from_block (gsi_bb (m_gsi),
					       &true_edge, &false_edge);
	  m_gsi = gsi_after_labels (false_edge->dest);
	  m_bb = NULL;
	}
    }

  /* Merge the outcomes in a PHI: GT edges yield true iff CMP_CODE is
     GT/GE, LT edges the opposite, and the fall-through edge (all limbs
     equal) is true iff CMP_CODE allows equality (GE/LE).  */
  tree lhs = make_ssa_name (boolean_type_node);
  basic_block bb = gimple_bb (stmt);
  gphi *phi = create_phi_node (lhs, bb);
  for (unsigned int i = 0; i < cnt * 2; i++)
    {
      tree val = ((cmp_code == GT_EXPR || cmp_code == GE_EXPR)
		  ^ (i & 1)) ? boolean_true_node : boolean_false_node;
      add_phi_arg (phi, val, edges[i], UNKNOWN_LOCATION);
    }
  add_phi_arg (phi, (cmp_code == GE_EXPR || cmp_code == LE_EXPR)
		    ? boolean_true_node : boolean_false_node,
	       find_edge (gsi_bb (m_gsi), bb), UNKNOWN_LOCATION);
  cmp_code = NE_EXPR;
  return lhs;
}
3497 :
3498 : /* Lower large/huge _BitInt left and right shift except for left
3499 : shift by < limb_prec constant. */
3500 :
3501 : void
3502 582 : bitint_large_huge::lower_shift_stmt (tree obj, gimple *stmt)
3503 : {
3504 582 : tree rhs1 = gimple_assign_rhs1 (stmt);
3505 582 : tree lhs = gimple_assign_lhs (stmt);
3506 582 : tree_code rhs_code = gimple_assign_rhs_code (stmt);
3507 582 : tree type = TREE_TYPE (rhs1);
3508 582 : gimple *final_stmt = gsi_stmt (m_gsi);
3509 582 : gcc_assert (TREE_CODE (type) == BITINT_TYPE
3510 : && bitint_precision_kind (type) >= bitint_prec_large);
3511 582 : int prec = TYPE_PRECISION (type);
3512 582 : tree n = gimple_assign_rhs2 (stmt), n1, n2, n3, n4;
3513 582 : gimple *g;
3514 582 : if (obj == NULL_TREE)
3515 : {
 : /* No destination supplied by the caller; store into the
 variable backing the lhs coalescing partition. */
3516 464 : int part = var_to_partition (m_map, lhs);
3517 464 : gcc_assert (m_vars[part] != NULL_TREE);
3518 : obj = m_vars[part];
3519 : }
3520 : /* Preparation code common for both left and right shifts.
3521 : unsigned n1 = n % limb_prec;
3522 : size_t n2 = n / limb_prec;
3523 : size_t n3 = n1 != 0;
3524 : unsigned n4 = (limb_prec - n1) % limb_prec;
3525 : (for power of 2 limb_prec n4 can be -n1 & limb_prec). */
3526 582 : if (TREE_CODE (n) == INTEGER_CST)
3527 : {
 : /* Constant shift count: fold n1..n4 to constants up front. */
3528 249 : tree lp = build_int_cst (TREE_TYPE (n), limb_prec);
3529 249 : n1 = int_const_binop (TRUNC_MOD_EXPR, n, lp);
3530 249 : n2 = fold_convert (sizetype, int_const_binop (TRUNC_DIV_EXPR, n, lp));
3531 249 : n3 = size_int (!integer_zerop (n1));
3532 249 : n4 = int_const_binop (TRUNC_MOD_EXPR,
3533 249 : int_const_binop (MINUS_EXPR, lp, n1), lp);
3534 : }
3535 : else
3536 : {
 : /* Variable shift count: emit statements computing n1..n4. */
3537 333 : n1 = make_ssa_name (TREE_TYPE (n));
3538 333 : n2 = make_ssa_name (sizetype);
3539 333 : n3 = make_ssa_name (sizetype);
3540 333 : n4 = make_ssa_name (TREE_TYPE (n));
3541 333 : if (pow2p_hwi (limb_prec))
3542 : {
 : /* For power of two limb_prec use cheaper bit operations:
 n1 = n & (limb_prec - 1); n2 = n >> log2 (limb_prec);
 n4 = -n1 & (limb_prec - 1). */
3543 333 : tree lpm1 = build_int_cst (TREE_TYPE (n), limb_prec - 1);
3544 333 : g = gimple_build_assign (n1, BIT_AND_EXPR, n, lpm1);
3545 333 : insert_before (g);
3546 983 : g = gimple_build_assign (useless_type_conversion_p (sizetype,
3547 333 : TREE_TYPE (n))
3548 317 : ? n2 : make_ssa_name (TREE_TYPE (n)),
3549 : RSHIFT_EXPR, n,
3550 333 : build_int_cst (TREE_TYPE (n),
3551 666 : exact_log2 (limb_prec)));
3552 333 : insert_before (g);
3553 333 : if (gimple_assign_lhs (g) != n2)
3554 : {
 : /* TREE_TYPE (n) differs from sizetype; cast to get n2. */
3555 317 : g = gimple_build_assign (n2, NOP_EXPR, gimple_assign_lhs (g));
3556 317 : insert_before (g);
3557 : }
3558 333 : g = gimple_build_assign (make_ssa_name (TREE_TYPE (n)),
3559 : NEGATE_EXPR, n1);
3560 333 : insert_before (g);
3561 333 : g = gimple_build_assign (n4, BIT_AND_EXPR, gimple_assign_lhs (g),
3562 : lpm1);
3563 333 : insert_before (g);
3564 : }
3565 : else
3566 : {
 : /* Non power of two limb_prec: fall back to division and
 modulo as written in the pseudo-code above. */
3567 0 : tree lp = build_int_cst (TREE_TYPE (n), limb_prec);
3568 0 : g = gimple_build_assign (n1, TRUNC_MOD_EXPR, n, lp);
3569 0 : insert_before (g);
3570 0 : g = gimple_build_assign (useless_type_conversion_p (sizetype,
3571 0 : TREE_TYPE (n))
3572 0 : ? n2 : make_ssa_name (TREE_TYPE (n)),
3573 : TRUNC_DIV_EXPR, n, lp);
3574 0 : insert_before (g);
3575 0 : if (gimple_assign_lhs (g) != n2)
3576 : {
3577 0 : g = gimple_build_assign (n2, NOP_EXPR, gimple_assign_lhs (g));
3578 0 : insert_before (g);
3579 : }
3580 0 : g = gimple_build_assign (make_ssa_name (TREE_TYPE (n)),
3581 : MINUS_EXPR, lp, n1);
3582 0 : insert_before (g);
3583 0 : g = gimple_build_assign (n4, TRUNC_MOD_EXPR, gimple_assign_lhs (g),
3584 : lp);
3585 0 : insert_before (g);
3586 : }
 : /* n3 = (n1 != 0), as a sizetype value. */
3587 333 : g = gimple_build_assign (make_ssa_name (boolean_type_node), NE_EXPR, n1,
3588 333 : build_zero_cst (TREE_TYPE (n)));
3589 333 : insert_before (g);
3590 333 : g = gimple_build_assign (n3, NOP_EXPR, gimple_assign_lhs (g));
3591 333 : insert_before (g);
3592 : }
 : /* p = index of the most significant limb: prec / limb_prec when
 the top limb is partial, one less when prec is a limb multiple. */
3593 1164 : tree p = build_int_cst (sizetype,
3594 582 : prec / limb_prec - (prec % limb_prec == 0));
3595 582 : if (rhs_code == RSHIFT_EXPR)
3596 : {
3597 : /* Lower
3598 : dst = src >> n;
3599 : as
3600 : unsigned n1 = n % limb_prec;
3601 : size_t n2 = n / limb_prec;
3602 : size_t n3 = n1 != 0;
3603 : unsigned n4 = (limb_prec - n1) % limb_prec;
3604 : size_t idx;
3605 : size_t p = prec / limb_prec - (prec % limb_prec == 0);
3606 : int signed_p = (typeof (src) -1) < 0;
3607 : for (idx = n2; idx < ((!signed_p && (prec % limb_prec == 0))
3608 : ? p : p - n3); ++idx)
3609 : dst[idx - n2] = (src[idx] >> n1) | (src[idx + n3] << n4);
3610 : limb_type ext;
3611 : if (prec % limb_prec == 0)
3612 : ext = src[p];
3613 : else if (signed_p)
3614 : ext = ((signed limb_type) (src[p] << (limb_prec
3615 : - (prec % limb_prec))))
3616 : >> (limb_prec - (prec % limb_prec));
3617 : else
3618 : ext = src[p] & (((limb_type) 1 << (prec % limb_prec)) - 1);
3619 : if (!signed_p && (prec % limb_prec == 0))
3620 : ;
3621 : else if (idx < prec / 64)
3622 : {
3623 : dst[idx - n2] = (src[idx] >> n1) | (ext << n4);
3624 : ++idx;
3625 : }
3626 : idx -= n2;
3627 : if (signed_p)
3628 : {
3629 : dst[idx] = ((signed limb_type) ext) >> n1;
3630 : ext = ((signed limb_type) ext) >> (limb_prec - 1);
3631 : }
3632 : else
3633 : {
3634 : dst[idx] = ext >> n1;
3635 : ext = 0;
3636 : }
3637 : for (++idx; idx <= p; ++idx)
3638 : dst[idx] = ext; */
 : /* pmn3 is the main loop's exclusive upper bound ((...) ? p : p - n3
 in the pseudo-code above); for big-endian limb order indexes run
 downwards, so the bound is expressed from the other end. */
3639 387 : tree pmn3;
3640 387 : if (TYPE_UNSIGNED (type) && prec % limb_prec == 0)
3641 100 : pmn3 = bitint_big_endian ? size_zero_node : p;
3642 287 : else if (bitint_big_endian)
3643 : pmn3 = n3;
3644 287 : else if (TREE_CODE (n3) == INTEGER_CST)
3645 114 : pmn3 = int_const_binop (MINUS_EXPR, p, n3);
3646 : else
3647 : {
3648 173 : pmn3 = make_ssa_name (sizetype);
3649 173 : g = gimple_build_assign (pmn3, MINUS_EXPR, p, n3);
3650 173 : insert_before (g);
3651 : }
3652 387 : tree pmn2 = NULL_TREE;
3653 387 : if (bitint_big_endian)
3654 : {
 : /* Big-endian initial loop index is p - n2. */
3655 0 : if (TREE_CODE (n2) == INTEGER_CST)
3656 0 : pmn2 = int_const_binop (MINUS_EXPR, p, n2);
3657 : else
3658 : {
3659 0 : pmn2 = make_ssa_name (sizetype);
3660 0 : g = gimple_build_assign (pmn2, MINUS_EXPR, p, n2);
3661 0 : insert_before (g);
3662 : }
3663 0 : g = gimple_build_cond (GT_EXPR, pmn2, pmn3, NULL_TREE, NULL_TREE);
3664 : }
3665 : else
 : /* Guard the main loop: it runs only when n2 < pmn3. */
3666 387 : g = gimple_build_cond (LT_EXPR, n2, pmn3, NULL_TREE, NULL_TREE);
3667 387 : edge edge_true, edge_false;
3668 387 : if_then (g, profile_probability::likely (), edge_true, edge_false);
3669 387 : tree idx_next;
 : /* Main loop: dst[idx - n2] = (src[idx] >> n1) | (src[idx + n3] << n4). */
3670 774 : tree idx = create_loop (bitint_big_endian ? pmn2 : n2, &idx_next);
3671 387 : tree idxmn2 = make_ssa_name (sizetype);
3672 387 : tree idxpn3 = make_ssa_name (sizetype);
3673 774 : g = gimple_build_assign (idxmn2,
3674 : bitint_big_endian ? PLUS_EXPR : MINUS_EXPR,
3675 : idx, n2);
3676 387 : insert_before (g);
3677 774 : g = gimple_build_assign (idxpn3,
3678 : bitint_big_endian ? MINUS_EXPR : PLUS_EXPR,
3679 : idx, n3);
3680 387 : insert_before (g);
3681 387 : m_data_cnt = 0;
3682 387 : tree t1 = handle_operand (rhs1, idx);
3683 387 : m_first = false;
3684 387 : g = gimple_build_assign (make_ssa_name (m_limb_type),
3685 : RSHIFT_EXPR, t1, n1);
3686 387 : insert_before (g);
3687 387 : t1 = gimple_assign_lhs (g);
3688 387 : if (!integer_zerop (n3))
3689 : {
 : /* OR in the bits shifted down from the next more
 significant limb (skipped when n1 is known zero). */
3690 299 : m_data_cnt = 0;
3691 299 : tree t2 = handle_operand (rhs1, idxpn3);
3692 299 : g = gimple_build_assign (make_ssa_name (m_limb_type),
3693 : LSHIFT_EXPR, t2, n4);
3694 299 : insert_before (g);
3695 299 : t2 = gimple_assign_lhs (g);
3696 299 : g = gimple_build_assign (make_ssa_name (m_limb_type),
3697 : BIT_IOR_EXPR, t1, t2);
3698 299 : insert_before (g);
3699 299 : t1 = gimple_assign_lhs (g);
3700 : }
3701 387 : tree l = limb_access (TREE_TYPE (lhs), obj, idxmn2, true);
3702 387 : g = gimple_build_assign (l, t1);
3703 387 : insert_before (g);
3704 387 : g = gimple_build_assign (idx_next, PLUS_EXPR, idx,
3705 0 : bitint_big_endian ? size_int (-1)
3706 : : size_one_node);
3707 387 : insert_before (g);
3708 774 : g = gimple_build_cond (bitint_big_endian ? GT_EXPR : LT_EXPR,
3709 : idx_next, pmn3, NULL_TREE, NULL_TREE);
3710 387 : insert_before (g);
 : /* PHI merging the loop exit index with the skipped-loop value. */
3711 387 : idx = make_ssa_name (sizetype);
3712 387 : m_gsi = gsi_for_stmt (final_stmt);
3713 387 : gphi *phi = create_phi_node (idx, gsi_bb (m_gsi));
3714 387 : edge_false = find_edge (edge_false->src, gsi_bb (m_gsi));
3715 387 : edge_true = EDGE_PRED (gsi_bb (m_gsi),
3716 : EDGE_PRED (gsi_bb (m_gsi), 0) == edge_false);
3717 774 : add_phi_arg (phi, bitint_big_endian ? pmn2 : n2, edge_false,
3718 : UNKNOWN_LOCATION);
3719 387 : add_phi_arg (phi, idx_next, edge_true, UNKNOWN_LOCATION);
3720 387 : m_data_cnt = 0;
 : /* ext = most significant limb of src, sign or zero extended
 as in the pseudo-code above. */
3721 387 : tree ms = handle_operand (rhs1, bitint_big_endian ? size_zero_node : p);
3722 387 : tree ext = ms;
3723 387 : if (!types_compatible_p (TREE_TYPE (ms), m_limb_type))
3724 230 : ext = add_cast (m_limb_type, ms);
3725 587 : if (!(TYPE_UNSIGNED (type) && prec % limb_prec == 0)
3726 487 : && !integer_zerop (n3))
3727 : {
 : /* The "else if (idx < prec / 64)" step of the pseudo-code:
 one more limb combining src[idx] with ext. */
3728 249 : if (bitint_big_endian)
3729 0 : g = gimple_build_cond (GT_EXPR, idx, size_zero_node,
3730 : NULL_TREE, NULL_TREE);
3731 : else
3732 249 : g = gimple_build_cond (LT_EXPR, idx, p, NULL_TREE, NULL_TREE);
3733 249 : if_then (g, profile_probability::likely (), edge_true, edge_false);
3734 249 : m_data_cnt = 0;
3735 249 : t1 = handle_operand (rhs1, idx);
3736 249 : g = gimple_build_assign (make_ssa_name (m_limb_type),
3737 : RSHIFT_EXPR, t1, n1);
3738 249 : insert_before (g);
3739 249 : t1 = gimple_assign_lhs (g);
3740 249 : g = gimple_build_assign (make_ssa_name (m_limb_type),
3741 : LSHIFT_EXPR, ext, n4);
3742 249 : insert_before (g);
3743 249 : tree t2 = gimple_assign_lhs (g);
3744 249 : g = gimple_build_assign (make_ssa_name (m_limb_type),
3745 : BIT_IOR_EXPR, t1, t2);
3746 249 : insert_before (g);
3747 249 : t1 = gimple_assign_lhs (g);
3748 249 : idxmn2 = make_ssa_name (sizetype);
3749 498 : g = gimple_build_assign (idxmn2, bitint_big_endian
3750 : ? PLUS_EXPR : MINUS_EXPR, idx, n2);
3751 249 : insert_before (g);
3752 249 : l = limb_access (TREE_TYPE (lhs), obj, idxmn2, true);
3753 249 : g = gimple_build_assign (l, t1);
3754 249 : insert_before (g);
3755 249 : idx_next = make_ssa_name (sizetype);
3756 249 : g = gimple_build_assign (idx_next, PLUS_EXPR, idx,
3757 : bitint_big_endian
3758 0 : ? size_int (-1) : size_one_node);
3759 249 : insert_before (g);
3760 249 : m_gsi = gsi_for_stmt (final_stmt);
3761 249 : tree nidx = make_ssa_name (sizetype);
3762 249 : phi = create_phi_node (nidx, gsi_bb (m_gsi));
3763 249 : edge_false = find_edge (edge_false->src, gsi_bb (m_gsi));
3764 249 : edge_true = EDGE_PRED (gsi_bb (m_gsi),
3765 : EDGE_PRED (gsi_bb (m_gsi), 0) == edge_false);
3766 249 : add_phi_arg (phi, idx, edge_false, UNKNOWN_LOCATION);
3767 249 : add_phi_arg (phi, idx_next, edge_true, UNKNOWN_LOCATION);
3768 249 : idx = nidx;
3769 : }
 : /* dst[idx] = ext >> n1 (arithmetic shift for signed types),
 then ext becomes all sign bits or zero for the tail loop. */
3770 774 : g = gimple_build_assign (make_ssa_name (sizetype),
3771 : bitint_big_endian ? PLUS_EXPR : MINUS_EXPR,
3772 : idx, n2);
3773 387 : insert_before (g);
3774 387 : idx = gimple_assign_lhs (g);
3775 387 : tree sext = ext;
3776 387 : if (!TYPE_UNSIGNED (type))
3777 187 : sext = add_cast (signed_type_for (m_limb_type), ext);
3778 387 : g = gimple_build_assign (make_ssa_name (TREE_TYPE (sext)),
3779 : RSHIFT_EXPR, sext, n1);
3780 387 : insert_before (g);
3781 387 : t1 = gimple_assign_lhs (g);
3782 387 : if (!TYPE_UNSIGNED (type))
3783 : {
3784 187 : t1 = add_cast (m_limb_type, t1);
3785 187 : g = gimple_build_assign (make_ssa_name (TREE_TYPE (sext)),
3786 : RSHIFT_EXPR, sext,
3787 187 : build_int_cst (TREE_TYPE (n),
3788 187 : limb_prec - 1));
3789 187 : insert_before (g);
3790 187 : ext = add_cast (m_limb_type, gimple_assign_lhs (g));
3791 : }
3792 : else
3793 200 : ext = build_zero_cst (m_limb_type);
3794 387 : l = limb_access (TREE_TYPE (lhs), obj, idx, true);
3795 387 : g = gimple_build_assign (l, t1);
3796 387 : insert_before (g);
3797 387 : g = gimple_build_assign (make_ssa_name (sizetype), PLUS_EXPR, idx,
3798 : bitint_big_endian
3799 0 : ? size_int (-1) : size_one_node);
3800 387 : insert_before (g);
3801 387 : tree p2 = p;
3802 387 : if (bitint_big_endian)
3803 : {
3804 0 : tree new_idx = gimple_assign_lhs (g);
3805 0 : g = gimple_build_cond (NE_EXPR, idx, size_zero_node,
3806 : NULL_TREE, NULL_TREE);
3807 0 : idx = new_idx;
3808 : }
3809 : else
3810 : {
 : /* With full extension ABI and wider ABI limbs, also fill the
 padding limbs above the precision, hence larger bound p2. */
3811 387 : if (bitint_extended == bitint_ext_full
3812 0 : && abi_limb_prec > limb_prec)
3813 0 : p2 = build_int_cst (sizetype,
3814 0 : CEIL (prec, abi_limb_prec)
3815 0 : * abi_limb_prec / limb_prec - 1);
3816 387 : idx = gimple_assign_lhs (g);
3817 387 : g = gimple_build_cond (LE_EXPR, idx, p2, NULL_TREE, NULL_TREE);
3818 : }
 : /* Tail loop: fill remaining most significant limbs with ext. */
3819 387 : if_then (g, profile_probability::likely (), edge_true, edge_false);
3820 387 : idx = create_loop (idx, &idx_next);
3821 387 : l = limb_access (TREE_TYPE (lhs), obj, idx, true);
3822 387 : g = gimple_build_assign (l, ext);
3823 387 : insert_before (g);
3824 387 : g = gimple_build_assign (idx_next, PLUS_EXPR, idx,
3825 : bitint_big_endian
3826 0 : ? size_int (-1) : size_one_node);
3827 387 : insert_before (g);
3828 387 : if (bitint_big_endian)
3829 0 : g = gimple_build_cond (NE_EXPR, idx, size_zero_node,
3830 : NULL_TREE, NULL_TREE);
3831 : else
3832 387 : g = gimple_build_cond (LE_EXPR, idx_next, p2, NULL_TREE, NULL_TREE);
3833 387 : insert_before (g);
3834 : }
3835 : else
3836 : {
3837 : /* Lower
3838 : dst = src << n;
3839 : as
3840 : unsigned n1 = n % limb_prec;
3841 : size_t n2 = n / limb_prec;
3842 : size_t n3 = n1 != 0;
3843 : unsigned n4 = (limb_prec - n1) % limb_prec;
3844 : size_t idx;
3845 : size_t p = prec / limb_prec - (prec % limb_prec == 0);
3846 : for (idx = p; (ssize_t) idx >= (ssize_t) (n2 + n3); --idx)
3847 : dst[idx] = (src[idx - n2] << n1) | (src[idx - n2 - n3] >> n4);
3848 : if (n1)
3849 : {
3850 : dst[idx] = src[idx - n2] << n1;
3851 : --idx;
3852 : }
3853 : for (; (ssize_t) idx >= 0; --idx)
3854 : dst[idx] = 0; */
 : /* n2pn3 is the main loop's inclusive lower bound (n2 + n3 in the
 pseudo-code above; mirrored from p for big-endian order). */
3855 195 : tree n2pn3;
3856 195 : if (TREE_CODE (n2) == INTEGER_CST && TREE_CODE (n3) == INTEGER_CST)
3857 71 : n2pn3 = int_const_binop (PLUS_EXPR, n2, n3);
3858 : else
3859 : {
3860 124 : n2pn3 = make_ssa_name (sizetype);
3861 124 : g = gimple_build_assign (n2pn3, PLUS_EXPR, n2, n3);
3862 124 : insert_before (g);
3863 : }
3864 195 : if (bitint_big_endian)
3865 : {
3866 0 : if (TREE_CODE (n2pn3) == INTEGER_CST)
3867 0 : n2pn3 = int_const_binop (MINUS_EXPR, p, n2pn3);
3868 : else
3869 : {
3870 0 : g = gimple_build_assign (make_ssa_name (sizetype),
3871 : MINUS_EXPR, p, n2pn3);
3872 0 : insert_before (g);
3873 0 : n2pn3 = gimple_assign_lhs (g);
3874 : }
3875 : }
3876 : /* For LSHIFT_EXPR, we can use handle_operand with non-INTEGER_CST
3877 : idx even to access the most significant partial limb. */
3878 195 : m_var_msb = true;
3879 195 : if (integer_zerop (n3))
3880 : /* For n3 == 0 p >= n2 + n3 is always true for all valid shift
3881 : counts. Emit if (true) condition that can be optimized later. */
3882 45 : g = gimple_build_cond (NE_EXPR, boolean_true_node, boolean_false_node,
3883 : NULL_TREE, NULL_TREE);
3884 150 : else if (bitint_big_endian)
3885 0 : g = gimple_build_cond (NE_EXPR, n2pn3, size_int (-1), NULL_TREE,
3886 : NULL_TREE);
3887 : else
3888 150 : g = gimple_build_cond (LE_EXPR, n2pn3, p, NULL_TREE, NULL_TREE);
3889 195 : edge edge_true, edge_false;
3890 195 : if_then (g, profile_probability::likely (), edge_true, edge_false);
3891 195 : tree idx_next;
 : /* Main loop: dst[idx] = (src[idx-n2] << n1) | (src[idx-n2-n3] >> n4),
 walking from the most significant limb downwards. */
3892 195 : tree idx = create_loop (bitint_big_endian ? size_zero_node : p,
3893 : &idx_next);
3894 195 : tree idxmn2 = make_ssa_name (sizetype);
3895 195 : tree idxmn2mn3 = make_ssa_name (sizetype);
3896 390 : g = gimple_build_assign (idxmn2,
3897 : bitint_big_endian ? PLUS_EXPR : MINUS_EXPR,
3898 : idx, n2);
3899 195 : insert_before (g);
3900 390 : g = gimple_build_assign (idxmn2mn3,
3901 : bitint_big_endian ? PLUS_EXPR : MINUS_EXPR,
3902 : idxmn2, n3);
3903 195 : insert_before (g);
3904 195 : m_data_cnt = 0;
3905 195 : tree t1 = handle_operand (rhs1, idxmn2);
3906 195 : m_first = false;
3907 195 : g = gimple_build_assign (make_ssa_name (m_limb_type),
3908 : LSHIFT_EXPR, t1, n1);
3909 195 : insert_before (g);
3910 195 : t1 = gimple_assign_lhs (g);
3911 195 : if (!integer_zerop (n3))
3912 : {
 : /* OR in the bits shifted up from the next less
 significant limb (skipped when n1 is known zero). */
3913 150 : m_data_cnt = 0;
3914 150 : tree t2 = handle_operand (rhs1, idxmn2mn3);
3915 150 : g = gimple_build_assign (make_ssa_name (m_limb_type),
3916 : RSHIFT_EXPR, t2, n4);
3917 150 : insert_before (g);
3918 150 : t2 = gimple_assign_lhs (g);
3919 150 : g = gimple_build_assign (make_ssa_name (m_limb_type),
3920 : BIT_IOR_EXPR, t1, t2);
3921 150 : insert_before (g);
3922 150 : t1 = gimple_assign_lhs (g);
3923 : }
3924 195 : tree l = limb_access (TREE_TYPE (lhs), obj, idx, true);
3925 195 : g = gimple_build_assign (l, t1);
3926 195 : insert_before (g);
3927 390 : g = gimple_build_assign (idx_next, PLUS_EXPR, idx,
3928 : bitint_big_endian
3929 195 : ? size_one_node : size_int (-1));
3930 195 : insert_before (g);
3931 195 : tree sn2pn3 = add_cast (ssizetype, n2pn3);
3932 390 : g = gimple_build_cond (bitint_big_endian ? LE_EXPR : GE_EXPR,
3933 : add_cast (ssizetype, idx_next), sn2pn3,
3934 : NULL_TREE, NULL_TREE);
3935 195 : insert_before (g);
 : /* PHI merging the loop exit index with the skipped-loop value. */
3936 195 : idx = make_ssa_name (sizetype);
3937 195 : m_gsi = gsi_for_stmt (final_stmt);
3938 195 : gphi *phi = create_phi_node (idx, gsi_bb (m_gsi));
3939 195 : edge_false = find_edge (edge_false->src, gsi_bb (m_gsi));
3940 195 : edge_true = EDGE_PRED (gsi_bb (m_gsi),
3941 : EDGE_PRED (gsi_bb (m_gsi), 0) == edge_false);
3942 195 : add_phi_arg (phi, bitint_big_endian ? size_zero_node : p,
3943 : edge_false, UNKNOWN_LOCATION);
3944 195 : add_phi_arg (phi, idx_next, edge_true, UNKNOWN_LOCATION);
3945 195 : m_data_cnt = 0;
3946 195 : if (!integer_zerop (n3))
3947 : {
 : /* The "if (n1) dst[idx] = src[idx - n2] << n1" step, guarded
 at runtime on n3 (i.e. n1 != 0). */
3948 150 : g = gimple_build_cond (NE_EXPR, n3, size_zero_node,
3949 : NULL_TREE, NULL_TREE);
3950 150 : if_then (g, profile_probability::likely (), edge_true, edge_false);
3951 150 : idxmn2 = make_ssa_name (sizetype);
3952 300 : g = gimple_build_assign (idxmn2,
3953 : bitint_big_endian ? PLUS_EXPR : MINUS_EXPR,
3954 : idx, n2);
3955 150 : insert_before (g);
3956 150 : m_data_cnt = 0;
3957 150 : t1 = handle_operand (rhs1, idxmn2);
3958 150 : g = gimple_build_assign (make_ssa_name (m_limb_type),
3959 : LSHIFT_EXPR, t1, n1);
3960 150 : insert_before (g);
3961 150 : t1 = gimple_assign_lhs (g);
3962 150 : l = limb_access (TREE_TYPE (lhs), obj, idx, true);
3963 150 : g = gimple_build_assign (l, t1);
3964 150 : insert_before (g);
3965 150 : idx_next = make_ssa_name (sizetype);
3966 300 : g = gimple_build_assign (idx_next, PLUS_EXPR, idx,
3967 : bitint_big_endian
3968 150 : ? size_one_node : size_int (-1));
3969 150 : insert_before (g);
3970 150 : m_gsi = gsi_for_stmt (final_stmt);
3971 150 : tree nidx = make_ssa_name (sizetype);
3972 150 : phi = create_phi_node (nidx, gsi_bb (m_gsi));
3973 150 : edge_false = find_edge (edge_false->src, gsi_bb (m_gsi));
3974 150 : edge_true = EDGE_PRED (gsi_bb (m_gsi),
3975 : EDGE_PRED (gsi_bb (m_gsi), 0) == edge_false);
3976 150 : add_phi_arg (phi, idx, edge_false, UNKNOWN_LOCATION);
3977 150 : add_phi_arg (phi, idx_next, edge_true, UNKNOWN_LOCATION);
3978 150 : idx = nidx;
3979 : }
 : /* Tail loop: zero the remaining least significant limbs. */
3980 195 : if (bitint_big_endian)
3981 0 : g = gimple_build_cond (LE_EXPR, idx, p, NULL_TREE, NULL_TREE);
3982 : else
3983 195 : g = gimple_build_cond (GE_EXPR, add_cast (ssizetype, idx),
3984 : ssize_int (0), NULL_TREE, NULL_TREE);
3985 195 : if_then (g, profile_probability::likely (), edge_true, edge_false);
3986 195 : idx = create_loop (idx, &idx_next);
3987 195 : l = limb_access (TREE_TYPE (lhs), obj, idx, true);
3988 195 : g = gimple_build_assign (l, build_zero_cst (m_limb_type));
3989 195 : insert_before (g);
3990 390 : g = gimple_build_assign (idx_next, PLUS_EXPR, idx,
3991 : bitint_big_endian
3992 195 : ? size_one_node : size_int (-1));
3993 195 : insert_before (g);
3994 195 : if (bitint_big_endian)
3995 0 : g = gimple_build_cond (LE_EXPR, idx_next, p, NULL_TREE, NULL_TREE);
3996 : else
3997 195 : g = gimple_build_cond (GE_EXPR, add_cast (ssizetype, idx_next),
3998 : ssize_int (0), NULL_TREE, NULL_TREE);
3999 195 : insert_before (g);
4000 195 : if (bitint_extended && prec % limb_prec != 0)
4001 : {
4002 : /* The most significant limb has been updated either in the
4003 : loop or in the if after it. To simplify the code, just
4004 : read it back from memory and extend. */
4005 0 : m_gsi = gsi_after_labels (edge_false->dest);
4006 0 : idx = bitint_big_endian ? size_zero_node : p;
4007 0 : tree l = limb_access (TREE_TYPE (lhs), obj, idx, true);
4008 0 : tree type = limb_access_type (TREE_TYPE (lhs), idx);
4009 0 : tree v = make_ssa_name (m_limb_type);
4010 0 : g = gimple_build_assign (v, l);
4011 0 : insert_before (g);
 : /* Casting to the limb access type and back performs the
 sign/zero extension of the partial top limb. */
4012 0 : v = add_cast (type, v);
4013 0 : l = limb_access (TREE_TYPE (lhs), obj, idx, true);
4014 0 : v = add_cast (m_limb_type, v);
4015 0 : g = gimple_build_assign (l, v);
4016 0 : insert_before (g);
4017 0 : if (bitint_extended == bitint_ext_full
4018 0 : && abi_limb_prec > limb_prec
4019 0 : && (CEIL (prec, abi_limb_prec) * abi_limb_prec
4020 0 : > CEIL (prec, limb_prec) * limb_prec))
4021 : {
 : /* Also fill the padding limb above the top limb, with
 zeros for unsigned, copies of the sign bit otherwise. */
4022 0 : tree p2 = build_int_cst (sizetype,
4023 : CEIL (prec, abi_limb_prec)
4024 0 : * abi_limb_prec / limb_prec - 1);
4025 0 : if (TYPE_UNSIGNED (TREE_TYPE (lhs)))
4026 0 : v = build_zero_cst (m_limb_type);
4027 : else
4028 : {
4029 0 : v = add_cast (signed_type_for (m_limb_type), v);
4030 0 : g = gimple_build_assign (make_ssa_name (TREE_TYPE (v)),
4031 : RSHIFT_EXPR, v,
4032 : build_int_cst (unsigned_type_node,
4033 0 : limb_prec - 1));
4034 0 : insert_before (g);
4035 0 : v = add_cast (m_limb_type, gimple_assign_lhs (g));
4036 : }
4037 0 : l = limb_access (TREE_TYPE (lhs), obj, p2, true);
4038 0 : g = gimple_build_assign (l, v);
4039 0 : insert_before (g);
4040 : }
4041 : }
4042 195 : else if (bitint_extended == bitint_ext_full
4043 0 : && abi_limb_prec > limb_prec
4044 0 : && (CEIL (prec, abi_limb_prec) * abi_limb_prec
4045 0 : > CEIL (prec, limb_prec) * limb_prec))
4046 : {
 : /* prec is a limb multiple, but there is still an ABI padding
 limb above it which needs extending. */
4047 0 : m_gsi = gsi_after_labels (edge_false->dest);
4048 0 : tree p2 = build_int_cst (sizetype,
4049 : CEIL (prec, abi_limb_prec)
4050 0 : * abi_limb_prec / limb_prec - 1);
4051 0 : tree v;
4052 0 : if (TYPE_UNSIGNED (TREE_TYPE (lhs)))
4053 0 : v = build_zero_cst (m_limb_type);
4054 : else
4055 : {
4056 0 : tree l = limb_access (TREE_TYPE (lhs), obj, p, true);
4057 0 : v = make_ssa_name (m_limb_type);
4058 0 : g = gimple_build_assign (v, l);
4059 0 : insert_before (g);
4060 0 : v = add_cast (signed_type_for (m_limb_type), v);
4061 0 : g = gimple_build_assign (make_ssa_name (TREE_TYPE (v)),
4062 : RSHIFT_EXPR, v,
4063 : build_int_cst (unsigned_type_node,
4064 0 : limb_prec - 1));
4065 0 : insert_before (g);
4066 0 : v = add_cast (m_limb_type, gimple_assign_lhs (g));
4067 : }
4068 0 : tree l = limb_access (TREE_TYPE (lhs), obj, p2, true);
4069 0 : g = gimple_build_assign (l, v);
4070 0 : insert_before (g);
4071 : }
4072 : }
4073 582 : }
4074 :
4075 : /* Lower large/huge _BitInt multiplication or division. */
4076 :
4077 : void
4078 354 : bitint_large_huge::lower_muldiv_stmt (tree obj, gimple *stmt)
4079 : {
4080 354 : tree rhs1 = gimple_assign_rhs1 (stmt);
4081 354 : tree rhs2 = gimple_assign_rhs2 (stmt);
4082 354 : tree lhs = gimple_assign_lhs (stmt);
4083 354 : tree_code rhs_code = gimple_assign_rhs_code (stmt);
4084 354 : tree type = TREE_TYPE (rhs1);
4085 354 : gcc_assert (TREE_CODE (type) == BITINT_TYPE
4086 : && bitint_precision_kind (type) >= bitint_prec_large);
4087 354 : int prec = TYPE_PRECISION (type), prec1, prec2;
4088 354 : bool ext_ms_limb = false;
4089 354 : bool do_ext = false;
 : /* Both operands are passed by address, together with their
 precisions, to the library helper. */
4090 354 : rhs1 = handle_operand_addr (rhs1, stmt, NULL, &prec1);
4091 354 : rhs2 = handle_operand_addr (rhs2, stmt, NULL, &prec2);
4092 354 : if (obj == NULL_TREE)
4093 : {
 : /* No destination supplied; use the VAR_DECL backing the lhs
 partition and pass its address as the result pointer. */
4094 150 : int part = var_to_partition (m_map, lhs);
4095 150 : gcc_assert (m_vars[part] != NULL_TREE);
4096 150 : obj = m_vars[part];
4097 150 : lhs = build_fold_addr_expr (obj);
4098 : }
4099 : else
4100 : {
4101 204 : lhs = build_fold_addr_expr (obj);
4102 204 : lhs = force_gimple_operand_gsi (&m_gsi, lhs, true,
4103 : NULL_TREE, true, GSI_SAME_STMT);
4104 : }
 : /* For targets with extended _BitInt padding, decide whether the
 padding bits of the result must be re-extended afterwards. */
4105 354 : if (bitint_extended && TYPE_OVERFLOW_WRAPS (type))
4106 : {
4107 0 : if (rhs_code == MULT_EXPR)
4108 : do_ext = true;
4109 : /* For signed division with -fwrapv, minimum negative / -1 needs
4110 : is minimum negative and the padding bits above it should be all
4111 : set. */
4112 0 : else if (!TYPE_UNSIGNED (type)
4113 0 : && (rhs_code == TRUNC_DIV_EXPR || rhs_code == EXACT_DIV_EXPR))
4114 354 : do_ext = true;
4115 : }
4116 354 : if (bitint_extended == bitint_ext_full
4117 0 : && abi_limb_prec > limb_prec
4118 0 : && (CEIL (prec, abi_limb_prec) * abi_limb_prec
4119 0 : > CEIL (prec, limb_prec) * limb_prec))
4120 : {
4121 : /* unsigned multiplication needs to wrap around, so we can't
4122 : increase prec. Similarly for -fwrapv. */
4123 0 : if (do_ext)
4124 : ext_ms_limb = true;
4125 : else
4126 0 : prec = CEIL (prec, abi_limb_prec) * abi_limb_prec;
4127 : }
4128 354 : tree sitype = lang_hooks.types.type_for_mode (SImode, 0);
4129 354 : gimple *g;
 : /* Emit the IFN_MULBITINT or IFN_DIVMODBITINT internal call; for
 division vs. modulo the unused result pointer is null with
 precision 0. */
4130 354 : switch (rhs_code)
4131 : {
4132 199 : case MULT_EXPR:
4133 199 : g = gimple_build_call_internal (IFN_MULBITINT, 6,
4134 199 : lhs, build_int_cst (sitype, prec),
4135 199 : rhs1, build_int_cst (sitype, prec1),
4136 199 : rhs2, build_int_cst (sitype, prec2));
4137 199 : insert_before (g);
4138 199 : break;
4139 103 : case TRUNC_DIV_EXPR:
4140 103 : case EXACT_DIV_EXPR:
4141 103 : g = gimple_build_call_internal (IFN_DIVMODBITINT, 8,
4142 103 : lhs, build_int_cst (sitype, prec),
4143 : null_pointer_node,
4144 : build_int_cst (sitype, 0),
4145 103 : rhs1, build_int_cst (sitype, prec1),
4146 103 : rhs2, build_int_cst (sitype, prec2));
4147 103 : if (!stmt_ends_bb_p (stmt))
4148 102 : gimple_call_set_nothrow (as_a <gcall *> (g), true);
4149 103 : insert_before (g);
4150 103 : break;
4151 52 : case TRUNC_MOD_EXPR:
4152 52 : g = gimple_build_call_internal (IFN_DIVMODBITINT, 8, null_pointer_node,
4153 : build_int_cst (sitype, 0),
4154 52 : lhs, build_int_cst (sitype, prec),
4155 52 : rhs1, build_int_cst (sitype, prec1),
4156 52 : rhs2, build_int_cst (sitype, prec2));
4157 52 : if (!stmt_ends_bb_p (stmt))
4158 49 : gimple_call_set_nothrow (as_a <gcall *> (g), true);
4159 52 : insert_before (g);
4160 52 : break;
4161 0 : default:
4162 0 : gcc_unreachable ();
4163 : }
 : /* If the original statement could throw, duplicate its EH edge
 onto the newly emitted call. */
4164 354 : if (stmt_ends_bb_p (stmt))
4165 : {
4166 4 : maybe_duplicate_eh_stmt (g, stmt);
4167 4 : edge e1;
4168 4 : edge_iterator ei;
4169 4 : basic_block bb = gimple_bb (stmt);
4170 :
4171 4 : FOR_EACH_EDGE (e1, ei, bb->succs)
4172 4 : if (e1->flags & EDGE_EH)
4173 : break;
4174 4 : if (e1)
4175 : {
4176 4 : edge e2 = split_block (gsi_bb (m_gsi), g);
4177 4 : m_gsi = gsi_after_labels (e2->dest);
4178 4 : add_eh_edge (e2->src, e1);
4179 : }
4180 : }
4181 354 : if (do_ext
4182 354 : && ((prec % limb_prec) != 0 || (ext_ms_limb && !TYPE_UNSIGNED (type))))
4183 : {
4184 : /* Unsigned multiplication wraps, but libgcc function will return the
4185 : bits beyond prec within the top limb as another limb of the full
4186 : multiplication. So, clear the padding bits here. */
4187 0 : tree idx = size_int (bitint_big_endian ? 0 : prec / limb_prec);
4188 0 : tree l = limb_access (type, obj, idx, true);
4189 0 : tree ctype = limb_access_type (type, idx);
4190 0 : tree v = make_ssa_name (m_limb_type);
4191 0 : g = gimple_build_assign (v, l);
4192 0 : insert_before (g);
4193 0 : tree v2 = v;
4194 0 : if ((prec % limb_prec) != 0)
4195 : {
 : /* Cast to the limb access type and back to extend the
 partial top limb, then store it back. */
4196 0 : v = add_cast (ctype, v);
4197 0 : l = limb_access (type, obj, idx, true);
4198 0 : v = add_cast (m_limb_type, v);
4199 0 : v2 = v;
4200 0 : g = gimple_build_assign (l, v);
4201 0 : insert_before (g);
4202 : }
4203 0 : if (ext_ms_limb && !TYPE_UNSIGNED (type))
4204 : {
 : /* Fill the ABI padding limb with copies of the sign bit. */
4205 0 : v2 = add_cast (signed_type_for (m_limb_type), v2);
4206 0 : tree lpm1 = build_int_cst (unsigned_type_node, limb_prec - 1);
4207 0 : v = make_ssa_name (TREE_TYPE (v2));
4208 0 : g = gimple_build_assign (v, RSHIFT_EXPR, v2, lpm1);
4209 0 : insert_before (g);
4210 0 : unsigned int i
4211 0 : = CEIL (prec, abi_limb_prec) * abi_limb_prec / limb_prec;
4212 0 : v = add_cast (m_limb_type, v);
4213 0 : g = gimple_build_assign (limb_access (type, obj, size_int (i - 1),
4214 : true), v);
4215 0 : insert_before (g);
4216 0 : ext_ms_limb = false;
4217 : }
4218 : }
4219 354 : if (ext_ms_limb)
4220 : {
 : /* Remaining case: zero the ABI padding limb above the result. */
4221 0 : unsigned int i = CEIL (prec, abi_limb_prec) * abi_limb_prec / limb_prec;
4222 0 : g = gimple_build_assign (limb_access (type, obj, size_int (i - 1), true),
4223 : build_zero_cst (m_limb_type));
4224 0 : insert_before (g);
4225 : }
4226 354 : }
4227 :
4228 : /* Lower large/huge _BitInt conversion to/from floating point. */
4229 :
4230 : void
4231 317 : bitint_large_huge::lower_float_conv_stmt (tree obj, gimple *stmt)
4232 : {
4233 317 : tree rhs1 = gimple_assign_rhs1 (stmt);
4234 317 : tree lhs = gimple_assign_lhs (stmt);
4235 317 : tree_code rhs_code = gimple_assign_rhs_code (stmt);
4236 317 : tree sitype = lang_hooks.types.type_for_mode (SImode, 0);
4237 317 : gimple *g;
4238 317 : if (rhs_code == FIX_TRUNC_EXPR)
4239 : {
 : /* Floating point to _BitInt conversion. */
4240 179 : tree type = TREE_TYPE (lhs);
4241 179 : int prec = TYPE_PRECISION (type);
4242 179 : bool extend_ms_limb = false;
4243 179 : if (bitint_extended == bitint_ext_full
4244 0 : && abi_limb_prec > limb_prec
4245 0 : && (CEIL (prec, abi_limb_prec) * abi_limb_prec
4246 0 : > CEIL (prec, limb_prec) * limb_prec))
4247 179 : extend_ms_limb = true;
 : /* Signedness of the result is encoded as negated precision. */
4248 179 : if (!TYPE_UNSIGNED (type))
4249 93 : prec = -prec;
4250 179 : if (obj == NULL_TREE)
4251 : {
 : /* No destination supplied; use the VAR_DECL backing the lhs
 partition and pass its address as the result pointer. */
4252 135 : int part = var_to_partition (m_map, lhs);
4253 135 : gcc_assert (m_vars[part] != NULL_TREE);
4254 135 : obj = m_vars[part];
4255 135 : lhs = build_fold_addr_expr (obj);
4256 : }
4257 : else
4258 : {
4259 44 : lhs = build_fold_addr_expr (obj);
4260 44 : lhs = force_gimple_operand_gsi (&m_gsi, lhs, true,
4261 : NULL_TREE, true, GSI_SAME_STMT);
4262 : }
4263 179 : scalar_mode from_mode
4264 179 : = as_a <scalar_mode> (TYPE_MODE (TREE_TYPE (rhs1)));
4265 : #ifdef HAVE_SFmode
4266 : /* IEEE single is a full superset of both IEEE half and
4267 : bfloat formats, convert to float first and then to _BitInt
4268 : to avoid the need of another 2 library routines. */
4269 179 : if ((REAL_MODE_FORMAT (from_mode) == &arm_bfloat_half_format
4270 179 : || REAL_MODE_FORMAT (from_mode) == &ieee_half_format)
4271 191 : && REAL_MODE_FORMAT (SFmode) == &ieee_single_format)
4272 : {
4273 12 : tree type = lang_hooks.types.type_for_mode (SFmode, 0);
4274 12 : if (type)
4275 12 : rhs1 = add_cast (type, rhs1);
4276 : }
4277 : #endif
4278 179 : g = gimple_build_call_internal (IFN_FLOATTOBITINT, 3,
4279 179 : lhs, build_int_cst (sitype, prec),
4280 : rhs1);
4281 179 : insert_before (g);
4282 179 : if (extend_ms_limb)
4283 : {
 : /* Fill the ABI padding limb above the result: copies of the
 sign bit (from the limb below) for signed results
 (prec < 0), zero for unsigned. */
4284 0 : unsigned int i
4285 0 : = (CEIL (prec < 0 ? -prec : prec, abi_limb_prec)
4286 0 : * abi_limb_prec / limb_prec);
4287 0 : tree val;
4288 0 : if (prec < 0)
4289 : {
4290 0 : g = gimple_build_assign (make_ssa_name (m_limb_type),
4291 : limb_access (type, obj,
4292 0 : size_int (i - 2),
4293 : true));
4294 0 : insert_before (g);
4295 0 : val = add_cast (signed_type_for (m_limb_type),
4296 : gimple_assign_lhs (g));
4297 0 : g = gimple_build_assign (make_ssa_name (TREE_TYPE (val)),
4298 : RSHIFT_EXPR, val,
4299 : build_int_cst (unsigned_type_node,
4300 0 : limb_prec - 1));
4301 0 : insert_before (g);
4302 0 : val = add_cast (m_limb_type, gimple_assign_lhs (g));
4303 : }
4304 : else
4305 0 : val = build_zero_cst (m_limb_type);
4306 0 : g = gimple_build_assign (limb_access (type, obj, size_int (i - 1),
4307 : true), val);
4308 0 : insert_before (g);
4309 : }
4310 : }
4311 : else
4312 : {
 : /* _BitInt to floating point conversion: pass the operand by
 address with its precision and let the call produce lhs. */
4313 138 : int prec;
4314 138 : rhs1 = handle_operand_addr (rhs1, stmt, NULL, &prec);
4315 138 : g = gimple_build_call_internal (IFN_BITINTTOFLOAT, 2,
4316 138 : rhs1, build_int_cst (sitype, prec));
4317 138 : gimple_call_set_lhs (g, lhs);
4318 138 : if (!stmt_ends_bb_p (stmt))
4319 137 : gimple_call_set_nothrow (as_a <gcall *> (g), true);
4320 138 : gsi_replace (&m_gsi, g, true);
4321 : }
4322 317 : }
4323 :
4324 : /* Helper method for lower_addsub_overflow and lower_mul_overflow.
4325 : If check_zero is true, caller wants to check if all bits in [start, end)
4326 : are zero, otherwise if bits in [start, end) are either all zero or
4327 : all ones. L is the limb with index LIMB, START and END are measured
4328 : in bits. */
4329 :
4330 : tree
4331 6423 : bitint_large_huge::arith_overflow_extract_bits (unsigned int start,
4332 : unsigned int end, tree l,
4333 : unsigned int limb,
4334 : bool check_zero)
4335 : {
4336 6423 : unsigned startlimb = start / limb_prec;
4337 6423 : unsigned endlimb = (end - 1) / limb_prec;
4338 6423 : gimple *g;
4339 :
 : /* The bit range covers whole limbs only; every bit of L that is
 : in range, so the limb can be tested as is.  */
4340 6423 : if ((start % limb_prec) == 0 && (end % limb_prec) == 0)
4341 : return l;
 : /* The whole [START, END) range lives inside this single limb.  */
4342 6155 : if (startlimb == endlimb && limb == startlimb)
4343 : {
4344 2020 : if (check_zero)
4345 : {
 : /* Caller only needs all-zeros test: mask the in-range bits.  */
4346 1479 : wide_int w = wi::shifted_mask (start % limb_prec,
4347 1479 : end - start, false, limb_prec);
4348 2958 : g = gimple_build_assign (make_ssa_name (m_limb_type),
4349 : BIT_AND_EXPR, l,
4350 1479 : wide_int_to_tree (m_limb_type, w));
4351 1479 : insert_before (g);
4352 1479 : return gimple_assign_lhs (g);
4353 1479 : }
 : /* Caller needs all-zeros-or-all-ones test: shift the field up to
 : the most significant bits (if END is not limb aligned) and then
 : arithmetically shift it back down, so the result is 0 or -1 iff
 : the field was all zeros or all ones.  */
4354 541 : unsigned int shift = start % limb_prec;
4355 541 : if ((end % limb_prec) != 0)
4356 : {
4357 336 : unsigned int lshift = (-end) % limb_prec;
4358 336 : shift += lshift;
4359 336 : g = gimple_build_assign (make_ssa_name (m_limb_type),
4360 : LSHIFT_EXPR, l,
4361 : build_int_cst (unsigned_type_node,
4362 336 : lshift));
4363 336 : insert_before (g);
4364 336 : l = gimple_assign_lhs (g);
4365 : }
4366 541 : l = add_cast (signed_type_for (m_limb_type), l);
4367 541 : g = gimple_build_assign (make_ssa_name (TREE_TYPE (l)),
4368 : RSHIFT_EXPR, l,
4369 541 : build_int_cst (unsigned_type_node, shift));
4370 541 : insert_before (g);
4371 541 : return add_cast (m_limb_type, gimple_assign_lhs (g));
4372 : }
 : /* L is the lowest (possibly partial) limb of the range: drop the
 : bits below START.  For the all-zeros-or-all-ones test use an
 : arithmetic shift so copies of the bit at START - 1 do not leak in;
 : no, rather so bits above shift in sign copies consistently.  */
4373 4135 : else if (limb == startlimb)
4374 : {
4375 1981 : if ((start % limb_prec) == 0)
4376 : return l;
4377 1867 : if (!check_zero)
4378 941 : l = add_cast (signed_type_for (m_limb_type), l);
4379 1867 : g = gimple_build_assign (make_ssa_name (TREE_TYPE (l)),
4380 : RSHIFT_EXPR, l,
4381 : build_int_cst (unsigned_type_node,
4382 1867 : start % limb_prec));
4383 1867 : insert_before (g);
4384 1867 : l = gimple_assign_lhs (g);
4385 1867 : if (!check_zero)
4386 941 : l = add_cast (m_limb_type, l);
4387 1867 : return l;
4388 : }
 : /* L is the highest (possibly partial) limb of the range: mask the
 : bits below END (check_zero) or sign-extend bit END - 1 through the
 : whole limb (all-zeros-or-all-ones test).  */
4389 2154 : else if (limb == endlimb)
4390 : {
4391 1703 : if ((end % limb_prec) == 0)
4392 : return l;
4393 1686 : if (check_zero)
4394 : {
4395 880 : wide_int w = wi::mask (end % limb_prec, false, limb_prec);
4396 1760 : g = gimple_build_assign (make_ssa_name (m_limb_type),
4397 : BIT_AND_EXPR, l,
4398 880 : wide_int_to_tree (m_limb_type, w));
4399 880 : insert_before (g);
4400 880 : return gimple_assign_lhs (g);
4401 880 : }
4402 806 : unsigned int shift = (-end) % limb_prec;
4403 806 : g = gimple_build_assign (make_ssa_name (m_limb_type),
4404 : LSHIFT_EXPR, l,
4405 806 : build_int_cst (unsigned_type_node, shift));
4406 806 : insert_before (g);
4407 806 : l = add_cast (signed_type_for (m_limb_type), gimple_assign_lhs (g));
4408 806 : g = gimple_build_assign (make_ssa_name (TREE_TYPE (l)),
4409 : RSHIFT_EXPR, l,
4410 806 : build_int_cst (unsigned_type_node, shift));
4411 806 : insert_before (g);
4412 806 : return add_cast (m_limb_type, gimple_assign_lhs (g));
4413 : }
 : /* A middle limb fully inside [START, END); nothing to adjust.  */
4414 : return l;
4415 : }
4416 :
4417 : /* Helper method for lower_addsub_overflow and lower_mul_overflow. Store
4418 : result including overflow flag into the right locations. */
4419 :
4420 : void
4421 4181 : bitint_large_huge::finish_arith_overflow (tree var, tree obj, tree type,
4422 : tree ovf, tree lhs, tree orig_obj,
4423 : gimple *stmt, unsigned nelts,
4424 : tree_code code)
4425 : {
4426 4181 : gimple *g;
4427 :
 : /* No memory object and the result fits into an ordinary (small or
 : middle _BitInt) integral type: reassemble the value from one or two
 : limbs of VAR and replace STMT with a COMPLEX_EXPR of value and
 : overflow flag.  */
4428 4181 : if (obj == NULL_TREE
4429 4181 : && (TREE_CODE (type) != BITINT_TYPE
4430 231 : || bitint_precision_kind (type) < bitint_prec_large))
4431 : {
4432 : /* Add support for 3 or more limbs filled in from normal integral
4433 : type if this assert fails. If no target chooses limb mode smaller
4434 : than half of largest supported normal integral type, this will not
4435 : be needed. */
4436 247 : gcc_assert (TYPE_PRECISION (type) <= 2 * limb_prec);
4437 247 : tree lhs_type = type;
4438 247 : if (TREE_CODE (type) == BITINT_TYPE
4439 247 : && bitint_precision_kind (type) == bitint_prec_middle)
4440 46 : lhs_type = build_nonstandard_integer_type (TYPE_PRECISION (type),
4441 46 : TYPE_UNSIGNED (type));
 : /* Load the least significant limb of VAR.  */
4442 247 : tree r1 = limb_access (NULL_TREE, var,
4443 : bitint_big_endian
4444 0 : ? size_int (nelts - 1) : size_zero_node, true);
4445 247 : g = gimple_build_assign (make_ssa_name (m_limb_type), r1);
4446 247 : insert_before (g);
4447 247 : r1 = gimple_assign_lhs (g);
4448 247 : if (!useless_type_conversion_p (lhs_type, TREE_TYPE (r1)))
4449 247 : r1 = add_cast (lhs_type, r1);
4450 247 : if (TYPE_PRECISION (lhs_type) > limb_prec)
4451 : {
 : /* Two-limb result: fetch the second limb, shift it into the
 : upper half and OR it in.  */
4452 90 : tree r2 = limb_access (NULL_TREE, var,
4453 : bitint_big_endian
4454 0 : ? size_int (nelts - 2) : size_one_node, true);
4455 90 : g = gimple_build_assign (make_ssa_name (m_limb_type), r2);
4456 90 : insert_before (g);
4457 90 : r2 = gimple_assign_lhs (g);
4458 90 : r2 = add_cast (lhs_type, r2);
4459 90 : g = gimple_build_assign (make_ssa_name (lhs_type), LSHIFT_EXPR, r2,
4460 : build_int_cst (unsigned_type_node,
4461 90 : limb_prec));
4462 90 : insert_before (g);
4463 90 : g = gimple_build_assign (make_ssa_name (lhs_type), BIT_IOR_EXPR, r1,
4464 : gimple_assign_lhs (g));
4465 90 : insert_before (g);
4466 90 : r1 = gimple_assign_lhs (g);
4467 : }
4468 247 : if (lhs_type != type)
4469 46 : r1 = add_cast (type, r1);
4470 247 : ovf = add_cast (lhs_type, ovf);
4471 247 : if (lhs_type != type)
4472 46 : ovf = add_cast (type, ovf);
4473 247 : g = gimple_build_assign (lhs, COMPLEX_EXPR, r1, ovf);
4474 247 : m_gsi = gsi_for_stmt (stmt);
4475 247 : gsi_replace (&m_gsi, g, true);
4476 : }
4477 : else
4478 : {
4479 3934 : unsigned HOST_WIDE_INT obj_nelts = 0;
4480 3934 : tree atype = NULL_TREE;
4481 3934 : if (obj)
4482 : {
4483 3843 : obj_nelts = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (obj))) / limb_prec;
 : /* When OBJ is a lowering-created temporary (no ORIG_OBJ), it
 : holds both the value and the overflow flag, so the value part
 : is only half of it.  */
4484 3843 : if (orig_obj == NULL_TREE)
4485 2269 : obj_nelts >>= 1;
4486 3843 : atype = build_array_type_nelts (m_limb_type, obj_nelts);
4487 : }
 : /* For targets which extend _BitInt padding bits, make sure the
 : bits above the precision in the most significant ABI limb are
 : zero or sign extended as required.  */
4488 3934 : if (bitint_extended && (var || obj))
4489 : {
4490 0 : unsigned prec = TYPE_PRECISION (type);
4491 0 : unsigned prec_limbs = CEIL (prec, limb_prec);
4492 0 : bool ext_ms_limb
4493 : = (bitint_extended == bitint_ext_full
4494 0 : && abi_limb_prec > limb_prec
4495 0 : && (CEIL (prec, abi_limb_prec) * abi_limb_prec
4496 0 : > CEIL (prec, limb_prec) * limb_prec));
4497 : /* For .{ADD,SUB}_OVERFLOW the partial limb if any is
4498 : already extended in lower_addsub_overflow. */
4499 0 : if ((code == MULT_EXPR && (prec % limb_prec) != 0)
4500 0 : || (ext_ms_limb && !TYPE_UNSIGNED (type)))
4501 : {
 : /* Reload the partial most significant value limb.  */
4502 0 : tree plm1idx = size_int (bitint_big_endian
4503 : ? nelts - obj_nelts : prec_limbs - 1);
4504 0 : tree plm1type
4505 0 : = limb_access_type (type, bitint_big_endian
4506 : ? size_zero_node : plm1idx);
4507 0 : tree l = limb_access (bitint_big_endian ? NULL_TREE : type,
4508 : var ? var : obj, plm1idx, true);
4509 0 : tree rhs = make_ssa_name (TREE_TYPE (l));
4510 0 : g = gimple_build_assign (rhs, l);
4511 0 : insert_before (g);
4512 0 : if (code == MULT_EXPR && (prec % limb_prec) != 0)
4513 : {
 : /* Round-trip through the narrower access type to extend
 : the bits above PREC within the partial limb.  */
4514 0 : if (!useless_type_conversion_p (plm1type, TREE_TYPE (rhs)))
4515 0 : rhs = add_cast (plm1type, rhs);
4516 0 : if (!useless_type_conversion_p (TREE_TYPE (l),
4517 0 : TREE_TYPE (rhs)))
4518 0 : rhs = add_cast (TREE_TYPE (l), rhs);
4519 0 : l = limb_access (bitint_big_endian ? NULL_TREE : type,
4520 : var ? var : obj, plm1idx, true);
4521 0 : g = gimple_build_assign (l, rhs);
4522 0 : insert_before (g);
4523 : }
4524 0 : if (ext_ms_limb && !TYPE_UNSIGNED (type))
4525 : {
 : /* Signed type: broadcast the sign bit of the partial limb
 : into the extra padding limb of the ABI limb.  */
4526 0 : rhs = add_cast (signed_type_for (m_limb_type), rhs);
4527 0 : tree lpm1 = build_int_cst (unsigned_type_node,
4528 0 : limb_prec - 1);
4529 0 : tree v = make_ssa_name (TREE_TYPE (rhs));
4530 0 : g = gimple_build_assign (v, RSHIFT_EXPR, rhs, lpm1);
4531 0 : insert_before (g);
4532 0 : unsigned int i
4533 0 : = CEIL (prec, abi_limb_prec) * abi_limb_prec / limb_prec;
4534 0 : v = add_cast (m_limb_type, v);
4535 0 : g = gimple_build_assign (limb_access (type, var ? var : obj,
4536 0 : size_int (i - 1),
4537 : true), v);
4538 0 : insert_before (g);
4539 0 : ext_ms_limb = false;
4540 : }
4541 : }
4542 0 : if (ext_ms_limb)
4543 : {
 : /* Unsigned type (or already handled sign): zero the extra
 : padding limb of the most significant ABI limb.  */
4544 0 : unsigned int i
4545 0 : = CEIL (prec, abi_limb_prec) * abi_limb_prec / limb_prec;
4546 0 : g = gimple_build_assign (limb_access (type, var ? var : obj,
4547 0 : size_int (i - 1), true),
4548 : build_zero_cst (m_limb_type));
4549 0 : insert_before (g);
4550 : }
4551 : }
 : /* The result was computed into temporary VAR; copy the value limbs
 : into OBJ as one aggregate assignment.  */
4552 3934 : if (var && obj)
4553 : {
4554 512 : tree v1, v2;
4555 512 : tree off;
4556 512 : if (orig_obj == NULL_TREE)
4557 : {
4558 32 : off = build_zero_cst (build_pointer_type (TREE_TYPE (obj)));
4559 32 : v1 = build2 (MEM_REF, atype,
4560 : build_fold_addr_expr (unshare_expr (obj)), off);
4561 : }
4562 480 : else if (!useless_type_conversion_p (atype, TREE_TYPE (obj)))
4563 8 : v1 = build1 (VIEW_CONVERT_EXPR, atype, unshare_expr (obj));
4564 : else
4565 472 : v1 = unshare_expr (obj);
4566 512 : off = build_int_cst (build_pointer_type (TREE_TYPE (var)),
4567 : bitint_big_endian
4568 512 : ? (nelts - obj_nelts) * m_limb_size : 0);
4569 512 : v2 = build2 (MEM_REF, atype, build_fold_addr_expr (var), off);
4570 512 : g = gimple_build_assign (v1, v2);
4571 512 : insert_before (g);
4572 : }
 : /* Big-endian OBJ narrower than the computation: move the value
 : limbs down to the start of OBJ with memmove (ranges overlap).  */
4573 3422 : else if (obj && bitint_big_endian && nelts != obj_nelts)
4574 : {
4575 0 : gcc_assert (nelts > obj_nelts);
4576 0 : tree fn = builtin_decl_implicit (BUILT_IN_MEMMOVE);
4577 0 : tree off = build_int_cst (build_pointer_type (TREE_TYPE (obj)),
4578 0 : (nelts - obj_nelts) * m_limb_size);
4579 0 : tree src = build2 (MEM_REF, atype,
4580 : build_fold_addr_expr (unshare_expr (obj)), off);
4581 0 : g = gimple_build_call (fn, 3,
4582 : build_fold_addr_expr (unshare_expr (obj)),
4583 : src, build_int_cst (size_type_node,
4584 0 : obj_nelts * m_limb_size));
4585 0 : insert_before (g);
4586 : }
 : /* OBJ is a lowering-created temporary holding value plus flag:
 : store the overflow flag in the limb right after the value and
 : clear the remaining limbs.  */
4587 3934 : if (orig_obj == NULL_TREE && obj)
4588 : {
4589 2269 : ovf = add_cast (m_limb_type, ovf);
4590 2269 : tree l = limb_access (NULL_TREE, obj,
4591 2269 : size_int (bitint_big_endian
4592 : ? obj_nelts * 2 - 1 : obj_nelts),
4593 : true);
4594 2269 : g = gimple_build_assign (l, ovf);
4595 2269 : insert_before (g);
4596 2269 : if (obj_nelts > 1)
4597 : {
4598 2269 : atype = build_array_type_nelts (m_limb_type, obj_nelts - 1);
4599 2269 : tree off = build_int_cst (build_pointer_type (TREE_TYPE (obj)),
4600 2269 : (obj_nelts + !bitint_big_endian)
4601 2269 : * m_limb_size);
4602 2269 : tree v1 = build2 (MEM_REF, atype,
4603 : build_fold_addr_expr (unshare_expr (obj)),
4604 : off);
4605 2269 : g = gimple_build_assign (v1, build_zero_cst (atype));
4606 2269 : insert_before (g);
4607 : }
4608 : }
 : /* LHS is the _Complex result of .{ADD,SUB,MUL}_OVERFLOW: rewrite
 : the user of its IMAGPART_EXPR to use the computed flag directly.  */
4609 1665 : else if (TREE_CODE (TREE_TYPE (lhs)) == COMPLEX_TYPE)
4610 : {
4611 1638 : imm_use_iterator ui;
4612 1638 : use_operand_p use_p;
4613 1638 : FOR_EACH_IMM_USE_FAST (use_p, ui, lhs)
4614 : {
4615 1638 : g = USE_STMT (use_p);
4616 1638 : if (!is_gimple_assign (g)
4617 1638 : || gimple_assign_rhs_code (g) != IMAGPART_EXPR)
4618 0 : continue;
4619 1638 : tree lhs2 = gimple_assign_lhs (g);
4620 1638 : gimple *use_stmt;
4621 1638 : single_imm_use (lhs2, &use_p, &use_stmt);
4622 1638 : lhs2 = gimple_assign_lhs (use_stmt);
4623 1638 : gimple_stmt_iterator gsi = gsi_for_stmt (use_stmt);
4624 1638 : if (useless_type_conversion_p (TREE_TYPE (lhs2), TREE_TYPE (ovf)))
4625 1617 : g = gimple_build_assign (lhs2, ovf);
4626 : else
4627 21 : g = gimple_build_assign (lhs2, NOP_EXPR, ovf);
4628 1638 : gsi_replace (&gsi, g, true);
4629 1638 : if (gsi_stmt (m_gsi) == use_stmt)
4630 91 : m_gsi = gsi_for_stmt (g);
4631 1638 : break;
4632 1638 : }
4633 : }
 : /* UBSan check: on overflow call the runtime diagnostic in an
 : unlikely branch.  */
4634 27 : else if (ovf != boolean_false_node)
4635 : {
4636 27 : g = gimple_build_cond (NE_EXPR, ovf, boolean_false_node,
4637 : NULL_TREE, NULL_TREE);
4638 27 : edge edge_true, edge_false;
4639 27 : if_then (g, profile_probability::very_unlikely (),
4640 : edge_true, edge_false);
4641 27 : tree zero = build_zero_cst (TREE_TYPE (lhs));
4642 27 : tree fn = ubsan_build_overflow_builtin (code, m_loc,
4643 27 : TREE_TYPE (lhs),
4644 : zero, zero, NULL);
4645 27 : force_gimple_operand_gsi (&m_gsi, fn, true, NULL_TREE,
4646 : true, GSI_SAME_STMT);
4647 27 : m_gsi = gsi_after_labels (edge_true->dest);
4648 : }
4649 : }
 : /* The temporary is dead past this point.  */
4650 4181 : if (var)
4651 : {
4652 771 : tree clobber = build_clobber (TREE_TYPE (var), CLOBBER_STORAGE_END);
4653 771 : g = gimple_build_assign (var, clobber);
4654 771 : gsi_insert_after (&m_gsi, g, GSI_SAME_STMT);
4655 : }
4656 4181 : }
4657 :
4658 : /* Helper function for lower_addsub_overflow and lower_mul_overflow.
4659 : Given precisions of result TYPE (PREC), argument 0 precision PREC0,
4660 : argument 1 precision PREC1 and minimum precision for the result
4661 : PREC2, compute *START, *END, *CHECK_ZERO and return OVF. */
4662 :
4663 : static tree
4664 4181 : arith_overflow (tree_code code, tree type, int prec, int prec0, int prec1,
4665 : int prec2, unsigned *start, unsigned *end, bool *check_zero)
4666 : {
4667 4181 : *start = 0;
4668 4181 : *end = 0;
4669 4181 : *check_zero = true;
4670 : /* Ignore this special rule for subtraction, even if both
4671 : prec0 >= 0 and prec1 >= 0, their subtraction can be negative
4672 : in infinite precision. */
4673 4181 : if (code != MINUS_EXPR && prec0 >= 0 && prec1 >= 0)
4674 : {
4675 : /* Result in [0, prec2) is unsigned, if prec > prec2,
4676 : all bits above it will be zero. */
4677 681 : if ((prec - !TYPE_UNSIGNED (type)) >= prec2)
4678 0 : return boolean_false_node;
4679 : else
4680 : {
4681 : /* ovf if any of bits in [start, end) is non-zero. */
4682 681 : *start = prec - !TYPE_UNSIGNED (type);
4683 681 : *end = prec2;
4684 : }
4685 : }
4686 3500 : else if (TYPE_UNSIGNED (type))
4687 : {
4688 : /* If result in [0, prec2) is signed and if prec > prec2,
4689 : all bits above it will be sign bit copies. */
4690 1942 : if (prec >= prec2)
4691 : {
4692 : /* ovf if bit prec - 1 is non-zero. */
4693 184 : *start = prec - 1;
4694 184 : *end = prec;
4695 : }
4696 : else
4697 : {
4698 : /* ovf if any of bits in [start, end) is non-zero. */
4699 1758 : *start = prec;
4700 1758 : *end = prec2;
4701 : }
4702 : }
4703 1558 : else if (prec >= prec2)
4704 0 : return boolean_false_node;
4705 : else
4706 : {
4707 : /* ovf if [start, end) bits aren't all zeros or all ones. */
4708 1558 : *start = prec - 1;
4709 1558 : *end = prec2;
4710 1558 : *check_zero = false;
4711 : }
4712 : return NULL_TREE;
4713 : }
4714 :
4715 : /* Lower a .{ADD,SUB}_OVERFLOW call with at least one large/huge _BitInt
4716 : argument or return type _Complex large/huge _BitInt. */
4717 :
4718 : void
4719 2721 : bitint_large_huge::lower_addsub_overflow (tree obj, gimple *stmt)
4720 : {
4721 2721 : tree arg0 = gimple_call_arg (stmt, 0);
4722 2721 : tree arg1 = gimple_call_arg (stmt, 1);
4723 2721 : tree lhs = gimple_call_lhs (stmt);
4724 2721 : gimple *g;
4725 :
4726 2721 : if (!lhs)
4727 : {
4728 0 : gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
4729 0 : gsi_remove (&gsi, true);
4730 0 : return;
4731 : }
4732 2721 : gimple *final_stmt = gsi_stmt (m_gsi);
4733 2721 : tree type = TREE_TYPE (lhs);
4734 2721 : if (TREE_CODE (type) == COMPLEX_TYPE)
4735 2703 : type = TREE_TYPE (type);
4736 2721 : int prec = TYPE_PRECISION (type);
4737 2721 : int prec0 = range_to_prec (arg0, stmt);
4738 2721 : int prec1 = range_to_prec (arg1, stmt);
4739 : /* If PREC0 >= 0 && PREC1 >= 0 and CODE is not MINUS_EXPR, PREC2 is
4740 : the minimum unsigned precision of any possible operation's
4741 : result, otherwise it is minimum signed precision.
4742 : Some examples:
4743 : If PREC0 or PREC1 is 8, it means that argument is [0, 0xff],
4744 : if PREC0 or PREC1 is 10, it means that argument is [0, 0x3ff],
4745 : if PREC0 or PREC1 is -8, it means that argument is [-0x80, 0x7f],
4746 : if PREC0 or PREC1 is -10, it means that argument is [-0x200, 0x1ff].
4747 : PREC0 CODE PREC1 RESULT PREC2 SIGNED vs. UNSIGNED
4748 : 8 + 8 [0, 0x1fe] 9 UNSIGNED
4749 : 8 + 10 [0, 0x4fe] 11 UNSIGNED
4750 : -8 + -8 [-0x100, 0xfe] 9 SIGNED
4751 : -8 + -10 [-0x280, 0x27e] 11 SIGNED
4752 : 8 + -8 [-0x80, 0x17e] 10 SIGNED
4753 : 8 + -10 [-0x200, 0x2fe] 11 SIGNED
4754 : 10 + -8 [-0x80, 0x47e] 12 SIGNED
4755 : 8 - 8 [-0xff, 0xff] 9 SIGNED
4756 : 8 - 10 [-0x3ff, 0xff] 11 SIGNED
4757 : 10 - 8 [-0xff, 0x3ff] 11 SIGNED
4758 : -8 - -8 [-0xff, 0xff] 9 SIGNED
4759 : -8 - -10 [-0x27f, 0x27f] 11 SIGNED
4760 : -10 - -8 [-0x27f, 0x27f] 11 SIGNED
4761 : 8 - -8 [-0x7f, 0x17f] 10 SIGNED
4762 : 8 - -10 [-0x1ff, 0x2ff] 11 SIGNED
4763 : 10 - -8 [-0x7f, 0x47f] 12 SIGNED
4764 : -8 - 8 [-0x17f, 0x7f] 10 SIGNED
4765 : -8 - 10 [-0x47f, 0x7f] 12 SIGNED
4766 : -10 - 8 [-0x2ff, 0x1ff] 11 SIGNED */
4767 2721 : int prec2 = MAX (prec0 < 0 ? -prec0 : prec0,
4768 : prec1 < 0 ? -prec1 : prec1);
4769 : /* If operands are either both signed or both unsigned,
4770 : we need just one additional bit. */
4771 3760 : prec2 = (((prec0 < 0) == (prec1 < 0)
4772 : /* If one operand is signed and one unsigned and
4773 : the signed one has larger precision, we need
4774 : just one extra bit, otherwise two. */
4775 702 : || (prec0 < 0 ? (prec2 == -prec0 && prec2 != prec1)
4776 337 : : (prec2 == -prec1 && prec2 != prec0)))
4777 2721 : ? prec2 + 1 : prec2 + 2);
4778 2721 : int prec3 = MAX (prec0 < 0 ? -prec0 : prec0,
4779 : prec1 < 0 ? -prec1 : prec1);
4780 2721 : prec3 = MAX (prec3, prec);
4781 2721 : tree var = NULL_TREE;
4782 2721 : tree orig_obj = obj;
4783 2721 : if (obj == NULL_TREE
4784 1741 : && TREE_CODE (type) == BITINT_TYPE
4785 1640 : && bitint_precision_kind (type) >= bitint_prec_large
4786 1524 : && m_names
4787 4221 : && bitmap_bit_p (m_names, SSA_NAME_VERSION (lhs)))
4788 : {
4789 1445 : int part = var_to_partition (m_map, lhs);
4790 1445 : gcc_assert (m_vars[part] != NULL_TREE);
4791 1445 : obj = m_vars[part];
4792 1445 : if (TREE_TYPE (lhs) == type)
4793 2 : orig_obj = obj;
4794 : }
4795 2721 : if (TREE_CODE (type) != BITINT_TYPE
4796 2721 : || bitint_precision_kind (type) < bitint_prec_large)
4797 : {
4798 217 : unsigned HOST_WIDE_INT nelts = CEIL (prec, limb_prec);
4799 217 : tree atype = build_array_type_nelts (m_limb_type, nelts);
4800 217 : var = create_tmp_var (atype);
4801 : }
4802 :
4803 2721 : enum tree_code code;
4804 2721 : switch (gimple_call_internal_fn (stmt))
4805 : {
4806 : case IFN_ADD_OVERFLOW:
4807 : case IFN_UBSAN_CHECK_ADD:
4808 : code = PLUS_EXPR;
4809 : break;
4810 1393 : case IFN_SUB_OVERFLOW:
4811 1393 : case IFN_UBSAN_CHECK_SUB:
4812 1393 : code = MINUS_EXPR;
4813 1393 : break;
4814 0 : default:
4815 0 : gcc_unreachable ();
4816 : }
4817 2721 : unsigned start, end;
4818 2721 : bool check_zero;
4819 2721 : tree ovf = arith_overflow (code, type, prec, prec0, prec1, prec2,
4820 : &start, &end, &check_zero);
4821 :
4822 2721 : unsigned startlimb, endlimb;
4823 2721 : if (ovf)
4824 : {
4825 : startlimb = ~0U;
4826 : endlimb = ~0U;
4827 : }
4828 : else
4829 : {
4830 2721 : startlimb = start / limb_prec;
4831 2721 : endlimb = (end - 1) / limb_prec;
4832 : }
4833 :
4834 2721 : int prec4 = ovf != NULL_TREE ? prec : prec3;
4835 2721 : bitint_prec_kind kind = bitint_precision_kind (prec4);
4836 2721 : unsigned cnt, rem = 0, fin = 0, nelts;
4837 2721 : tree idx = NULL_TREE, idx_first = NULL_TREE, idx_next = NULL_TREE;
4838 5442 : bool last_ovf = (ovf == NULL_TREE
4839 2721 : && CEIL (prec2, limb_prec) > CEIL (prec3, limb_prec));
4840 2721 : if (kind != bitint_prec_huge)
4841 1539 : nelts = cnt = CEIL (prec4, limb_prec) + last_ovf;
4842 : else
4843 : {
4844 1182 : rem = prec4 % (2 * limb_prec);
4845 1182 : fin = (prec4 - rem) / limb_prec;
4846 1182 : cnt = 2 + CEIL (rem, limb_prec) + last_ovf;
4847 1182 : nelts = fin + cnt - 2;
4848 1182 : idx = idx_first = create_loop (bitint_big_endian
4849 1182 : ? size_int (nelts - 1) : size_zero_node,
4850 : &idx_next);
4851 : }
4852 :
4853 2721 : if (kind == bitint_prec_huge)
4854 1182 : m_upwards_2limb = fin;
4855 2721 : m_upwards = true;
4856 :
4857 2721 : tree type0 = TREE_TYPE (arg0);
4858 2721 : tree type1 = TREE_TYPE (arg1);
4859 2721 : int prec5 = prec3;
4860 2721 : if (bitint_precision_kind (prec5) < bitint_prec_large)
4861 10 : prec5 = MAX (TYPE_PRECISION (type0), TYPE_PRECISION (type1));
4862 2721 : if (TYPE_PRECISION (type0) < prec5)
4863 : {
4864 146 : type0 = build_bitint_type (prec5, TYPE_UNSIGNED (type0));
4865 146 : if (TREE_CODE (arg0) == INTEGER_CST)
4866 27 : arg0 = fold_convert (type0, arg0);
4867 : }
4868 2721 : if (TYPE_PRECISION (type1) < prec5)
4869 : {
4870 156 : type1 = build_bitint_type (prec5, TYPE_UNSIGNED (type1));
4871 156 : if (TREE_CODE (arg1) == INTEGER_CST)
4872 76 : arg1 = fold_convert (type1, arg1);
4873 : }
4874 2721 : unsigned int data_cnt = 0;
4875 2721 : tree last_rhs1 = NULL_TREE, last_rhs2 = NULL_TREE;
4876 2721 : tree cmp = build_zero_cst (m_limb_type);
4877 2721 : unsigned prec_limbs = CEIL ((unsigned) prec, limb_prec);
4878 2721 : tree ovf_out = NULL_TREE, cmp_out = NULL_TREE;
4879 11856 : for (unsigned i = 0; i < cnt; i++)
4880 : {
4881 9135 : m_data_cnt = 0;
4882 9135 : tree rhs1, rhs2;
4883 9135 : if (kind != bitint_prec_huge)
4884 5303 : idx = size_int (bitint_big_endian ? nelts - 1 - i : i);
4885 3832 : else if (i >= 2)
4886 1468 : idx = size_int (bitint_big_endian ? nelts + 1 - fin - i : fin + i - 2);
4887 9135 : if (!last_ovf || i < cnt - 1)
4888 : {
4889 8189 : tree idx0 = idx, idx1 = idx;
4890 8189 : if (bitint_big_endian
4891 8189 : && CEIL ((unsigned) TYPE_PRECISION (type0), limb_prec) != nelts)
4892 : {
4893 0 : HOST_WIDE_INT diff
4894 0 : = ((HOST_WIDE_INT) CEIL (TYPE_PRECISION (type0), limb_prec)
4895 0 : - (HOST_WIDE_INT) nelts);
4896 0 : if (tree_fits_uhwi_p (idx))
4897 0 : idx0 = size_int (tree_to_uhwi (idx) + diff);
4898 : else
4899 : {
4900 0 : idx0 = make_ssa_name (sizetype);
4901 0 : g = gimple_build_assign (idx0, PLUS_EXPR, idx,
4902 0 : size_int (diff));
4903 0 : insert_before (g);
4904 : }
4905 : }
4906 8189 : if (type0 != TREE_TYPE (arg0))
4907 334 : rhs1 = handle_cast (type0, arg0, idx0);
4908 : else
4909 7855 : rhs1 = handle_operand (arg0, idx0);
4910 8189 : if (bitint_big_endian
4911 8189 : && CEIL ((unsigned) TYPE_PRECISION (type1), limb_prec) != nelts)
4912 : {
4913 0 : HOST_WIDE_INT diff
4914 0 : = ((HOST_WIDE_INT) CEIL (TYPE_PRECISION (type1), limb_prec)
4915 0 : - (HOST_WIDE_INT) nelts);
4916 0 : if (tree_fits_uhwi_p (idx))
4917 0 : idx1 = size_int (tree_to_uhwi (idx) + diff);
4918 : else
4919 : {
4920 0 : idx1 = make_ssa_name (sizetype);
4921 0 : g = gimple_build_assign (idx1, PLUS_EXPR, idx,
4922 0 : size_int (diff));
4923 0 : insert_before (g);
4924 : }
4925 : }
4926 8189 : if (type1 != TREE_TYPE (arg1))
4927 250 : rhs2 = handle_cast (type1, arg1, idx1);
4928 : else
4929 7939 : rhs2 = handle_operand (arg1, idx1);
4930 8189 : if (i == 0)
4931 2721 : data_cnt = m_data_cnt;
4932 8189 : if (!useless_type_conversion_p (m_limb_type, TREE_TYPE (rhs1)))
4933 1971 : rhs1 = add_cast (m_limb_type, rhs1);
4934 8189 : if (!useless_type_conversion_p (m_limb_type, TREE_TYPE (rhs2)))
4935 1971 : rhs2 = add_cast (m_limb_type, rhs2);
4936 : last_rhs1 = rhs1;
4937 : last_rhs2 = rhs2;
4938 : }
4939 : else
4940 : {
4941 946 : m_data_cnt = data_cnt;
4942 946 : if (TYPE_UNSIGNED (type0) || prec0 >= 0)
4943 421 : rhs1 = build_zero_cst (m_limb_type);
4944 : else
4945 : {
4946 525 : rhs1 = add_cast (signed_type_for (m_limb_type), last_rhs1);
4947 525 : if (TREE_CODE (rhs1) == INTEGER_CST)
4948 52 : rhs1 = build_int_cst (m_limb_type,
4949 74 : tree_int_cst_sgn (rhs1) < 0 ? -1 : 0);
4950 : else
4951 : {
4952 946 : tree lpm1 = build_int_cst (unsigned_type_node,
4953 473 : limb_prec - 1);
4954 473 : g = gimple_build_assign (make_ssa_name (TREE_TYPE (rhs1)),
4955 : RSHIFT_EXPR, rhs1, lpm1);
4956 473 : insert_before (g);
4957 473 : rhs1 = add_cast (m_limb_type, gimple_assign_lhs (g));
4958 : }
4959 : }
4960 946 : if (TYPE_UNSIGNED (type1) || prec1 >= 0)
4961 543 : rhs2 = build_zero_cst (m_limb_type);
4962 : else
4963 : {
4964 403 : rhs2 = add_cast (signed_type_for (m_limb_type), last_rhs2);
4965 403 : if (TREE_CODE (rhs2) == INTEGER_CST)
4966 114 : rhs2 = build_int_cst (m_limb_type,
4967 153 : tree_int_cst_sgn (rhs2) < 0 ? -1 : 0);
4968 : else
4969 : {
4970 578 : tree lpm1 = build_int_cst (unsigned_type_node,
4971 289 : limb_prec - 1);
4972 289 : g = gimple_build_assign (make_ssa_name (TREE_TYPE (rhs2)),
4973 : RSHIFT_EXPR, rhs2, lpm1);
4974 289 : insert_before (g);
4975 289 : rhs2 = add_cast (m_limb_type, gimple_assign_lhs (g));
4976 : }
4977 : }
4978 : }
4979 9135 : tree rhs = handle_plus_minus (code, rhs1, rhs2, idx);
4980 9135 : if (ovf != boolean_false_node)
4981 : {
4982 9135 : if (tree_fits_uhwi_p (idx))
4983 : {
4984 6771 : unsigned limb = tree_to_uhwi (idx);
4985 6771 : if (bitint_big_endian)
4986 0 : limb = nelts - 1 - limb;
4987 6771 : if (limb >= startlimb && limb <= endlimb)
4988 : {
4989 3358 : tree l = arith_overflow_extract_bits (start, end, rhs,
4990 : limb, check_zero);
4991 3358 : tree this_ovf = make_ssa_name (boolean_type_node);
4992 3358 : if (ovf == NULL_TREE && !check_zero)
4993 : {
4994 895 : cmp = l;
4995 895 : g = gimple_build_assign (make_ssa_name (m_limb_type),
4996 : PLUS_EXPR, l,
4997 : build_int_cst (m_limb_type, 1));
4998 895 : insert_before (g);
4999 895 : g = gimple_build_assign (this_ovf, GT_EXPR,
5000 : gimple_assign_lhs (g),
5001 : build_int_cst (m_limb_type, 1));
5002 : }
5003 : else
5004 2463 : g = gimple_build_assign (this_ovf, NE_EXPR, l, cmp);
5005 3358 : insert_before (g);
5006 3358 : if (ovf == NULL_TREE)
5007 : ovf = this_ovf;
5008 : else
5009 : {
5010 1064 : tree b = make_ssa_name (boolean_type_node);
5011 1064 : g = gimple_build_assign (b, BIT_IOR_EXPR, ovf, this_ovf);
5012 1064 : insert_before (g);
5013 1064 : ovf = b;
5014 : }
5015 : }
5016 : }
5017 2364 : else if (startlimb < fin)
5018 : {
5019 854 : if (m_first && startlimb + 2 < fin)
5020 : {
5021 324 : tree data_out;
5022 324 : ovf = prepare_data_in_out (boolean_false_node, idx, &data_out);
5023 324 : ovf_out = m_data.pop ();
5024 324 : m_data.pop ();
5025 324 : if (!check_zero)
5026 : {
5027 169 : cmp = prepare_data_in_out (cmp, idx, &data_out);
5028 169 : cmp_out = m_data.pop ();
5029 169 : m_data.pop ();
5030 : }
5031 : }
5032 854 : if (i != 0 || startlimb != fin - 1)
5033 : {
5034 839 : tree_code cmp_code;
5035 839 : bool single_comparison
5036 839 : = (startlimb + 2 >= fin || (startlimb & 1) != (i & 1));
5037 : if (!single_comparison)
5038 : cmp_code = GE_EXPR;
5039 515 : else if ((startlimb & 1) == (i & 1))
5040 : cmp_code = EQ_EXPR;
5041 : else
5042 412 : cmp_code = GT_EXPR;
5043 839 : if (bitint_big_endian)
5044 0 : g = gimple_build_cond (swap_tree_comparison (cmp_code),
5045 0 : idx, size_int (nelts - 1
5046 : - startlimb),
5047 : NULL_TREE, NULL_TREE);
5048 : else
5049 839 : g = gimple_build_cond (cmp_code, idx, size_int (startlimb),
5050 : NULL_TREE, NULL_TREE);
5051 839 : edge edge_true_true, edge_true_false, edge_false;
5052 839 : gimple *g2 = NULL;
5053 839 : if (!single_comparison)
5054 324 : g2 = gimple_build_cond (NE_EXPR, idx,
5055 324 : size_int (bitint_big_endian
5056 : ? nelts - 1 - startlimb
5057 : : startlimb),
5058 : NULL_TREE, NULL_TREE);
5059 839 : if_then_if_then_else (g, g2, profile_probability::likely (),
5060 : profile_probability::likely (),
5061 : edge_true_true, edge_true_false,
5062 : edge_false);
5063 839 : unsigned tidx = startlimb + (cmp_code == GT_EXPR);
5064 839 : tree l = arith_overflow_extract_bits (start, end, rhs, tidx,
5065 : check_zero);
5066 839 : tree this_ovf = make_ssa_name (boolean_type_node);
5067 839 : if (cmp_code != GT_EXPR && !check_zero)
5068 : {
5069 173 : g = gimple_build_assign (make_ssa_name (m_limb_type),
5070 : PLUS_EXPR, l,
5071 : build_int_cst (m_limb_type, 1));
5072 173 : insert_before (g);
5073 173 : g = gimple_build_assign (this_ovf, GT_EXPR,
5074 : gimple_assign_lhs (g),
5075 : build_int_cst (m_limb_type, 1));
5076 : }
5077 : else
5078 666 : g = gimple_build_assign (this_ovf, NE_EXPR, l, cmp);
5079 839 : insert_before (g);
5080 839 : if (cmp_code == GT_EXPR)
5081 : {
5082 412 : tree t = make_ssa_name (boolean_type_node);
5083 412 : g = gimple_build_assign (t, BIT_IOR_EXPR, ovf, this_ovf);
5084 412 : insert_before (g);
5085 412 : this_ovf = t;
5086 : }
5087 839 : tree this_ovf2 = NULL_TREE;
5088 839 : if (!single_comparison)
5089 : {
5090 324 : m_gsi = gsi_after_labels (edge_true_true->src);
5091 324 : tree t = make_ssa_name (boolean_type_node);
5092 324 : g = gimple_build_assign (t, NE_EXPR, rhs, cmp);
5093 324 : insert_before (g);
5094 324 : this_ovf2 = make_ssa_name (boolean_type_node);
5095 324 : g = gimple_build_assign (this_ovf2, BIT_IOR_EXPR,
5096 : ovf, t);
5097 324 : insert_before (g);
5098 : }
5099 839 : m_gsi = gsi_after_labels (edge_true_false->dest);
5100 839 : tree t;
5101 839 : if (i == 1 && ovf_out)
5102 : t = ovf_out;
5103 : else
5104 515 : t = make_ssa_name (boolean_type_node);
5105 839 : gphi *phi = create_phi_node (t, edge_true_false->dest);
5106 839 : add_phi_arg (phi, this_ovf, edge_true_false,
5107 : UNKNOWN_LOCATION);
5108 839 : add_phi_arg (phi, ovf ? ovf
5109 : : boolean_false_node, edge_false,
5110 : UNKNOWN_LOCATION);
5111 839 : if (edge_true_true)
5112 324 : add_phi_arg (phi, this_ovf2, edge_true_true,
5113 : UNKNOWN_LOCATION);
5114 839 : ovf = t;
5115 839 : if (!check_zero && cmp_code != GT_EXPR)
5116 : {
5117 173 : t = cmp_out ? cmp_out : make_ssa_name (m_limb_type);
5118 173 : phi = create_phi_node (t, edge_true_false->dest);
5119 173 : add_phi_arg (phi, l, edge_true_false, UNKNOWN_LOCATION);
5120 173 : add_phi_arg (phi, cmp, edge_false, UNKNOWN_LOCATION);
5121 173 : if (edge_true_true)
5122 169 : add_phi_arg (phi, cmp, edge_true_true,
5123 : UNKNOWN_LOCATION);
5124 : cmp = t;
5125 : }
5126 : }
5127 : }
5128 : }
5129 :
5130 9135 : if (var || obj)
5131 : {
5132 8907 : if (tree_fits_uhwi_p (idx)
5133 6661 : && (bitint_big_endian
5134 6661 : ? nelts - 1 - tree_to_uhwi (idx)
5135 6661 : : tree_to_uhwi (idx)) >= prec_limbs)
5136 : ;
5137 7519 : else if (!tree_fits_uhwi_p (idx)
5138 2246 : && (unsigned) prec < (fin - (i == 0)) * limb_prec)
5139 : {
5140 1458 : bool single_comparison
5141 729 : = (((unsigned) prec % limb_prec) == 0
5142 571 : || prec_limbs + 1 >= fin
5143 1231 : || (prec_limbs & 1) == (i & 1));
5144 729 : if (bitint_big_endian)
5145 0 : g = gimple_build_cond (GE_EXPR, idx,
5146 0 : size_int (nelts - prec_limbs),
5147 : NULL_TREE, NULL_TREE);
5148 : else
5149 729 : g = gimple_build_cond (LE_EXPR, idx, size_int (prec_limbs - 1),
5150 : NULL_TREE, NULL_TREE);
5151 729 : gimple *g2 = NULL;
5152 729 : if (!single_comparison)
5153 251 : g2 = gimple_build_cond (EQ_EXPR, idx,
5154 251 : size_int (bitint_big_endian
5155 : ? nelts - prec_limbs
5156 : : prec_limbs - 1),
5157 : NULL_TREE, NULL_TREE);
5158 729 : edge edge_true_true, edge_true_false, edge_false;
5159 729 : if_then_if_then_else (g, g2, profile_probability::likely (),
5160 : profile_probability::unlikely (),
5161 : edge_true_true, edge_true_false,
5162 : edge_false);
5163 729 : tree idxl = idx;
5164 729 : if (bitint_big_endian && prec_limbs != nelts)
5165 : {
5166 0 : HOST_WIDE_INT diff = ((HOST_WIDE_INT) prec_limbs
5167 0 : - (HOST_WIDE_INT) nelts);
5168 0 : if (tree_fits_uhwi_p (idx))
5169 0 : idxl = size_int (tree_to_uhwi (idx) + diff);
5170 : else
5171 : {
5172 0 : idxl = make_ssa_name (sizetype);
5173 0 : g = gimple_build_assign (idxl, PLUS_EXPR, idx,
5174 0 : size_int (diff));
5175 0 : insert_before (g);
5176 : }
5177 : }
5178 1082 : tree l = limb_access (type, var ? var : obj, idxl, true);
5179 729 : g = gimple_build_assign (l, rhs);
5180 729 : insert_before (g);
5181 729 : if (!single_comparison)
5182 : {
5183 251 : m_gsi = gsi_after_labels (edge_true_true->src);
5184 251 : tree plm1idx = size_int (bitint_big_endian
5185 : ? 0 : prec_limbs - 1);
5186 251 : tree plm1type = limb_access_type (type, plm1idx);
5187 251 : l = limb_access (type, var ? var : obj, plm1idx, true);
5188 251 : if (!useless_type_conversion_p (plm1type, TREE_TYPE (rhs)))
5189 251 : rhs = add_cast (plm1type, rhs);
5190 251 : if (!useless_type_conversion_p (TREE_TYPE (l),
5191 251 : TREE_TYPE (rhs)))
5192 251 : rhs = add_cast (TREE_TYPE (l), rhs);
5193 251 : g = gimple_build_assign (l, rhs);
5194 251 : insert_before (g);
5195 : }
5196 729 : m_gsi = gsi_after_labels (edge_true_false->dest);
5197 729 : }
5198 : else
5199 : {
5200 6790 : tree idxl = idx;
5201 6790 : if (bitint_big_endian && prec_limbs != nelts)
5202 : {
5203 0 : HOST_WIDE_INT diff = ((HOST_WIDE_INT) prec_limbs
5204 0 : - (HOST_WIDE_INT) nelts);
5205 0 : if (tree_fits_uhwi_p (idx))
5206 0 : idxl = size_int (tree_to_uhwi (idx) + diff);
5207 : else
5208 : {
5209 0 : idxl = make_ssa_name (sizetype);
5210 0 : g = gimple_build_assign (idxl, PLUS_EXPR, idx,
5211 0 : size_int (diff));
5212 0 : insert_before (g);
5213 : }
5214 : }
5215 13551 : tree l = limb_access (type, var ? var : obj, idxl, true);
5216 6790 : if (bitint_extended && tree_fits_uhwi_p (idxl))
5217 : {
5218 0 : tree atype = limb_access_type (type, idxl);
5219 0 : if (!useless_type_conversion_p (atype, TREE_TYPE (rhs)))
5220 0 : rhs = add_cast (atype, rhs);
5221 : }
5222 6790 : if (!useless_type_conversion_p (TREE_TYPE (l), TREE_TYPE (rhs)))
5223 0 : rhs = add_cast (TREE_TYPE (l), rhs);
5224 6790 : g = gimple_build_assign (l, rhs);
5225 6790 : insert_before (g);
5226 : }
5227 : }
5228 9135 : m_first = false;
5229 9135 : if (kind == bitint_prec_huge && i <= 1)
5230 : {
5231 2364 : if (i == 0)
5232 : {
5233 1182 : idx = make_ssa_name (sizetype);
5234 1182 : g = gimple_build_assign (idx, PLUS_EXPR, idx_first,
5235 : bitint_big_endian
5236 0 : ? size_int (-1) : size_one_node);
5237 1182 : insert_before (g);
5238 : }
5239 : else
5240 : {
5241 1182 : g = gimple_build_assign (idx_next, PLUS_EXPR, idx_first,
5242 2364 : size_int (bitint_big_endian ? -2 : 2));
5243 1182 : insert_before (g);
5244 1182 : if (bitint_big_endian)
5245 0 : g = gimple_build_cond (NE_EXPR, idx_first,
5246 0 : size_int (nelts + 1 - fin),
5247 : NULL_TREE, NULL_TREE);
5248 : else
5249 1182 : g = gimple_build_cond (NE_EXPR, idx_next, size_int (fin),
5250 : NULL_TREE, NULL_TREE);
5251 1182 : insert_before (g);
5252 1182 : m_gsi = gsi_for_stmt (final_stmt);
5253 1182 : m_bb = NULL;
5254 : }
5255 : }
5256 : }
5257 :
5258 2721 : finish_arith_overflow (var, obj, type, ovf, lhs, orig_obj, stmt,
5259 : prec_limbs, code);
5260 : }
5261 :
5262 : /* Lower a .MUL_OVERFLOW call with at least one large/huge _BitInt
5263 : argument or return type _Complex large/huge _BitInt. */
5264 :
5265                 : void
5266            1460 : bitint_large_huge::lower_mul_overflow (tree obj, gimple *stmt)
5267                 : {
5268            1460 :   tree arg0 = gimple_call_arg (stmt, 0);
5269            1460 :   tree arg1 = gimple_call_arg (stmt, 1);
5270            1460 :   tree lhs = gimple_call_lhs (stmt);
                     :   /* A .MUL_OVERFLOW call without lhs has no uses; just remove it.  */
5271            1460 :   if (!lhs)
5272                 :     {
5273               0 :       gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
5274               0 :       gsi_remove (&gsi, true);
5275               0 :       return;
5276                 :     }
5277            1460 :   gimple *final_stmt = gsi_stmt (m_gsi);
5278            1460 :   tree type = TREE_TYPE (lhs);
5279            1460 :   if (TREE_CODE (type) == COMPLEX_TYPE)
5280            1451 :     type = TREE_TYPE (type);
5281            1460 :   int prec = TYPE_PRECISION (type), prec0, prec1;
5282            1460 :   arg0 = handle_operand_addr (arg0, stmt, NULL, &prec0);
5283            1460 :   arg1 = handle_operand_addr (arg1, stmt, NULL, &prec1);
                     :   /* prec2 is the number of bits the full product can need: sum of the
                     :      operand (magnitude) precisions, one less if either operand is just
                     :      a single bit.  Negative precN denotes a signed operand.  */
5284            1460 :   int prec2 = ((prec0 < 0 ? -prec0 : prec0)
5285            1460 :                + (prec1 < 0 ? -prec1 : prec1));
5286            1460 :   if (prec0 == 1 || prec1 == 1)
5287              25 :     --prec2;
5288            1460 :   tree var = NULL_TREE;
5289            1460 :   tree orig_obj = obj;
5290            1460 :   bool force_var = false;
                     :   /* Without a caller supplied destination, try to store the result
                     :      directly into the partition variable backing LHS.  */
5291            1460 :   if (obj == NULL_TREE
5292             869 :       && TREE_CODE (type) == BITINT_TYPE
5293             863 :       && bitint_precision_kind (type) >= bitint_prec_large
5294             839 :       && m_names
5295            2287 :       && bitmap_bit_p (m_names, SSA_NAME_VERSION (lhs)))
5296                 :     {
5297             827 :       int part = var_to_partition (m_map, lhs);
5298             827 :       gcc_assert (m_vars[part] != NULL_TREE);
5299             827 :       obj = m_vars[part];
5300             827 :       if (TREE_TYPE (lhs) == type)
5301               1 :         orig_obj = obj;
5302                 :     }
5303             633 :   else if (obj != NULL_TREE && DECL_P (obj))
5304                 :     {
                     :       /* Writing into OBJ would clobber an argument that lives in the
                     :          same decl; force use of a separate temporary below.  */
5305            1749 :       for (int i = 0; i < 2; ++i)
5306                 :         {
5307            1166 :           tree arg = i ? arg1 : arg0;
5308            1166 :           if (TREE_CODE (arg) == ADDR_EXPR)
5309            1166 :             arg = TREE_OPERAND (arg, 0);
5310            1166 :           if (get_base_address (arg) == obj)
5311                 :             {
5312                 :               force_var = true;
5313                 :               break;
5314                 :             }
5315                 :         }
5316                 :     }
                     :   /* Use a temporary limb array when there is no usable destination or
                     :      the destination can't hold all prec2 bits of the product.  */
5317            1460 :   if (obj == NULL_TREE
5318            1460 :       || force_var
5319            1418 :       || TREE_CODE (type) != BITINT_TYPE
5320            1418 :       || bitint_precision_kind (type) < bitint_prec_large
5321            3704 :       || prec2 > (CEIL (prec, limb_prec) * limb_prec * (orig_obj ? 1 : 2)))
5322                 :     {
5323             554 :       unsigned HOST_WIDE_INT nelts = CEIL (MAX (prec, prec2), limb_prec);
5324             554 :       tree atype = build_array_type_nelts (m_limb_type, nelts);
5325             554 :       var = create_tmp_var (atype);
5326                 :     }
5327            1460 :   tree addr = build_fold_addr_expr (var ? var : obj);
5328            1460 :   addr = force_gimple_operand_gsi (&m_gsi, addr, true,
5329                 :                                    NULL_TREE, true, GSI_SAME_STMT);
5330            1460 :   tree sitype = lang_hooks.types.type_for_mode (SImode, 0);
                     :   /* Emit the actual multiplication as a libgcc IFN_MULBITINT call
                     :      writing MAX (prec2, prec) bits of product into ADDR.  */
5331            1460 :   gimple *g
5332            2920 :     = gimple_build_call_internal (IFN_MULBITINT, 6,
5333                 :                                   addr, build_int_cst (sitype,
5334            1522 :                                                        MAX (prec2, prec)),
5335            1460 :                                   arg0, build_int_cst (sitype, prec0),
5336            1460 :                                   arg1, build_int_cst (sitype, prec1));
5337            1460 :   insert_before (g);
5338                 :
5339            1460 :   unsigned start, end;
5340            1460 :   bool check_zero;
5341            1460 :   tree ovf = arith_overflow (MULT_EXPR, type, prec, prec0, prec1, prec2,
5342                 :                              &start, &end, &check_zero);
                     :   /* arith_overflow couldn't determine the flag statically: inspect
                     :      bits [START, END) of the computed product at runtime.  */
5343            1460 :   if (ovf == NULL_TREE)
5344                 :     {
5345            1460 :       unsigned startlimb = start / limb_prec;
5346            1460 :       unsigned endlimb = (end - 1) / limb_prec;
5347            1460 :       unsigned nelts = CEIL (MAX (prec, prec2), limb_prec);
5348            1460 :       unsigned cnt;
                     :       /* CNT limb checks: 1 if all interesting bits are within a single
                     :          limb, otherwise the first (partial) limb, possibly a loop over
                     :          the middle limbs, and a final partial limb.  */
5349            1460 :       bool use_loop = false;
5350            1460 :       if (startlimb == endlimb)
5351                 :         cnt = 1;
5352            1180 :       else if (startlimb + 1 == endlimb)
5353                 :         cnt = 2;
5354            1009 :       else if ((end % limb_prec) == 0)
5355                 :         {
5356                 :           cnt = 2;
5357                 :           use_loop = true;
5358                 :         }
5359                 :       else
5360                 :         {
5361             766 :           cnt = 3;
5362             766 :           use_loop = startlimb + 2 < endlimb;
5363                 :         }
5364             766 :       if (cnt == 1)
5365                 :         {
5366             494 :           tree l = limb_access (NULL_TREE, var ? var : obj,
5367             280 :                                 size_int (bitint_big_endian
5368                 :                                           ? nelts - 1 - startlimb
5369                 :                                           : startlimb), true);
5370             280 :           g = gimple_build_assign (make_ssa_name (m_limb_type), l);
5371             280 :           insert_before (g);
5372             280 :           l = arith_overflow_extract_bits (start, end, gimple_assign_lhs (g),
5373                 :                                            startlimb, check_zero);
5374             280 :           ovf = make_ssa_name (boolean_type_node);
5375             280 :           if (check_zero)
5376             240 :             g = gimple_build_assign (ovf, NE_EXPR, l,
5377                 :                                      build_zero_cst (m_limb_type));
5378                 :           else
5379                 :             {
                     :               /* !check_zero: bits must be all zeros or all ones.  Adding 1
                     :                  maps the two valid patterns to 0 and 1, so any value > 1
                     :                  after the increment signals overflow.  */
5380              40 :               g = gimple_build_assign (make_ssa_name (m_limb_type),
5381                 :                                        PLUS_EXPR, l,
5382                 :                                        build_int_cst (m_limb_type, 1));
5383              40 :               insert_before (g);
5384              40 :               g = gimple_build_assign (ovf, GT_EXPR, gimple_assign_lhs (g),
5385                 :                                        build_int_cst (m_limb_type, 1));
5386                 :             }
5387             280 :           insert_before (g);
5388                 :         }
5389                 :       else
5390                 :         {
5391            1180 :           basic_block edge_bb = NULL;
5392            1180 :           gimple_stmt_iterator gsi = m_gsi;
5393            1180 :           gsi_prev (&gsi);
5394            1180 :           edge e = split_block (gsi_bb (gsi), gsi_stmt (gsi));
5395            1180 :           edge_bb = e->src;
5396            1180 :           m_gsi = gsi_end_bb (edge_bb);
5397                 :
5398            1180 :           tree cmp = build_zero_cst (m_limb_type);
                     :           /* Check each limb in turn; any mismatch branches straight to
                     :              FINAL_STMT's block with overflow = true.  */
5399            4306 :           for (unsigned i = 0; i < cnt; i++)
5400                 :             {
5401            3126 :               tree idx, idx_next = NULL_TREE;
5402            3126 :               if (i == 0)
5403            1180 :                 idx = size_int (bitint_big_endian
5404                 :                                 ? nelts - 1 - startlimb : startlimb);
5405            1946 :               else if (i == 2)
5406             766 :                 idx = size_int (bitint_big_endian
5407                 :                                 ? nelts - 1 - endlimb : endlimb);
5408            1180 :               else if (use_loop)
5409             661 :                 idx = create_loop (size_int (bitint_big_endian
5410                 :                                              ? nelts - startlimb - 2
5411                 :                                              : startlimb + 1), &idx_next);
5412                 :               else
5413             519 :                 idx = size_int (bitint_big_endian
5414                 :                                 ? nelts - startlimb - 2 : startlimb + 1);
5415            4965 :               tree l = limb_access (NULL_TREE, var ? var : obj, idx, true);
5416            3126 :               g = gimple_build_assign (make_ssa_name (m_limb_type), l);
5417            3126 :               insert_before (g);
5418            3126 :               l = gimple_assign_lhs (g);
                     :               /* Only the boundary limbs need masking to the bit range.  */
5419            3126 :               if (i == 0 || i == 2)
5420            2712 :                 l = arith_overflow_extract_bits (start, end, l,
5421                 :                                                  i == 0 ? startlimb : endlimb,
5422                 :                                                  check_zero);
5423            1946 :               if (i == 0 && !check_zero)
5424                 :                 {
                     :                   /* See the cnt == 1 case: +1 folds the two accepted
                     :                      sign-extension patterns into values 0 and 1.  */
5425             450 :                   cmp = l;
5426             450 :                   g = gimple_build_assign (make_ssa_name (m_limb_type),
5427                 :                                            PLUS_EXPR, l,
5428                 :                                            build_int_cst (m_limb_type, 1));
5429             450 :                   insert_before (g);
5430             450 :                   g = gimple_build_cond (GT_EXPR, gimple_assign_lhs (g),
5431                 :                                          build_int_cst (m_limb_type, 1),
5432                 :                                          NULL_TREE, NULL_TREE);
5433                 :                 }
5434                 :               else
5435            2676 :                 g = gimple_build_cond (NE_EXPR, l, cmp, NULL_TREE, NULL_TREE);
5436            3126 :               insert_before (g);
5437            3126 :               edge e1 = split_block (gsi_bb (m_gsi), g);
5438            3126 :               e1->flags = EDGE_FALSE_VALUE;
5439            3126 :               edge e2 = make_edge (e1->src, gimple_bb (final_stmt),
5440                 :                                    EDGE_TRUE_VALUE);
5441            3126 :               e1->probability = profile_probability::likely ();
5442            3126 :               e2->probability = e1->probability.invert ();
5443            3126 :               if (i == 0)
5444            1180 :                 set_immediate_dominator (CDI_DOMINATORS, e2->dest, e2->src);
5445            3126 :               m_gsi = gsi_after_labels (e1->dest);
                     :               /* Close the middle-limb loop: advance the index and iterate
                     :                  until the last full limb has been checked.  */
5446            3126 :               if (i == 1 && use_loop)
5447                 :                 {
5448             661 :                   g = gimple_build_assign (idx_next, PLUS_EXPR, idx,
5449                 :                                            bitint_big_endian
5450               0 :                                            ? size_int (-1) : size_one_node);
5451             661 :                   insert_before (g);
5452             661 :                   if (bitint_big_endian)
5453               0 :                     g = gimple_build_cond (NE_EXPR, idx,
5454               0 :                                            size_int (nelts - endlimb
5455                 :                                                      - (cnt == 2)),
5456                 :                                            NULL_TREE, NULL_TREE);
5457                 :                   else
5458             661 :                     g = gimple_build_cond (NE_EXPR, idx_next,
5459             661 :                                            size_int (endlimb + (cnt == 2)),
5460                 :                                            NULL_TREE, NULL_TREE);
5461             661 :                   insert_before (g);
5462             661 :                   edge true_edge, false_edge;
5463             661 :                   extract_true_false_edges_from_block (gsi_bb (m_gsi),
5464                 :                                                        &true_edge,
5465                 :                                                        &false_edge);
5466             661 :                   m_gsi = gsi_after_labels (false_edge->dest);
5467             661 :                   m_bb = NULL;
5468                 :                 }
5469                 :             }
5470                 :
                     :           /* Merge all paths: OVF is true on any early-exit (mismatch)
                     :              edge, false on the fallthrough edge E1.  */
5471            1180 :           ovf = make_ssa_name (boolean_type_node);
5472            1180 :           basic_block bb = gimple_bb (final_stmt);
5473            1180 :           gphi *phi = create_phi_node (ovf, bb);
5474            1180 :           edge e1 = find_edge (gsi_bb (m_gsi), bb);
5475            1180 :           edge_iterator ei;
5476            5486 :           FOR_EACH_EDGE (e, ei, bb->preds)
5477                 :             {
5478            4306 :               tree val = e == e1 ? boolean_false_node : boolean_true_node;
5479            4306 :               add_phi_arg (phi, val, e, UNKNOWN_LOCATION);
5480                 :             }
5481            1180 :           m_gsi = gsi_for_stmt (final_stmt);
5482                 :         }
5483                 :     }
5484                 :
5485            1460 :   finish_arith_overflow (var, obj, type, ovf, lhs, orig_obj, stmt,
5486            1460 :                          CEIL (MAX (prec, prec2), limb_prec), MULT_EXPR);
5487                 : }
5488 :
5489 : /* Lower REALPART_EXPR or IMAGPART_EXPR stmt extracting part of result from
5490 : .{ADD,SUB,MUL}_OVERFLOW call. */
5491 :
5492                 : void
5493            5984 : bitint_large_huge::lower_cplxpart_stmt (tree obj, gimple *stmt)
5494                 : {
                     :   /* RHS1 is the _Complex _BitInt result of an .{ADD,SUB,MUL}_OVERFLOW
                     :      call; strip the {REAL,IMAG}PART_EXPR wrapper to get at it.  */
5495            5984 :   tree rhs1 = gimple_assign_rhs1 (stmt);
5496            5984 :   rhs1 = TREE_OPERAND (rhs1, 0);
                     :   /* Without a caller supplied destination, store into the partition
                     :      variable backing the lhs.  */
5497            5984 :   if (obj == NULL_TREE)
5498                 :     {
5499            5911 :       int part = var_to_partition (m_map, gimple_assign_lhs (stmt));
5500            5911 :       gcc_assert (m_vars[part] != NULL_TREE);
5501                 :       obj = m_vars[part];
5502                 :     }
                     :   /* If the overflow call result has no partition of its own, lower the
                     :      call right here so it writes directly into OBJ.  */
5503            5984 :   if (TREE_CODE (rhs1) == SSA_NAME
5504            5984 :       && (m_names == NULL
5505            5983 :           || !bitmap_bit_p (m_names, SSA_NAME_VERSION (rhs1))))
5506                 :     {
5507            1547 :       lower_call (obj, SSA_NAME_DEF_STMT (rhs1));
5508            1547 :       return;
5509                 :     }
5510            4437 :   int part = var_to_partition (m_map, rhs1);
5511            4437 :   gcc_assert (m_vars[part] != NULL_TREE);
5512            4437 :   tree var = m_vars[part];
                     :   /* Copy NELTS limbs out of VAR: offset 0 selects the real part,
                     :      NELTS * m_limb_size the imaginary part.  */
5513            4437 :   unsigned HOST_WIDE_INT nelts
5514            4437 :     = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (obj))) / limb_prec;
5515            4437 :   tree atype = build_array_type_nelts (m_limb_type, nelts);
5516            4437 :   if (!useless_type_conversion_p (atype, TREE_TYPE (obj)))
5517              96 :     obj = build1 (VIEW_CONVERT_EXPR, atype, obj);
5518            4437 :   tree off = build_int_cst (build_pointer_type (TREE_TYPE (var)),
5519            4437 :                             gimple_assign_rhs_code (stmt) == REALPART_EXPR
5520            4437 :                             ? 0 : nelts * m_limb_size);
5521            4437 :   tree v2 = build2 (MEM_REF, atype, build_fold_addr_expr (var), off);
5522            4437 :   gimple *g = gimple_build_assign (obj, v2);
5523            4437 :   insert_before (g);
5524                 : }
5525 :
5526 : /* Lower COMPLEX_EXPR stmt. */
5527 :
5528                 : void
5529              18 : bitint_large_huge::lower_complexexpr_stmt (gimple *stmt)
5530                 : {
5531              18 :   tree lhs = gimple_assign_lhs (stmt);
5532              18 :   tree rhs1 = gimple_assign_rhs1 (stmt);
5533              18 :   tree rhs2 = gimple_assign_rhs2 (stmt);
5534              18 :   int part = var_to_partition (m_map, lhs);
5535              18 :   gcc_assert (m_vars[part] != NULL_TREE);
5536              18 :   lhs = m_vars[part];
                     :   /* Assemble the _Complex _BitInt in LHS's backing variable as two
                     :      NELTS-limb array copies: real part at offset 0, imaginary part
                     :      right after it.  */
5537              18 :   unsigned HOST_WIDE_INT nelts
5538              18 :     = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (rhs1))) / limb_prec;
5539              18 :   tree atype = build_array_type_nelts (m_limb_type, nelts);
5540              18 :   tree zero = build_zero_cst (build_pointer_type (TREE_TYPE (lhs)));
5541              18 :   tree v1 = build2 (MEM_REF, atype, build_fold_addr_expr (lhs), zero);
5542              18 :   tree v2;
                     :   /* Source of the real part: a partition variable for an SSA_NAME, an
                     :      all-zero array for literal zero, or a constant pool entry.  */
5543              18 :   if (TREE_CODE (rhs1) == SSA_NAME)
5544                 :     {
5545              18 :       part = var_to_partition (m_map, rhs1);
5546              18 :       gcc_assert (m_vars[part] != NULL_TREE);
5547                 :       v2 = m_vars[part];
5548                 :     }
5549               0 :   else if (integer_zerop (rhs1))
5550               0 :     v2 = build_zero_cst (atype);
5551                 :   else
5552               0 :     v2 = tree_output_constant_def (rhs1);
5553              18 :   if (!useless_type_conversion_p (atype, TREE_TYPE (v2)))
5554              18 :     v2 = build1 (VIEW_CONVERT_EXPR, atype, v2);
5555              18 :   gimple *g = gimple_build_assign (v1, v2);
5556              18 :   insert_before (g);
                     :   /* Now the imaginary part, at byte offset sizeof (atype).  */
5557              18 :   tree off = fold_convert (build_pointer_type (TREE_TYPE (lhs)),
5558                 :                            TYPE_SIZE_UNIT (atype));
5559              18 :   v1 = build2 (MEM_REF, atype, build_fold_addr_expr (lhs), off);
5560              18 :   if (TREE_CODE (rhs2) == SSA_NAME)
5561                 :     {
5562               0 :       part = var_to_partition (m_map, rhs2);
5563               0 :       gcc_assert (m_vars[part] != NULL_TREE);
5564                 :       v2 = m_vars[part];
5565                 :     }
5566              18 :   else if (integer_zerop (rhs2))
5567              18 :     v2 = build_zero_cst (atype);
5568                 :   else
5569               0 :     v2 = tree_output_constant_def (rhs2);
5570              18 :   if (!useless_type_conversion_p (atype, TREE_TYPE (v2)))
5571               0 :     v2 = build1 (VIEW_CONVERT_EXPR, atype, v2);
5572              18 :   g = gimple_build_assign (v1, v2);
5573              18 :   insert_before (g);
5574              18 : }
5575 :
5576 : /* Lower a .{CLZ,CTZ,CLRSB,FFS,PARITY,POPCOUNT} call with one large/huge _BitInt
5577 : argument. */
5578 :
5579                 : void
5580              91 : bitint_large_huge::lower_bit_query (gimple *stmt)
5581                 : {
5582              91 :   tree arg0 = gimple_call_arg (stmt, 0);
5583              91 :   tree arg1 = (gimple_call_num_args (stmt) == 2
5584              91 :                ? gimple_call_arg (stmt, 1) : NULL_TREE);
5585              91 :   tree lhs = gimple_call_lhs (stmt);
5586              91 :   gimple *g;
5587                 :
                     :   /* An unused bit-query result can simply be dropped.  */
5588              91 :   if (!lhs)
5589                 :     {
5590               0 :       gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
5591               0 :       gsi_remove (&gsi, true);
5592               0 :       return;
5593                 :     }
5594              91 :   tree type = TREE_TYPE (arg0);
5595              91 :   gcc_assert (TREE_CODE (type) == BITINT_TYPE);
5596              91 :   bitint_prec_kind kind = bitint_precision_kind (type);
5597              91 :   gcc_assert (kind >= bitint_prec_large);
5598              91 :   enum internal_fn ifn = gimple_call_internal_fn (stmt);
5599              91 :   enum built_in_function fcode = END_BUILTINS;
                     :   /* Map IFN to the __builtin_* variant whose argument width matches
                     :      one limb; the query is then done limb by limb.  */
5600              91 :   gcc_assert (TYPE_PRECISION (unsigned_type_node) == limb_prec
5601                 :               || TYPE_PRECISION (long_unsigned_type_node) == limb_prec
5602                 :               || TYPE_PRECISION (long_long_unsigned_type_node) == limb_prec);
5603              91 :   switch (ifn)
5604                 :     {
5605              25 :     case IFN_CLZ:
5606              25 :       if (TYPE_PRECISION (unsigned_type_node) == limb_prec)
5607                 :         fcode = BUILT_IN_CLZ;
5608              25 :       else if (TYPE_PRECISION (long_unsigned_type_node) == limb_prec)
5609                 :         fcode = BUILT_IN_CLZL;
5610                 :       else
5611               0 :         fcode = BUILT_IN_CLZLL;
5612                 :       break;
5613              10 :     case IFN_FFS:
5614                 :       /* .FFS (X) is .CTZ (X, -1) + 1, though under the hood
5615                 :          we don't add the addend at the end.  */
5616              10 :       arg1 = integer_zero_node;
5617                 :       /* FALLTHRU */
5618              37 :     case IFN_CTZ:
5619              37 :       if (TYPE_PRECISION (unsigned_type_node) == limb_prec)
5620                 :         fcode = BUILT_IN_CTZ;
5621              37 :       else if (TYPE_PRECISION (long_unsigned_type_node) == limb_prec)
5622                 :         fcode = BUILT_IN_CTZL;
5623                 :       else
5624               0 :         fcode = BUILT_IN_CTZLL;
5625              37 :       m_upwards = true;
5626              37 :       break;
5627               8 :     case IFN_CLRSB:
5628               8 :       if (TYPE_PRECISION (unsigned_type_node) == limb_prec)
5629                 :         fcode = BUILT_IN_CLRSB;
5630               8 :       else if (TYPE_PRECISION (long_unsigned_type_node) == limb_prec)
5631                 :         fcode = BUILT_IN_CLRSBL;
5632                 :       else
5633               0 :         fcode = BUILT_IN_CLRSBLL;
5634                 :       break;
5635              11 :     case IFN_PARITY:
5636              11 :       if (TYPE_PRECISION (unsigned_type_node) == limb_prec)
5637                 :         fcode = BUILT_IN_PARITY;
5638              11 :       else if (TYPE_PRECISION (long_unsigned_type_node) == limb_prec)
5639                 :         fcode = BUILT_IN_PARITYL;
5640                 :       else
5641               0 :         fcode = BUILT_IN_PARITYLL;
5642              11 :       m_upwards = true;
5643              11 :       break;
5644              10 :     case IFN_POPCOUNT:
5645              10 :       if (TYPE_PRECISION (unsigned_type_node) == limb_prec)
5646                 :         fcode = BUILT_IN_POPCOUNT;
5647              10 :       else if (TYPE_PRECISION (long_unsigned_type_node) == limb_prec)
5648                 :         fcode = BUILT_IN_POPCOUNTL;
5649                 :       else
5650               0 :         fcode = BUILT_IN_POPCOUNTLL;
5651              10 :       m_upwards = true;
5652              10 :       break;
5653               0 :     default:
5654               0 :       gcc_unreachable ();
5655                 :     }
5656              91 :   tree fndecl = builtin_decl_explicit (fcode), res = NULL_TREE;
5657              91 :   unsigned cnt = 0, rem = 0, end = 0, prec = TYPE_PRECISION (type);
5658              91 :   unsigned nelts = CEIL (prec, limb_prec);
                     :   /* Per-limb bookkeeping: the exit edge taken when this limb decides
                     :      the answer, the limb value, and the bit-position addend.  */
5659              91 :   struct bq_details { edge e; tree val, addend; } *bqp = NULL;
5660              91 :   basic_block edge_bb = NULL;
                     :   /* CTZ/FFS/PARITY/POPCOUNT scan from the least significant limb up;
                     :      CLZ/CLRSB (the else arm below) scan from the most significant.  */
5661              91 :   if (m_upwards)
5662                 :     {
5663              58 :       tree idx = NULL_TREE, idx_first = NULL_TREE, idx_next = NULL_TREE;
5664              58 :       if (kind == bitint_prec_large)
5665                 :         cnt = nelts;
5666                 :       else
5667                 :         {
                     :           /* Huge: loop over pairs of limbs, then handle the remaining
                     :              REM bits (at most two more limbs) straight-line.  */
5668              32 :           rem = (prec % (2 * limb_prec));
5669              32 :           end = (prec - rem) / limb_prec;
5670              32 :           cnt = 2 + CEIL (rem, limb_prec);
5671              32 :           idx = idx_first = create_loop (bitint_big_endian
5672              32 :                                          ? size_int (nelts - 1)
5673                 :                                          : size_zero_node, &idx_next);
5674                 :         }
5675                 :
5676              58 :       if (ifn == IFN_CTZ || ifn == IFN_FFS)
5677                 :         {
                     :           /* CTZ/FFS exit as soon as a nonzero limb is found, so split
                     :              off an edge block to branch to STMT's block from.  */
5678              37 :           gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
5679              37 :           gsi_prev (&gsi);
5680              37 :           edge e = split_block (gsi_bb (gsi), gsi_stmt (gsi));
5681              37 :           edge_bb = e->src;
5682              37 :           if (kind == bitint_prec_large)
5683              32 :             m_gsi = gsi_end_bb (edge_bb);
5684              37 :           bqp = XALLOCAVEC (struct bq_details, cnt);
5685                 :         }
5686                 :       else
5687              21 :         m_after_stmt = stmt;
5688              58 :       if (kind != bitint_prec_large)
5689              32 :         m_upwards_2limb = end;
5690                 :
5691             214 :       for (unsigned i = 0; i < cnt; i++)
5692                 :         {
5693             156 :           m_data_cnt = 0;
5694             156 :           if (kind == bitint_prec_large)
5695              80 :             idx = size_int (bitint_big_endian ? nelts - 1 - i : i);
5696              76 :           else if (i >= 2)
5697              12 :             idx = size_int (bitint_big_endian
5698                 :                             ? nelts - 1 - end - (i > 2) : end + (i > 2));
5699                 :
5700             156 :           tree rhs1 = handle_operand (arg0, idx);
5701             156 :           if (!useless_type_conversion_p (m_limb_type, TREE_TYPE (rhs1)))
5702                 :             {
5703              26 :               if (!TYPE_UNSIGNED (TREE_TYPE (rhs1)))
5704               4 :                 rhs1 = add_cast (unsigned_type_for (TREE_TYPE (rhs1)), rhs1);
5705              26 :               rhs1 = add_cast (m_limb_type, rhs1);
5706                 :             }
5707                 :
                     :           /* IN/OUT carry the running accumulator (bit offset for CTZ/FFS,
                     :              xor for PARITY, sum for POPCOUNT) across loop iterations.  */
5708             156 :           tree in, out, tem;
5709             156 :           if (ifn == IFN_PARITY)
5710              30 :             in = prepare_data_in_out (build_zero_cst (m_limb_type), idx, &out);
5711             126 :           else if (ifn == IFN_FFS)
5712              26 :             in = prepare_data_in_out (integer_one_node, idx, &out);
5713                 :           else
5714             100 :             in = prepare_data_in_out (integer_zero_node, idx, &out);
5715                 :
5716             156 :           switch (ifn)
5717                 :             {
5718              98 :             case IFN_CTZ:
5719              98 :             case IFN_FFS:
                     :               /* Nonzero limb found: branch to STMT's block; PHIs built in
                     :                  the final switch below pick up VAL and ADDEND there.  */
5720              98 :               g = gimple_build_cond (NE_EXPR, rhs1,
5721                 :                                      build_zero_cst (m_limb_type),
5722                 :                                      NULL_TREE, NULL_TREE);
5723              98 :               insert_before (g);
5724              98 :               edge e1, e2;
5725              98 :               e1 = split_block (gsi_bb (m_gsi), g);
5726              98 :               e1->flags = EDGE_FALSE_VALUE;
5727              98 :               e2 = make_edge (e1->src, gimple_bb (stmt), EDGE_TRUE_VALUE);
5728              98 :               e1->probability = profile_probability::unlikely ();
5729              98 :               e2->probability = e1->probability.invert ();
5730              98 :               if (i == 0)
5731              37 :                 set_immediate_dominator (CDI_DOMINATORS, e2->dest, e2->src);
5732              98 :               m_gsi = gsi_after_labels (e1->dest);
5733              98 :               bqp[i].e = e2;
5734              98 :               bqp[i].val = rhs1;
5735              98 :               if (tree_fits_uhwi_p (idx))
5736              56 :                 bqp[i].addend
5737              56 :                   = build_int_cst (integer_type_node,
5738                 :                                    (bitint_big_endian
5739              56 :                                     ? nelts - 1 - tree_to_uhwi (idx)
5740              56 :                                     : tree_to_uhwi (idx)) * limb_prec
5741              56 :                                    + (ifn == IFN_FFS));
5742                 :               else
5743                 :                 {
                     :                   /* Loop body: the addend grows by limb_prec per iteration.  */
5744              42 :                   bqp[i].addend = in;
5745              42 :                   if (i == 1)
5746              21 :                     res = out;
5747                 :                   else
5748              21 :                     res = make_ssa_name (integer_type_node);
5749              42 :                   g = gimple_build_assign (res, PLUS_EXPR, in,
5750                 :                                            build_int_cst (integer_type_node,
5751              42 :                                                           limb_prec));
5752              42 :                   insert_before (g);
5753              42 :                   m_data[m_data_cnt] = res;
5754                 :                 }
5755                 :               break;
5756              30 :             case IFN_PARITY:
                     :               /* XOR all limbs together; one builtin call at the end.  */
5757              30 :               if (!integer_zerop (in))
5758                 :                 {
5759              25 :                   if (kind == bitint_prec_huge && i == 1)
5760               6 :                     res = out;
5761                 :                   else
5762              19 :                     res = make_ssa_name (m_limb_type);
5763              25 :                   g = gimple_build_assign (res, BIT_XOR_EXPR, in, rhs1);
5764              25 :                   insert_before (g);
5765                 :                 }
5766                 :               else
5767                 :                 res = rhs1;
5768              30 :               m_data[m_data_cnt] = res;
5769              30 :               break;
5770              28 :             case IFN_POPCOUNT:
                     :               /* Sum the per-limb popcount builtin results.  */
5771              28 :               g = gimple_build_call (fndecl, 1, rhs1);
5772              28 :               tem = make_ssa_name (integer_type_node);
5773              28 :               gimple_call_set_lhs (g, tem);
5774              28 :               insert_before (g);
5775              28 :               if (!integer_zerop (in))
5776                 :                 {
5777              23 :                   if (kind == bitint_prec_huge && i == 1)
5778               5 :                     res = out;
5779                 :                   else
5780              18 :                     res = make_ssa_name (integer_type_node);
5781              23 :                   g = gimple_build_assign (res, PLUS_EXPR, in, tem);
5782              23 :                   insert_before (g);
5783                 :                 }
5784                 :               else
5785                 :                 res = tem;
5786              28 :               m_data[m_data_cnt] = res;
5787              28 :               break;
5788               0 :             default:
5789               0 :               gcc_unreachable ();
5790                 :             }
5791                 :
5792             156 :           m_first = false;
                     :           /* Huge kind, first two iterations: emit the loop increment and
                     :              backedge condition, then resume after the loop.  */
5793             156 :           if (kind == bitint_prec_huge && i <= 1)
5794                 :             {
5795              64 :               if (i == 0)
5796                 :                 {
5797              32 :                   idx = make_ssa_name (sizetype);
5798              32 :                   g = gimple_build_assign (idx, PLUS_EXPR, idx_first,
5799                 :                                            bitint_big_endian
5800               0 :                                            ? size_int (-1) : size_one_node);
5801              32 :                   insert_before (g);
5802                 :                 }
5803                 :               else
5804                 :                 {
5805              32 :                   g = gimple_build_assign (idx_next, PLUS_EXPR, idx_first,
5806              64 :                                            size_int (bitint_big_endian
5807                 :                                                      ? -2 : 2));
5808              32 :                   insert_before (g);
5809              32 :                   if (bitint_big_endian)
5810               0 :                     g = gimple_build_cond (NE_EXPR, idx_first,
5811               0 :                                            size_int (cnt - 1),
5812                 :                                            NULL_TREE, NULL_TREE);
5813                 :                   else
5814              32 :                     g = gimple_build_cond (NE_EXPR, idx_next, size_int (end),
5815                 :                                            NULL_TREE, NULL_TREE);
5816              32 :                   insert_before (g);
5817              32 :                   if (ifn == IFN_CTZ || ifn == IFN_FFS)
5818              21 :                     m_gsi = gsi_after_labels (edge_bb);
5819                 :                   else
5820              11 :                     m_gsi = gsi_for_stmt (stmt);
5821              32 :                   m_bb = NULL;
5822                 :                 }
5823                 :             }
5824                 :         }
5825                 :     }
5826                 :   else
5827                 :     {
                     :       /* CLZ/CLRSB: walk from the most significant limb downwards.  */
5828              33 :       tree idx = NULL_TREE, idx_next = NULL_TREE, first = NULL_TREE;
5829              33 :       int sub_one = 0;
5830              33 :       if (kind == bitint_prec_large)
5831                 :         cnt = nelts;
5832                 :       else
5833                 :         {
5834              16 :           rem = prec % limb_prec;
5835              16 :           if (rem == 0 && (!TYPE_UNSIGNED (type) || ifn == IFN_CLRSB))
5836                 :             rem = limb_prec;
5837              16 :           end = (prec - rem) / limb_prec;
5838              16 :           cnt = 1 + (rem != 0);
5839              16 :           if (ifn == IFN_CLRSB)
5840               4 :             sub_one = 1;
5841                 :         }
5842                 :
5843              33 :       gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
5844              33 :       gsi_prev (&gsi);
5845              33 :       edge e = split_block (gsi_bb (gsi), gsi_stmt (gsi));
5846              33 :       edge_bb = e->src;
5847              33 :       m_gsi = gsi_end_bb (edge_bb);
5848                 :
5849              33 :       if (ifn == IFN_CLZ)
5850              25 :         bqp = XALLOCAVEC (struct bq_details, cnt);
5851                 :       else
5852                 :         {
                     :           /* CLRSB needs two exit edges per limb (differs-in-sign and
                     :              differs-in-value), hence twice the bookkeeping.  */
5853               8 :           gsi = gsi_for_stmt (stmt);
5854               8 :           gsi_prev (&gsi);
5855               8 :           e = split_block (gsi_bb (gsi), gsi_stmt (gsi));
5856               8 :           edge_bb = e->src;
5857               8 :           bqp = XALLOCAVEC (struct bq_details, 2 * cnt);
5858                 :         }
5859                 :
5860             110 :       for (unsigned i = 0; i < cnt; i++)
5861                 :         {
5862              77 :           m_data_cnt = 0;
5863              77 :           if (kind == bitint_prec_large)
5864              51 :             idx = size_int (bitint_big_endian ? i : cnt - i - 1);
5865              26 :           else if (i == cnt - 1)
5866              16 :             idx = create_loop (size_int (bitint_big_endian ? i : end - 1),
5867                 :                                &idx_next);
5868                 :           else
5869              10 :             idx = bitint_big_endian ? size_zero_node : size_int (end);
5870                 :
5871              77 :           tree rhs1 = handle_operand (arg0, idx);
5872              77 :           if (!useless_type_conversion_p (m_limb_type, TREE_TYPE (rhs1)))
5873                 :             {
5874              17 :               if (ifn == IFN_CLZ && !TYPE_UNSIGNED (TREE_TYPE (rhs1)))
5875               0 :                 rhs1 = add_cast (unsigned_type_for (TREE_TYPE (rhs1)), rhs1);
5876              17 :               else if (ifn == IFN_CLRSB && TYPE_UNSIGNED (TREE_TYPE (rhs1)))
5877               0 :                 rhs1 = add_cast (signed_type_for (TREE_TYPE (rhs1)), rhs1);
5878              17 :               rhs1 = add_cast (m_limb_type, rhs1);
5879                 :             }
5880                 :
5881              77 :           if (ifn == IFN_CLZ)
5882                 :             {
                     :               /* First nonzero limb decides; exit to STMT's block.  */
5883              57 :               g = gimple_build_cond (NE_EXPR, rhs1,
5884                 :                                      build_zero_cst (m_limb_type),
5885                 :                                      NULL_TREE, NULL_TREE);
5886              57 :               insert_before (g);
5887              57 :               edge e1 = split_block (gsi_bb (m_gsi), g);
5888              57 :               e1->flags = EDGE_FALSE_VALUE;
5889              57 :               edge e2 = make_edge (e1->src, gimple_bb (stmt), EDGE_TRUE_VALUE);
5890              57 :               e1->probability = profile_probability::unlikely ();
5891              57 :               e2->probability = e1->probability.invert ();
5892              57 :               if (i == 0)
5893              25 :                 set_immediate_dominator (CDI_DOMINATORS, e2->dest, e2->src);
5894              57 :               m_gsi = gsi_after_labels (e1->dest);
5895              57 :               bqp[i].e = e2;
5896              57 :               bqp[i].val = rhs1;
5897                 :             }
5898                 :           else
5899                 :             {
5900              20 :               if (i == 0)
5901                 :                 {
                     :                   /* CLRSB, most significant limb: FIRST must be 0 or -1
                     :                      (the +1 > 1 trick, see lower_mul_overflow) or the
                     :                      answer lies entirely within this limb.  */
5902               8 :                   first = rhs1;
5903               8 :                   g = gimple_build_assign (make_ssa_name (m_limb_type),
5904                 :                                            PLUS_EXPR, rhs1,
5905                 :                                            build_int_cst (m_limb_type, 1));
5906               8 :                   insert_before (g);
5907               8 :                   g = gimple_build_cond (GT_EXPR, gimple_assign_lhs (g),
5908                 :                                          build_int_cst (m_limb_type, 1),
5909                 :                                          NULL_TREE, NULL_TREE);
5910               8 :                   insert_before (g);
5911                 :                 }
5912                 :               else
5913                 :                 {
                     :                   /* Later limbs: a sign-bit mismatch with FIRST ends the
                     :                      run of redundant sign bits at this limb's top bit.  */
5914              12 :                   g = gimple_build_assign (make_ssa_name (m_limb_type),
5915                 :                                            BIT_XOR_EXPR, rhs1, first);
5916              12 :                   insert_before (g);
5917              12 :                   tree stype = signed_type_for (m_limb_type);
5918              12 :                   g = gimple_build_cond (LT_EXPR,
5919                 :                                          add_cast (stype,
5920                 :                                                    gimple_assign_lhs (g)),
5921                 :                                          build_zero_cst (stype),
5922                 :                                          NULL_TREE, NULL_TREE);
5923              12 :                   insert_before (g);
5924              12 :                   edge e1 = split_block (gsi_bb (m_gsi), g);
5925              12 :                   e1->flags = EDGE_FALSE_VALUE;
5926              12 :                   edge e2 = make_edge (e1->src, gimple_bb (stmt),
5927                 :                                        EDGE_TRUE_VALUE);
5928              12 :                   e1->probability = profile_probability::unlikely ();
5929              12 :                   e2->probability = e1->probability.invert ();
5930              12 :                   if (i == 1)
5931               8 :                     set_immediate_dominator (CDI_DOMINATORS, e2->dest,
5932                 :                                              e2->src);
5933              12 :                   m_gsi = gsi_after_labels (e1->dest);
5934              12 :                   bqp[2 * i].e = e2;
5935              12 :                   g = gimple_build_cond (NE_EXPR, rhs1, first,
5936                 :                                          NULL_TREE, NULL_TREE);
5937              12 :                   insert_before (g);
5938                 :                 }
5939              20 :               edge e1 = split_block (gsi_bb (m_gsi), g);
5940              20 :               e1->flags = EDGE_FALSE_VALUE;
5941              20 :               edge e2 = make_edge (e1->src, edge_bb, EDGE_TRUE_VALUE);
5942              20 :               e1->probability = profile_probability::unlikely ();
5943              20 :               e2->probability = e1->probability.invert ();
5944              20 :               if (i == 0)
5945               8 :                 set_immediate_dominator (CDI_DOMINATORS, e2->dest, e2->src);
5946              20 :               m_gsi = gsi_after_labels (e1->dest);
5947              20 :               bqp[2 * i + 1].e = e2;
5948              20 :               bqp[i].val = rhs1;
5949                 :             }
                     :           /* Fixed index: the bit-position addend is a constant; in the
                     :              loop it's an induction variable stepped by limb_prec.  */
5950              77 :           if (tree_fits_uhwi_p (idx))
5951             122 :             bqp[i].addend
5952              61 :               = build_int_cst (integer_type_node,
5953              61 :                                (int) prec
5954              61 :                                - (((int) (bitint_big_endian
5955               0 :                                           ? nelts - 1 - tree_to_uhwi (idx)
5956              61 :                                           : tree_to_uhwi (idx)) + 1)
5957              61 :                                   * limb_prec) - sub_one);
5958                 :           else
5959                 :             {
5960              16 :               tree in, out;
5961              16 :               in = build_int_cst (integer_type_node, rem - sub_one);
5962              16 :               m_first = true;
5963              16 :               in = prepare_data_in_out (in, idx, &out);
5964              16 :               out = m_data[m_data_cnt + 1];
5965              16 :               bqp[i].addend = in;
5966              16 :               g = gimple_build_assign (out, PLUS_EXPR, in,
5967                 :                                        build_int_cst (integer_type_node,
5968              16 :                                                       limb_prec));
5969              16 :               insert_before (g);
5970              16 :               m_data[m_data_cnt] = out;
5971                 :             }
5972                 :
5973              77 :           m_first = false;
                     :           /* Close the downward loop over the low limbs.  */
5974              77 :           if (kind == bitint_prec_huge && i == cnt - 1)
5975                 :             {
5976              32 :               g = gimple_build_assign (idx_next, PLUS_EXPR, idx,
5977                 :                                        bitint_big_endian
5978              16 :                                        ? size_one_node : size_int (-1));
5979              16 :               insert_before (g);
5980              16 :               g = gimple_build_cond (NE_EXPR, idx,
5981                 :                                      bitint_big_endian
5982               0 :                                      ? size_int (nelts - 1) : size_zero_node,
5983                 :                                      NULL_TREE, NULL_TREE);
5984              16 :               insert_before (g);
5985              16 :               edge true_edge, false_edge;
5986              16 :               extract_true_false_edges_from_block (gsi_bb (m_gsi),
5987                 :                                                    &true_edge, &false_edge);
5988              16 :               m_gsi = gsi_after_labels (false_edge->dest);
5989              16 :               m_bb = NULL;
5990                 :             }
5991                 :         }
5992                 :     }
                     :   /* Combine the per-limb results into the final scalar answer, using
                     :      PHIs over the exit edges recorded in BQP.  */
5993              91 :   switch (ifn)
5994                 :     {
5995              62 :     case IFN_CLZ:
5996              62 :     case IFN_CTZ:
5997              62 :     case IFN_FFS:
                     :       /* phi1 = deciding limb value, phi2 = its bit-position addend;
                     :          result is addend + __builtin_c[lt]z (limb).  */
5998              62 :       gphi *phi1, *phi2, *phi3;
5999              62 :       basic_block bb;
6000              62 :       bb = gsi_bb (m_gsi);
6001              62 :       remove_edge (find_edge (bb, gimple_bb (stmt)));
6002              62 :       phi1 = create_phi_node (make_ssa_name (m_limb_type),
6003                 :                               gimple_bb (stmt));
6004              62 :       phi2 = create_phi_node (make_ssa_name (integer_type_node),
6005                 :                               gimple_bb (stmt));
6006             217 :       for (unsigned i = 0; i < cnt; i++)
6007                 :         {
6008             155 :           add_phi_arg (phi1, bqp[i].val, bqp[i].e, UNKNOWN_LOCATION);
6009             155 :           add_phi_arg (phi2, bqp[i].addend, bqp[i].e, UNKNOWN_LOCATION);
6010                 :         }
                     :       /* No second argument means the all-zeros input is undefined.  */
6011              62 :       if (arg1 == NULL_TREE)
6012                 :         {
6013              35 :           g = gimple_build_builtin_unreachable (m_loc);
6014              35 :           insert_before (g);
6015                 :         }
6016              62 :       m_gsi = gsi_for_stmt (stmt);
6017              62 :       g = gimple_build_call (fndecl, 1, gimple_phi_result (phi1));
6018              62 :       gimple_call_set_lhs (g, make_ssa_name (integer_type_node));
6019              62 :       insert_before (g);
6020              62 :       if (arg1 == NULL_TREE)
6021              35 :         g = gimple_build_assign (lhs, PLUS_EXPR,
6022                 :                                  gimple_phi_result (phi2),
6023                 :                                  gimple_call_lhs (g));
6024                 :       else
6025                 :         {
                     :           /* With ARG1 the all-zeros path yields ARG1 via phi3.  */
6026              27 :           g = gimple_build_assign (make_ssa_name (integer_type_node),
6027                 :                                    PLUS_EXPR, gimple_phi_result (phi2),
6028                 :                                    gimple_call_lhs (g));
6029              27 :           insert_before (g);
6030              27 :           edge e1 = split_block (gimple_bb (stmt), g);
6031              27 :           edge e2 = make_edge (bb, e1->dest, EDGE_FALLTHRU);
6032              27 :           e2->probability = profile_probability::always ();
6033              27 :           set_immediate_dominator (CDI_DOMINATORS, e1->dest,
6034                 :                                    get_immediate_dominator (CDI_DOMINATORS,
6035                 :                                                             e1->src));
6036              27 :           phi3 = create_phi_node (make_ssa_name (integer_type_node), e1->dest);
6037              27 :           add_phi_arg (phi3, gimple_assign_lhs (g), e1, UNKNOWN_LOCATION);
6038              27 :           add_phi_arg (phi3, arg1, e2, UNKNOWN_LOCATION);
6039              27 :           m_gsi = gsi_for_stmt (stmt);
6040              27 :           g = gimple_build_assign (lhs, gimple_phi_result (phi3));
6041                 :         }
6042              62 :       gsi_replace (&m_gsi, g, true);
6043              62 :       break;
6044               8 :     case IFN_CLRSB:
6045               8 :       bb = gsi_bb (m_gsi);
6046               8 :       remove_edge (find_edge (bb, edge_bb));
6047               8 :       edge e;
6048               8 :       e = make_edge (bb, gimple_bb (stmt), EDGE_FALLTHRU);
6049               8 :       e->probability = profile_probability::always ();
6050               8 :       set_immediate_dominator (CDI_DOMINATORS, gimple_bb (stmt),
6051                 :                                get_immediate_dominator (CDI_DOMINATORS,
6052                 :                                                         edge_bb));
6053               8 :       phi1 = create_phi_node (make_ssa_name (m_limb_type),
6054                 :                               edge_bb);
6055               8 :       phi2 = create_phi_node (make_ssa_name (integer_type_node),
6056                 :                               edge_bb);
6057               8 :       phi3 = create_phi_node (make_ssa_name (integer_type_node),
6058                 :                               gimple_bb (stmt));
6059              28 :       for (unsigned i = 0; i < cnt; i++)
6060                 :         {
6061              20 :           add_phi_arg (phi1, bqp[i].val, bqp[2 * i + 1].e, UNKNOWN_LOCATION);
6062              20 :           add_phi_arg (phi2, bqp[i].addend, bqp[2 * i + 1].e,
6063                 :                        UNKNOWN_LOCATION);
6064              20 :           tree a = bqp[i].addend;
6065              20 :           if (i && kind == bitint_prec_large)
6066               8 :             a = int_const_binop (PLUS_EXPR, a, integer_minus_one_node);
6067              20 :           if (i)
6068              12 :             add_phi_arg (phi3, a, bqp[2 * i].e, UNKNOWN_LOCATION);
6069                 :         }
                     :       /* Fallthrough = every limb equalled FIRST: the answer is the
                     :          maximum, prec - 1.  */
6070               8 :       add_phi_arg (phi3, build_int_cst (integer_type_node, prec - 1), e,
6071                 :                    UNKNOWN_LOCATION);
6072               8 :       m_gsi = gsi_after_labels (edge_bb);
6073               8 :       g = gimple_build_call (fndecl, 1,
6074                 :                              add_cast (signed_type_for (m_limb_type),
6075                 :                                        gimple_phi_result (phi1)));
6076               8 :       gimple_call_set_lhs (g, make_ssa_name (integer_type_node));
6077               8 :       insert_before (g);
6078               8 :       g = gimple_build_assign (make_ssa_name (integer_type_node),
6079                 :                                PLUS_EXPR, gimple_call_lhs (g),
6080                 :                                gimple_phi_result (phi2));
6081               8 :       insert_before (g);
6082               8 :       if (kind != bitint_prec_large)
6083                 :         {
6084               4 :           g = gimple_build_assign (make_ssa_name (integer_type_node),
6085                 :                                    PLUS_EXPR, gimple_assign_lhs (g),
6086                 :                                    integer_one_node);
6087               4 :           insert_before (g);
6088                 :         }
6089               8 :       add_phi_arg (phi3, gimple_assign_lhs (g),
6090                 :                    find_edge (edge_bb, gimple_bb (stmt)), UNKNOWN_LOCATION);
6091               8 :       m_gsi = gsi_for_stmt (stmt);
6092               8 :       g = gimple_build_assign (lhs, gimple_phi_result (phi3));
6093               8 :       gsi_replace (&m_gsi, g, true);
6094               8 :       break;
6095              11 :     case IFN_PARITY:
                     :       /* RES holds the xor of all limbs; one builtin finishes it.  */
6096              11 :       g = gimple_build_call (fndecl, 1, res);
6097              11 :       gimple_call_set_lhs (g, lhs);
6098              11 :       gsi_replace (&m_gsi, g, true);
6099              11 :       break;
6100              10 :     case IFN_POPCOUNT:
                     :       /* RES already holds the summed per-limb popcounts.  */
6101              10 :       g = gimple_build_assign (lhs, res);
6102              10 :       gsi_replace (&m_gsi, g, true);
6103              10 :       break;
6104               0 :     default:
6105               0 :       gcc_unreachable ();
6106                 :     }
6107                 : }
6108 :
6109 : /* Lower a call statement with one or more large/huge _BitInt
6110 : arguments or large/huge _BitInt return value. */
6111 :
6112 : void
6113 8749 : bitint_large_huge::lower_call (tree obj, gimple *stmt)
6114 : {
6115 8749 : gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
6116 8749 : unsigned int nargs = gimple_call_num_args (stmt);
 : /* Internal functions with dedicated _BitInt lowerings are dispatched
 : to their own routines and need no generic argument rewriting. */
6117 8749 : if (gimple_call_internal_p (stmt))
6118 4272 : switch (gimple_call_internal_fn (stmt))
6119 : {
6120 2721 : case IFN_ADD_OVERFLOW:
6121 2721 : case IFN_SUB_OVERFLOW:
6122 2721 : case IFN_UBSAN_CHECK_ADD:
6123 2721 : case IFN_UBSAN_CHECK_SUB:
6124 2721 : lower_addsub_overflow (obj, stmt);
6125 6993 : return;
6126 1460 : case IFN_MUL_OVERFLOW:
6127 1460 : case IFN_UBSAN_CHECK_MUL:
6128 1460 : lower_mul_overflow (obj, stmt);
6129 1460 : return;
6130 91 : case IFN_CLZ:
6131 91 : case IFN_CTZ:
6132 91 : case IFN_CLRSB:
6133 91 : case IFN_FFS:
6134 91 : case IFN_PARITY:
6135 91 : case IFN_POPCOUNT:
6136 91 : lower_bit_query (stmt);
6137 91 : return;
6138 : default:
6139 : break;
6140 : }
6141 4477 : bool returns_twice = (gimple_call_flags (stmt) & ECF_RETURNS_TWICE) != 0;
 : /* Rewrite each large/huge _BitInt SSA_NAME argument into a fresh
 : SSA_NAME initialized (just before the call) from the underlying
 : partition variable in m_vars. */
6142 10072 : for (unsigned int i = 0; i < nargs; ++i)
6143 : {
6144 5595 : tree arg = gimple_call_arg (stmt, i);
6145 8855 : if (TREE_CODE (arg) != SSA_NAME
6146 2432 : || TREE_CODE (TREE_TYPE (arg)) != BITINT_TYPE
6147 7965 : || bitint_precision_kind (TREE_TYPE (arg)) <= bitint_prec_middle)
6148 3260 : continue;
 : /* An (uninitialized) anonymous default def gets a new anonymous
 : default def rather than a load from a partition variable. */
6149 2335 : if (SSA_NAME_IS_DEFAULT_DEF (arg)
6150 2335 : && (!SSA_NAME_VAR (arg) || VAR_P (SSA_NAME_VAR (arg))))
6151 : {
6152 1 : tree var = create_tmp_reg (TREE_TYPE (arg));
6153 1 : arg = get_or_create_ssa_default_def (cfun, var);
6154 : }
6155 : else
6156 : {
6157 2334 : int p = var_to_partition (m_map, arg);
6158 2334 : tree v = m_vars[p];
6159 2334 : gcc_assert (v != NULL_TREE);
6160 2334 : if (!types_compatible_p (TREE_TYPE (arg), TREE_TYPE (v)))
6161 2314 : v = build1 (VIEW_CONVERT_EXPR, TREE_TYPE (arg), v);
6162 2334 : arg = make_ssa_name (TREE_TYPE (arg));
6163 2334 : gimple *g = gimple_build_assign (arg, v);
6164 2334 : gsi_insert_before (&gsi, g, GSI_SAME_STMT);
 : /* Record returns_twice calls with abnormal preds once, so the
 : loads inserted above can be fixed up later; clearing the flag
 : avoids pushing the same stmt for every argument. */
6165 2334 : if (returns_twice && bb_has_abnormal_pred (gimple_bb (stmt)))
6166 : {
6167 11 : m_returns_twice_calls.safe_push (stmt);
6168 11 : returns_twice = false;
6169 : }
6170 : }
6171 2335 : gimple_call_set_arg (stmt, i, arg);
 : /* Remember the replacement SSA_NAMEs in m_preserved (lazily
 : allocated). */
6172 2335 : if (m_preserved == NULL)
6173 404 : m_preserved = BITMAP_ALLOC (NULL);
6174 2335 : bitmap_set_bit (m_preserved, SSA_NAME_VERSION (arg));
6175 : }
 : /* A large/huge _BitInt lhs is redirected to store directly into its
 : partition variable; the old SSA_NAME gets a nop def stmt. */
6176 4477 : tree lhs = gimple_call_lhs (stmt);
6177 4477 : if (lhs
6178 4346 : && TREE_CODE (lhs) == SSA_NAME
6179 4346 : && TREE_CODE (TREE_TYPE (lhs)) == BITINT_TYPE
6180 8753 : && bitint_precision_kind (TREE_TYPE (lhs)) >= bitint_prec_large)
6181 : {
6182 4250 : int p = var_to_partition (m_map, lhs);
6183 4250 : tree v = m_vars[p];
6184 4250 : gcc_assert (v != NULL_TREE);
6185 4250 : if (!types_compatible_p (TREE_TYPE (lhs), TREE_TYPE (v)))
6186 4250 : v = build1 (VIEW_CONVERT_EXPR, TREE_TYPE (lhs), v);
6187 4250 : gimple_call_set_lhs (stmt, v);
6188 4250 : SSA_NAME_DEF_STMT (lhs) = gimple_build_nop ();
6189 : }
6190 4477 : update_stmt (stmt);
6191 : }
6192 :
6193 : /* Lower __asm STMT which involves large/huge _BitInt values. */
6194 :
6195 : void
6196 3 : bitint_large_huge::lower_asm (gimple *stmt)
6197 : {
6198 3 : gasm *g = as_a <gasm *> (stmt);
6199 3 : unsigned noutputs = gimple_asm_noutputs (g);
6200 3 : unsigned ninputs = gimple_asm_ninputs (g);
6201 :
6202 5 : for (unsigned i = 0; i < noutputs; ++i)
6203 : {
6204 2 : tree t = gimple_asm_output_op (g, i);
6205 2 : tree s = TREE_VALUE (t);
6206 2 : if (TREE_CODE (s) == SSA_NAME
6207 1 : && TREE_CODE (TREE_TYPE (s)) == BITINT_TYPE
6208 3 : && bitint_precision_kind (TREE_TYPE (s)) >= bitint_prec_large)
6209 : {
6210 1 : int part = var_to_partition (m_map, s);
6211 1 : gcc_assert (m_vars[part] != NULL_TREE);
6212 1 : TREE_VALUE (t) = m_vars[part];
6213 : }
6214 : }
6215 8 : for (unsigned i = 0; i < ninputs; ++i)
6216 : {
6217 5 : tree t = gimple_asm_input_op (g, i);
6218 5 : tree s = TREE_VALUE (t);
6219 5 : if (TREE_CODE (s) == SSA_NAME
6220 4 : && TREE_CODE (TREE_TYPE (s)) == BITINT_TYPE
6221 9 : && bitint_precision_kind (TREE_TYPE (s)) >= bitint_prec_large)
6222 : {
6223 4 : if (SSA_NAME_IS_DEFAULT_DEF (s)
6224 4 : && (!SSA_NAME_VAR (s) || VAR_P (SSA_NAME_VAR (s))))
6225 : {
6226 1 : TREE_VALUE (t) = create_tmp_var (TREE_TYPE (s), "bitint");
6227 1 : mark_addressable (TREE_VALUE (t));
6228 : }
6229 : else
6230 : {
6231 3 : int part = var_to_partition (m_map, s);
6232 3 : gcc_assert (m_vars[part] != NULL_TREE);
6233 3 : TREE_VALUE (t) = m_vars[part];
6234 : }
6235 : }
6236 : }
6237 3 : update_stmt (stmt);
6238 3 : }
6239 :
6240 : /* Lower statement STMT which involves large/huge _BitInt values
6241 : into code accessing individual limbs. */
6242 :
6243 : void
6244 42546 : bitint_large_huge::lower_stmt (gimple *stmt)
6245 : {
 : /* Reset the per-statement lowering state of the pass object before
 : dispatching on the statement shape. */
6246 42546 : m_first = true;
6247 42546 : m_lhs = NULL_TREE;
6248 42546 : m_data.truncate (0);
6249 42546 : m_data_cnt = 0;
6250 42546 : m_gsi = gsi_for_stmt (stmt);
6251 42546 : m_after_stmt = NULL;
6252 42546 : m_bb = NULL;
6253 42546 : m_init_gsi = m_gsi;
6254 42546 : gsi_prev (&m_init_gsi);
6255 42546 : m_preheader_bb = NULL;
6256 42546 : m_upwards_2limb = 0;
6257 42546 : m_upwards = false;
6258 42546 : m_var_msb = false;
6259 42546 : m_cast_conditional = false;
6260 42546 : m_bitfld_load = 0;
6261 42546 : m_loc = gimple_location (stmt);
6262 42546 : if (is_gimple_call (stmt))
6263 : {
6264 7087 : lower_call (NULL_TREE, stmt);
6265 7087 : return;
6266 : }
6267 35459 : if (gimple_code (stmt) == GIMPLE_ASM)
6268 : {
6269 3 : lower_asm (stmt);
6270 3 : return;
6271 : }
6272 35456 : tree lhs = NULL_TREE, cmp_op1 = NULL_TREE, cmp_op2 = NULL_TREE;
6273 35456 : tree_code cmp_code = comparison_op (stmt, &cmp_op1, &cmp_op2);
6274 35456 : bool eq_p = (cmp_code == EQ_EXPR || cmp_code == NE_EXPR);
6275 35456 : bool mergeable_cast_p = false;
6276 35456 : bool final_cast_p = false;
 : /* Classify casts: a widening/narrowing to a large/huge _BitInt from
 : an integral type can be merged into limb-wise processing
 : (mergeable_cast_p); a conversion from a large/huge _BitInt to a
 : small integral/pointer type (or VCE) is handled as a "final" cast
 : below (final_cast_p). */
6277 35456 : if (gimple_assign_cast_p (stmt))
6278 : {
6279 5414 : lhs = gimple_assign_lhs (stmt);
6280 5414 : tree rhs1 = gimple_assign_rhs1 (stmt);
6281 5414 : if (TREE_CODE (rhs1) == VIEW_CONVERT_EXPR)
6282 44 : rhs1 = TREE_OPERAND (rhs1, 0);
6283 5414 : if (TREE_CODE (TREE_TYPE (lhs)) == BITINT_TYPE
6284 1221 : && bitint_precision_kind (TREE_TYPE (lhs)) >= bitint_prec_large
6285 6599 : && INTEGRAL_TYPE_P (TREE_TYPE (rhs1)))
6286 : mergeable_cast_p = true;
6287 4372 : else if (TREE_CODE (TREE_TYPE (rhs1)) == BITINT_TYPE
6288 4229 : && bitint_precision_kind (TREE_TYPE (rhs1)) >= bitint_prec_large
6289 8601 : && (INTEGRAL_TYPE_P (TREE_TYPE (lhs))
6290 36 : || POINTER_TYPE_P (TREE_TYPE (lhs))
6291 35 : || gimple_assign_rhs_code (stmt) == VIEW_CONVERT_EXPR))
6292 : {
6293 4229 : final_cast_p = true;
6294 4229 : if (((TREE_CODE (TREE_TYPE (lhs)) == INTEGER_TYPE
6295 536 : && TYPE_PRECISION (TREE_TYPE (lhs)) > MAX_FIXED_MODE_SIZE)
6296 4229 : || (!INTEGRAL_TYPE_P (TREE_TYPE (lhs))
6297 36 : && !POINTER_TYPE_P (TREE_TYPE (lhs))))
6298 4264 : && gimple_assign_rhs_code (stmt) == VIEW_CONVERT_EXPR)
6299 : {
6300 : /* Handle VIEW_CONVERT_EXPRs to not generally supported
6301 : huge INTEGER_TYPEs like uint256_t or uint512_t. These
6302 : are usually emitted from memcpy folding and backends
6303 : support moves with them but that is usually it.
6304 : Similarly handle VCEs to vector/complex types etc. */
6305 35 : gcc_assert (TREE_CODE (rhs1) == SSA_NAME);
6306 35 : if (SSA_NAME_IS_DEFAULT_DEF (rhs1)
6307 35 : && (!SSA_NAME_VAR (rhs1) || VAR_P (SSA_NAME_VAR (rhs1))))
6308 : {
6309 0 : tree var = create_tmp_reg (TREE_TYPE (lhs));
6310 0 : rhs1 = get_or_create_ssa_default_def (cfun, var);
6311 0 : gimple_assign_set_rhs1 (stmt, rhs1);
6312 0 : gimple_assign_set_rhs_code (stmt, SSA_NAME);
6313 : }
6314 35 : else if (m_names == NULL
6315 35 : || !bitmap_bit_p (m_names, SSA_NAME_VERSION (rhs1)))
6316 : {
6317 0 : gimple *g = SSA_NAME_DEF_STMT (rhs1);
6318 0 : gcc_assert (gimple_assign_load_p (g));
6319 0 : tree mem = gimple_assign_rhs1 (g);
6320 0 : tree ltype = TREE_TYPE (lhs);
6321 0 : addr_space_t as = TYPE_ADDR_SPACE (TREE_TYPE (mem));
 : /* Preserve a non-default address space of the memory
 : reference on the new VCE type. */
6322 0 : if (as != TYPE_ADDR_SPACE (ltype))
6323 0 : ltype
6324 0 : = build_qualified_type (ltype,
6325 0 : TYPE_QUALS (ltype)
6326 0 : | ENCODE_QUAL_ADDR_SPACE (as));
6327 0 : rhs1 = build1 (VIEW_CONVERT_EXPR, ltype, unshare_expr (mem));
6328 0 : gimple_assign_set_rhs1 (stmt, rhs1);
6329 : }
6330 : else
6331 : {
6332 35 : int part = var_to_partition (m_map, rhs1);
6333 35 : gcc_assert (m_vars[part] != NULL_TREE);
6334 35 : rhs1 = build1 (VIEW_CONVERT_EXPR, TREE_TYPE (lhs),
6335 : m_vars[part]);
6336 35 : gimple_assign_set_rhs1 (stmt, rhs1);
6337 : }
6338 35 : update_stmt (stmt);
6339 35 : return;
6340 : }
 : /* If the cast source is a not-separately-tracked IMAGPART_EXPR
 : of an optimizable .{ADD,SUB,MUL}_OVERFLOW result, lower the
 : overflow call here (or defer to its REALPART_EXPR handling). */
6341 4194 : if (TREE_CODE (rhs1) == SSA_NAME
6342 4194 : && (m_names == NULL
6343 4157 : || !bitmap_bit_p (m_names, SSA_NAME_VERSION (rhs1))))
6344 : {
6345 1717 : gimple *g = SSA_NAME_DEF_STMT (rhs1);
6346 1717 : if (is_gimple_assign (g)
6347 1717 : && gimple_assign_rhs_code (g) == IMAGPART_EXPR)
6348 : {
6349 1638 : tree rhs2 = TREE_OPERAND (gimple_assign_rhs1 (g), 0);
6350 1638 : if (TREE_CODE (rhs2) == SSA_NAME
6351 1638 : && (m_names == NULL
6352 1601 : || !bitmap_bit_p (m_names, SSA_NAME_VERSION (rhs2))))
6353 : {
6354 1638 : g = SSA_NAME_DEF_STMT (rhs2);
6355 1638 : int ovf = optimizable_arith_overflow (g);
6356 1638 : if (ovf == 2)
6357 : /* If .{ADD,SUB,MUL}_OVERFLOW has both REALPART_EXPR
6358 : and IMAGPART_EXPR uses, where the latter is cast to
6359 : non-_BitInt, it will be optimized when handling
6360 : the REALPART_EXPR. */
6361 : return;
6362 91 : if (ovf == 1)
6363 : {
6364 91 : lower_call (NULL_TREE, g);
6365 91 : return;
6366 : }
6367 : }
6368 : }
6369 : }
6370 : }
 : /* VCE from a non-integral/non-pointer type into a large/huge
 : _BitInt: store the source through a VCE of the partition var. */
6371 143 : else if (TREE_CODE (TREE_TYPE (lhs)) == BITINT_TYPE
6372 143 : && bitint_precision_kind (TREE_TYPE (lhs)) >= bitint_prec_large
6373 143 : && !INTEGRAL_TYPE_P (TREE_TYPE (rhs1))
6374 143 : && !POINTER_TYPE_P (TREE_TYPE (rhs1))
6375 286 : && gimple_assign_rhs_code (stmt) == VIEW_CONVERT_EXPR)
6376 : {
6377 8 : int part = var_to_partition (m_map, lhs);
6378 8 : gcc_assert (m_vars[part] != NULL_TREE);
6379 8 : lhs = build1 (VIEW_CONVERT_EXPR, TREE_TYPE (rhs1), m_vars[part]);
6380 8 : insert_before (gimple_build_assign (lhs, rhs1));
6381 8 : return;
6382 : }
6383 : }
 : /* A store whose rhs is defined by a non-mergeable statement not
 : tracked in m_names is lowered by emitting the defining statement
 : directly into the store destination; the store itself is removed. */
6384 33775 : if (gimple_store_p (stmt))
6385 : {
6386 8926 : tree rhs1 = gimple_assign_rhs1 (stmt);
6387 8926 : if (TREE_CODE (rhs1) == SSA_NAME
6388 8926 : && (m_names == NULL
6389 7885 : || !bitmap_bit_p (m_names, SSA_NAME_VERSION (rhs1))))
6390 : {
6391 1594 : gimple *g = SSA_NAME_DEF_STMT (rhs1);
6392 1594 : m_loc = gimple_location (g);
6393 1594 : lhs = gimple_assign_lhs (stmt);
6394 1594 : if (is_gimple_assign (g) && !mergeable_op (g))
6395 632 : switch (gimple_assign_rhs_code (g))
6396 : {
6397 118 : case LSHIFT_EXPR:
6398 118 : case RSHIFT_EXPR:
6399 118 : lower_shift_stmt (lhs, g)
6400 463 : handled:
6401 463 : m_gsi = gsi_for_stmt (stmt);
6402 463 : unlink_stmt_vdef (stmt);
6403 926 : release_ssa_name (gimple_vdef (stmt));
6404 463 : gsi_remove (&m_gsi, true);
6405 463 : return;
6406 204 : case MULT_EXPR:
6407 204 : case TRUNC_DIV_EXPR:
6408 204 : case EXACT_DIV_EXPR:
6409 204 : case TRUNC_MOD_EXPR:
6410 204 : lower_muldiv_stmt (lhs, g);
6411 204 : goto handled;
6412 44 : case FIX_TRUNC_EXPR:
6413 44 : lower_float_conv_stmt (lhs, g);
6414 44 : goto handled;
6415 73 : case REALPART_EXPR:
6416 73 : case IMAGPART_EXPR:
6417 73 : lower_cplxpart_stmt (lhs, g);
6418 73 : goto handled;
6419 7 : case VIEW_CONVERT_EXPR:
6420 7 : {
6421 7 : tree rhs1 = gimple_assign_rhs1 (g);
6422 7 : rhs1 = TREE_OPERAND (rhs1, 0);
6423 7 : if (!INTEGRAL_TYPE_P (TREE_TYPE (rhs1))
6424 6 : && !POINTER_TYPE_P (TREE_TYPE (rhs1)))
6425 : {
 : /* Turn the store into a plain copy through a VCE of
 : the destination, keeping address-space quals. */
6426 6 : tree ltype = TREE_TYPE (rhs1);
6427 6 : addr_space_t as = TYPE_ADDR_SPACE (TREE_TYPE (lhs));
6428 6 : ltype
6429 12 : = build_qualified_type (ltype,
6430 6 : TYPE_QUALS (TREE_TYPE (lhs))
6431 6 : | ENCODE_QUAL_ADDR_SPACE (as));
6432 6 : lhs = build1 (VIEW_CONVERT_EXPR, ltype, lhs);
6433 6 : gimple_assign_set_lhs (stmt, lhs);
6434 6 : gimple_assign_set_rhs1 (stmt, rhs1);
6435 6 : gimple_assign_set_rhs_code (stmt, TREE_CODE (rhs1));
6436 6 : update_stmt (stmt);
6437 6 : return;
6438 : }
6439 : }
6440 : break;
6441 : default:
6442 : break;
6443 : }
6444 962 : else if (optimizable_arith_overflow (g) == 3)
6445 : {
6446 24 : lower_call (lhs, g);
6447 24 : goto handled;
6448 : }
6449 1125 : m_loc = gimple_location (stmt);
6450 : }
6451 : }
 : /* Mergeable statements (including stores, loads, equality compares,
 : mergeable casts and PAREN_EXPR) are lowered by a single limb loop. */
6452 33306 : if (mergeable_op (stmt)
6453 21974 : || gimple_store_p (stmt)
6454 21974 : || gimple_assign_load_p (stmt)
6455 : || eq_p
6456 17174 : || mergeable_cast_p
6457 43402 : || (is_gimple_assign (stmt)
6458 9708 : && gimple_assign_rhs_code (stmt) == PAREN_EXPR))
6459 : {
6460 23212 : lhs = lower_mergeable_stmt (stmt, cmp_code, cmp_op1, cmp_op2);
6461 23212 : if (!eq_p)
6462 : return;
6463 : }
6464 10094 : else if (cmp_code != ERROR_MARK)
6465 722 : lhs = lower_comparison_stmt (stmt, cmp_code, cmp_op1, cmp_op2);
 : /* Rewrite the consumer of a lowered comparison: GIMPLE_COND,
 : COND_EXPR rhs or a plain boolean assignment. */
6466 16595 : if (cmp_code != ERROR_MARK)
6467 : {
6468 7223 : if (gimple_code (stmt) == GIMPLE_COND)
6469 : {
6470 6457 : gcond *cstmt = as_a <gcond *> (stmt);
6471 6457 : gimple_cond_set_lhs (cstmt, lhs);
6472 6457 : gimple_cond_set_rhs (cstmt, boolean_false_node);
6473 6457 : gimple_cond_set_code (cstmt, cmp_code);
6474 6457 : update_stmt (stmt);
6475 6457 : return;
6476 : }
6477 766 : if (gimple_assign_rhs_code (stmt) == COND_EXPR)
6478 : {
6479 0 : tree cond = build2 (cmp_code, boolean_type_node, lhs,
6480 : boolean_false_node);
6481 0 : gimple_assign_set_rhs1 (stmt, cond);
6482 0 : lhs = gimple_assign_lhs (stmt);
6483 0 : gcc_assert (TREE_CODE (TREE_TYPE (lhs)) != BITINT_TYPE
6484 : || (bitint_precision_kind (TREE_TYPE (lhs))
6485 : <= bitint_prec_middle));
6486 0 : update_stmt (stmt);
6487 0 : return;
6488 : }
6489 766 : gimple_assign_set_rhs1 (stmt, lhs);
6490 766 : gimple_assign_set_rhs2 (stmt, boolean_false_node);
6491 766 : gimple_assign_set_rhs_code (stmt, cmp_code);
6492 766 : update_stmt (stmt);
6493 766 : return;
6494 : }
 : /* Final casts: assemble the small result from the low limb(s) of the
 : _BitInt source. */
6495 9372 : if (final_cast_p)
6496 : {
6497 2556 : tree lhs_type = TREE_TYPE (lhs);
6498 : /* Add support for 3 or more limbs filled in from normal integral
6499 : type if this assert fails. If no target chooses limb mode smaller
6500 : than half of largest supported normal integral type, this will not
6501 : be needed. */
6502 2556 : gcc_assert (TYPE_PRECISION (lhs_type) <= 2 * limb_prec)
6503 2556 : gimple *g;
6504 2556 : if ((TREE_CODE (lhs_type) == BITINT_TYPE
6505 36 : && bitint_precision_kind (lhs_type) == bitint_prec_middle)
6506 2577 : || POINTER_TYPE_P (lhs_type))
6507 16 : lhs_type = build_nonstandard_integer_type (TYPE_PRECISION (lhs_type),
6508 16 : TYPE_UNSIGNED (lhs_type));
6509 2556 : m_data_cnt = 0;
6510 2556 : tree rhs1 = gimple_assign_rhs1 (stmt);
6511 2556 : unsigned int prec = TYPE_PRECISION (TREE_TYPE (rhs1));
6512 2556 : unsigned int cnt = CEIL (prec, limb_prec);
 : /* On big-endian the least significant limb has the highest index. */
6513 2556 : tree r1 = handle_operand (rhs1, size_int (bitint_big_endian
6514 : ? cnt - 1 : 0));
6515 2556 : if (!useless_type_conversion_p (lhs_type, TREE_TYPE (r1)))
6516 2449 : r1 = add_cast (lhs_type, r1);
 : /* A two-limb result additionally ors in the second limb shifted
 : left by limb_prec. */
6517 2556 : if (TYPE_PRECISION (lhs_type) > limb_prec)
6518 : {
6519 70 : m_data_cnt = 0;
6520 70 : m_first = false;
6521 70 : tree r2 = handle_operand (rhs1, size_int (bitint_big_endian
6522 : ? cnt - 2 : 1));
6523 70 : r2 = add_cast (lhs_type, r2);
6524 70 : g = gimple_build_assign (make_ssa_name (lhs_type), LSHIFT_EXPR, r2,
6525 : build_int_cst (unsigned_type_node,
6526 70 : limb_prec));
6527 70 : insert_before (g);
6528 70 : g = gimple_build_assign (make_ssa_name (lhs_type), BIT_IOR_EXPR, r1,
6529 : gimple_assign_lhs (g));
6530 70 : insert_before (g);
6531 70 : r1 = gimple_assign_lhs (g);
6532 : }
6533 2556 : if (lhs_type != TREE_TYPE (lhs))
6534 16 : g = gimple_build_assign (lhs, NOP_EXPR, r1);
6535 : else
6536 2540 : g = gimple_build_assign (lhs, r1);
6537 2556 : gsi_replace (&m_gsi, g, true);
6538 2556 : return;
6539 : }
 : /* Remaining non-mergeable assigns each have a dedicated lowering. */
6540 6816 : if (is_gimple_assign (stmt))
6541 6816 : switch (gimple_assign_rhs_code (stmt))
6542 : {
6543 464 : case LSHIFT_EXPR:
6544 464 : case RSHIFT_EXPR:
6545 464 : lower_shift_stmt (NULL_TREE, stmt);
6546 464 : return;
6547 150 : case MULT_EXPR:
6548 150 : case TRUNC_DIV_EXPR:
6549 150 : case EXACT_DIV_EXPR:
6550 150 : case TRUNC_MOD_EXPR:
6551 150 : lower_muldiv_stmt (NULL_TREE, stmt);
6552 150 : return;
6553 273 : case FIX_TRUNC_EXPR:
6554 273 : case FLOAT_EXPR:
6555 273 : lower_float_conv_stmt (NULL_TREE, stmt);
6556 273 : return;
6557 5911 : case REALPART_EXPR:
6558 5911 : case IMAGPART_EXPR:
6559 5911 : lower_cplxpart_stmt (NULL_TREE, stmt);
6560 5911 : return;
6561 18 : case COMPLEX_EXPR:
6562 18 : lower_complexexpr_stmt (stmt);
6563 18 : return;
6564 : default:
6565 : break;
6566 : }
6567 0 : gcc_unreachable ();
6568 : }
6569 :
6570 : /* Helper for walk_non_aliased_vuses. Determine if we arrived at
6571 : the desired memory state. */
6572 :
6573 : void *
6574 2180 : vuse_eq (ao_ref *, tree vuse1, void *data)
6575 : {
6576 2180 : tree vuse2 = (tree) data;
6577 2180 : if (vuse1 == vuse2)
6578 818 : return data;
6579 :
6580 : return NULL;
6581 : }
6582 :
6583 : /* Return true if STMT uses a library function and needs to take
6584 : address of its inputs. We need to avoid bit-fields in those
6585 : cases. Similarly, we need to avoid overlap between destination
6586 : and source limb arrays. */
6587 :
6588 : bool
6589 15050 : stmt_needs_operand_addr (gimple *stmt)
6590 : {
6591 15050 : if (is_gimple_assign (stmt))
6592 10272 : switch (gimple_assign_rhs_code (stmt))
6593 : {
6594 585 : case MULT_EXPR:
6595 585 : case TRUNC_DIV_EXPR:
6596 585 : case EXACT_DIV_EXPR:
6597 585 : case TRUNC_MOD_EXPR:
6598 585 : case FLOAT_EXPR:
6599 585 : return true;
6600 : default:
6601 : break;
6602 : }
6603 4778 : else if (gimple_call_internal_p (stmt, IFN_MUL_OVERFLOW)
6604 4778 : || gimple_call_internal_p (stmt, IFN_UBSAN_CHECK_MUL))
6605 : return true;
6606 : return false;
6607 : }
6608 :
6609 : /* Dominator walker used to discover which large/huge _BitInt
6610 : loads could be sunk into all their uses. */
6611 :
6612 604 : class bitint_dom_walker : public dom_walker
6613 : {
6614 : public:
 : /* NAMES: large/huge _BitInt SSA_NAMEs handled at their definition
 : point. LOADS: candidate loads for sinking into their uses; bits
 : are cleared during the walk when sinking would be unsafe. */
6615 302 : bitint_dom_walker (bitmap names, bitmap loads)
6616 604 : : dom_walker (CDI_DOMINATORS), m_names (names), m_loads (loads) {}
6617 :
6618 : edge before_dom_children (basic_block) final override;
6619 :
6620 : private:
6621 : bitmap m_names, m_loads;
6622 : };
6623 :
6624 : edge
6625 4487 : bitint_dom_walker::before_dom_children (basic_block bb)
6626 : {
 : /* Track the virtual operand (vop) live at each point; bb->aux caches
 : the vop live at the end of BB for dominated children. */
6627 4487 : gphi *phi = get_virtual_phi (bb);
6628 4487 : tree vop;
6629 4487 : if (phi)
6630 794 : vop = gimple_phi_result (phi);
6631 3693 : else if (bb == ENTRY_BLOCK_PTR_FOR_FN (cfun))
6632 : vop = NULL_TREE;
6633 : else
6634 3391 : vop = (tree) get_immediate_dominator (CDI_DOMINATORS, bb)->aux;
6635 :
6636 4487 : auto_vec<tree, 16> worklist;
6637 8974 : for (gimple_stmt_iterator gsi = gsi_start_bb (bb);
6638 19926 : !gsi_end_p (gsi); gsi_next (&gsi))
6639 : {
6640 15439 : gimple *stmt = gsi_stmt (gsi);
6641 15439 : if (is_gimple_debug (stmt))
6642 2767 : continue;
6643 :
6644 15474 : if (!vop && gimple_vuse (stmt))
6645 : vop = gimple_vuse (stmt);
6646 :
 : /* cvop is the vop before this stmt, vop the one after it. */
6647 15050 : tree cvop = vop;
6648 28244 : if (gimple_vdef (stmt))
6649 15050 : vop = gimple_vdef (stmt);
6650 :
6651 15050 : tree lhs = gimple_get_lhs (stmt);
6652 17428 : if (lhs
6653 10939 : && TREE_CODE (lhs) == SSA_NAME
6654 8722 : && TREE_CODE (TREE_TYPE (lhs)) == BITINT_TYPE
6655 5853 : && bitint_precision_kind (TREE_TYPE (lhs)) >= bitint_prec_large
6656 20755 : && !bitmap_bit_p (m_names, SSA_NAME_VERSION (lhs)))
6657 : /* If lhs of stmt is large/huge _BitInt SSA_NAME not in m_names,
6658 : it means it will be handled in a loop or straight line code
6659 : at the location of its (ultimate) immediate use, so for
6660 : vop checking purposes check these only at the ultimate
6661 : immediate use. */
6662 2378 : continue;
6663 :
 : /* Collect the large/huge _BitInt operands of STMT; names not in
 : m_names are expanded recursively through their def stmts via the
 : worklist below, because they will be sunk into this stmt. */
6664 12672 : ssa_op_iter oi;
6665 12672 : use_operand_p use_p;
6666 21488 : FOR_EACH_SSA_USE_OPERAND (use_p, stmt, oi, SSA_OP_USE)
6667 : {
6668 8816 : tree s = USE_FROM_PTR (use_p);
6669 8816 : if (TREE_CODE (TREE_TYPE (s)) == BITINT_TYPE
6670 8816 : && bitint_precision_kind (TREE_TYPE (s)) >= bitint_prec_large)
6671 3251 : worklist.safe_push (s);
6672 : }
6673 :
6674 12672 : bool needs_operand_addr = stmt_needs_operand_addr (stmt);
6675 31431 : while (worklist.length () > 0)
6676 : {
6677 6087 : tree s = worklist.pop ();
6678 :
6679 6087 : if (!bitmap_bit_p (m_names, SSA_NAME_VERSION (s)))
6680 : {
6681 2378 : gimple *g = SSA_NAME_DEF_STMT (s);
6682 2378 : needs_operand_addr |= stmt_needs_operand_addr (g);
6683 5453 : FOR_EACH_SSA_USE_OPERAND (use_p, g, oi, SSA_OP_USE)
6684 : {
6685 3075 : tree s2 = USE_FROM_PTR (use_p);
6686 3075 : if (TREE_CODE (TREE_TYPE (s2)) == BITINT_TYPE
6687 3075 : && (bitint_precision_kind (TREE_TYPE (s2))
6688 : >= bitint_prec_large))
6689 2836 : worklist.safe_push (s2);
6690 : }
6691 3097 : continue;
6692 2378 : }
 : /* Look through casts whose source is itself a tracked load. */
6693 3709 : if (!SSA_NAME_OCCURS_IN_ABNORMAL_PHI (s)
6694 3709 : && gimple_assign_cast_p (SSA_NAME_DEF_STMT (s)))
6695 : {
6696 226 : tree rhs = gimple_assign_rhs1 (SSA_NAME_DEF_STMT (s));
6697 396 : if (TREE_CODE (rhs) == SSA_NAME
6698 226 : && bitmap_bit_p (m_loads, SSA_NAME_VERSION (rhs)))
6699 : s = rhs;
6700 : else
6701 170 : continue;
6702 : }
6703 3483 : else if (!bitmap_bit_p (m_loads, SSA_NAME_VERSION (s)))
6704 547 : continue;
6705 :
 : /* Library-call operands must be addressable whole limbs, so
 : disallow sinking most bit-field loads for them. */
6706 2992 : gimple *g = SSA_NAME_DEF_STMT (s);
6707 2992 : tree rhs1 = gimple_assign_rhs1 (g);
6708 2992 : if (needs_operand_addr
6709 214 : && TREE_CODE (rhs1) == COMPONENT_REF
6710 3009 : && DECL_BIT_FIELD_TYPE (TREE_OPERAND (rhs1, 1)))
6711 : {
6712 4 : tree fld = TREE_OPERAND (rhs1, 1);
6713 : /* For little-endian, we can allow as inputs bit-fields
6714 : which start at a limb boundary. */
6715 6 : if (!bitint_big_endian
6716 4 : && DECL_OFFSET_ALIGN (fld) >= TYPE_ALIGN (TREE_TYPE (rhs1))
6717 4 : && tree_fits_uhwi_p (DECL_FIELD_BIT_OFFSET (fld))
6718 4 : && (tree_to_uhwi (DECL_FIELD_BIT_OFFSET (fld))
6719 4 : % limb_prec) == 0)
6720 : ;
6721 : else
6722 : {
6723 2 : bitmap_clear_bit (m_loads, SSA_NAME_VERSION (s));
6724 2 : continue;
6725 : }
6726 : }
6727 :
 : /* Verify the loaded memory is unchanged between the load and this
 : use; otherwise the load cannot be sunk here, so clear its bit. */
6728 2990 : ao_ref ref;
6729 2990 : ao_ref_init (&ref, rhs1);
6730 2990 : tree lvop = gimple_vuse (g);
6731 2990 : unsigned limit = 64;
6732 2990 : tree vuse = cvop;
6733 2990 : if (vop != cvop
6734 1330 : && is_gimple_assign (stmt)
6735 1328 : && gimple_store_p (stmt)
6736 4318 : && (needs_operand_addr
6737 1139 : || !operand_equal_p (lhs, gimple_assign_rhs1 (g), 0)))
6738 : vuse = vop;
6739 2990 : if (vuse != lvop
6740 2990 : && walk_non_aliased_vuses (&ref, vuse, false, vuse_eq,
6741 : NULL, NULL, NULL, limit, lvop) == NULL)
6742 526 : bitmap_clear_bit (m_loads, SSA_NAME_VERSION (s));
6743 : }
6744 : }
6745 :
6746 4487 : bb->aux = (void *) vop;
6747 4487 : return NULL;
6748 4487 : }
6749 :
6750 : }
6751 :
6752 : /* Replacement for normal processing of STMT in tree-ssa-coalesce.cc
6753 : build_ssa_conflict_graph.
6754 : The differences are:
6755 : 1) don't process assignments with large/huge _BitInt lhs not in NAMES
6756 : 2) for large/huge _BitInt multiplication/division/modulo process def
6757 : only after processing uses rather than before to make uses conflict
6758 : with the definition
6759 : 3) for large/huge _BitInt uses not in NAMES mark the uses of their
6760 : SSA_NAME_DEF_STMT (recursively), because those uses will be sunk into
6761 : the final statement. */
6762 :
6763 : void
6764 83376 : build_bitint_stmt_ssa_conflicts (gimple *stmt, live_track *live,
6765 : ssa_conflicts *graph, bitmap names,
6766 : void (*def) (live_track *, tree,
6767 : ssa_conflicts *),
6768 : void (*use) (live_track *, tree),
6769 : void (*clear) (live_track *, tree))
6770 : {
 : /* muldiv_p: the def must be processed after the uses so it conflicts
 : with all of them (see the comment above this function). */
6771 83376 : bool muldiv_p = false;
6772 83376 : tree lhs = NULL_TREE;
6773 83376 : if (is_gimple_assign (stmt))
6774 : {
6775 45823 : lhs = gimple_assign_lhs (stmt);
6776 45823 : if (TREE_CODE (lhs) == SSA_NAME)
6777 : {
6778 33083 : tree type = TREE_TYPE (lhs);
6779 33083 : if (TREE_CODE (type) == COMPLEX_TYPE)
6780 63 : type = TREE_TYPE (type);
6781 33083 : if (TREE_CODE (type) == BITINT_TYPE
6782 33083 : && bitint_precision_kind (type) >= bitint_prec_large)
6783 : {
6784 19822 : if (!bitmap_bit_p (names, SSA_NAME_VERSION (lhs)))
6785 4849 : return;
6786 :
6787 : /* A copy between 2 partitions does not introduce an interference
6788 : by itself. If they did, you would never be able to coalesce
6789 : two things which are copied. If the two variables really do
6790 : conflict, they will conflict elsewhere in the program.
6791 :
6792 : This is handled by simply removing the SRC of the copy from
6793 : the live list, and processing the stmt normally.
6794 :
6795 : Don't do this if lhs is not in names though, in such cases
6796 : it is actually used at some point later in the basic
6797 : block. */
6798 14973 : if (gimple_assign_copy_p (stmt))
6799 : {
6800 1665 : tree rhs1 = gimple_assign_rhs1 (stmt);
6801 1665 : if (TREE_CODE (rhs1) == SSA_NAME)
6802 31 : clear (live, rhs1);
6803 : }
6804 :
6805 14973 : switch (gimple_assign_rhs_code (stmt))
6806 : {
6807 150 : case MULT_EXPR:
6808 150 : case TRUNC_DIV_EXPR:
6809 150 : case EXACT_DIV_EXPR:
6810 150 : case TRUNC_MOD_EXPR:
6811 150 : muldiv_p = true;
6812 : default:
6813 : break;
6814 : }
6815 : }
6816 : }
6817 : }
 : /* On big-endian the overflow internal fns also need the def processed
 : after the uses. */
6818 37553 : else if (bitint_big_endian
6819 0 : && is_gimple_call (stmt)
6820 37553 : && gimple_call_internal_p (stmt))
6821 0 : switch (gimple_call_internal_fn (stmt))
6822 : {
6823 0 : case IFN_ADD_OVERFLOW:
6824 0 : case IFN_SUB_OVERFLOW:
6825 0 : case IFN_UBSAN_CHECK_ADD:
6826 0 : case IFN_UBSAN_CHECK_SUB:
6827 0 : case IFN_MUL_OVERFLOW:
6828 0 : case IFN_UBSAN_CHECK_MUL:
6829 0 : lhs = gimple_call_lhs (stmt);
6830 0 : if (lhs)
6831 78527 : muldiv_p = true;
6832 : break;
6833 : default:
6834 : break;
6835 : }
6836 :
6837 157054 : auto_vec<tree, 16> worklist;
6838 78527 : ssa_op_iter iter;
6839 78527 : tree var;
6840 : /* On little-endian, mergeable ops process limbs from 0 up so except
6841 : for multiplication/division/modulo there is no risk in using the
6842 : same underlying variable for lhs and some operand, even when casts
6843 : are involved, the lhs limb is stored only after processing the source
6844 : limbs with the same index.
6845 : For multiplication/division/modulo, the libgcc library function requires
6846 : no aliasing between result and sources.
6847 : On big-endian, even mergeable ops limb processing can be problematic
6848 : though, because it can apply various index corrections e.g. when there
6849 : is a cast from operand with different number of limbs. So, make the
6850 : lhs conflict with all the operands which are (for now virtually) used on
6851 : the current stmt if there is any mismatch in the number of limbs between
6852 : operands and the lhs. */
6853 78527 : if (bitint_big_endian && lhs && !muldiv_p)
6854 : {
6855 0 : tree ltype = TREE_TYPE (lhs);
6856 0 : if (TREE_CODE (ltype) == COMPLEX_TYPE)
6857 : muldiv_p = true;
6858 0 : else if (TREE_CODE (lhs) == SSA_NAME
6859 0 : && TREE_CODE (ltype) == BITINT_TYPE
6860 0 : && bitint_precision_kind (ltype) >= bitint_prec_large)
6861 : {
6862 0 : unsigned lnelts = CEIL (TYPE_PRECISION (ltype), limb_prec);
6863 0 : FOR_EACH_SSA_TREE_OPERAND (var, stmt, iter, SSA_OP_USE)
6864 : {
6865 0 : tree type = TREE_TYPE (var);
6866 0 : if (TREE_CODE (type) == COMPLEX_TYPE)
6867 0 : type = TREE_TYPE (type);
6868 0 : if (TREE_CODE (type) == BITINT_TYPE
6869 0 : && bitint_precision_kind (type) >= bitint_prec_large)
6870 : {
6871 0 : if (bitmap_bit_p (names, SSA_NAME_VERSION (var)))
6872 : {
6873 0 : unsigned nelts = CEIL (TYPE_PRECISION (type), limb_prec);
6874 0 : if (TREE_CODE (TREE_TYPE (var)) == COMPLEX_TYPE
6875 0 : || lnelts != nelts)
6876 : {
6877 0 : muldiv_p = true;
6878 : break;
6879 : }
6880 : }
6881 : else
6882 0 : worklist.safe_push (var);
6883 : }
6884 : }
6885 :
 : /* Uses not in NAMES are sunk into the stmt; check their own
 : operands (recursively) for a limb-count mismatch too. */
6886 0 : while (!muldiv_p && worklist.length () > 0)
6887 : {
6888 0 : tree s = worklist.pop ();
6889 0 : FOR_EACH_SSA_TREE_OPERAND (var, SSA_NAME_DEF_STMT (s), iter,
6890 : SSA_OP_USE)
6891 : {
6892 0 : tree type = TREE_TYPE (var);
6893 0 : if (TREE_CODE (type) == COMPLEX_TYPE)
6894 0 : type = TREE_TYPE (type);
6895 0 : if (TREE_CODE (type) == BITINT_TYPE
6896 0 : && bitint_precision_kind (type) >= bitint_prec_large)
6897 : {
6898 0 : if (bitmap_bit_p (names, SSA_NAME_VERSION (var)))
6899 : {
6900 0 : unsigned nelts = CEIL (TYPE_PRECISION (type),
6901 : limb_prec);
6902 0 : if (TREE_CODE (TREE_TYPE (var)) == COMPLEX_TYPE
6903 0 : || lnelts != nelts)
6904 : {
6905 : muldiv_p = true;
6906 : break;
6907 : }
6908 : }
6909 : else
6910 0 : worklist.safe_push (var);
6911 : }
6912 : }
6913 : }
6914 0 : worklist.truncate (0);
6915 : }
6916 : }
6917 :
6918 78527 : if (!muldiv_p)
6919 : {
6920 : /* For stmts with more than one SSA_NAME definition pretend all the
6921 : SSA_NAME outputs but the first one are live at this point, so
6922 : that conflicts are added in between all those even when they are
6923 : actually not really live after the asm, because expansion might
6924 : copy those into pseudos after the asm and if multiple outputs
6925 : share the same partition, it might overwrite those that should
6926 : be live. E.g.
6927 : asm volatile (".." : "=r" (a) : "=r" (b) : "0" (a), "1" (a));
6928 : return a;
6929 : See PR70593. */
6930 78377 : bool first = true;
6931 115480 : FOR_EACH_SSA_TREE_OPERAND (var, stmt, iter, SSA_OP_DEF)
6932 37103 : if (first)
6933 : first = false;
6934 : else
6935 0 : use (live, var);
6936 :
6937 115480 : FOR_EACH_SSA_TREE_OPERAND (var, stmt, iter, SSA_OP_DEF)
6938 37103 : def (live, var, graph);
6939 : }
6940 :
 : /* Process uses; uses not in NAMES are replaced (recursively, via the
 : worklist) by the uses of their defining statements, since those
 : stmts will be sunk into this one. */
6941 133950 : FOR_EACH_SSA_TREE_OPERAND (var, stmt, iter, SSA_OP_USE)
6942 : {
6943 55423 : tree type = TREE_TYPE (var);
6944 55423 : if (TREE_CODE (type) == COMPLEX_TYPE)
6945 6287 : type = TREE_TYPE (type);
6946 55423 : if (TREE_CODE (type) == BITINT_TYPE
6947 55423 : && bitint_precision_kind (type) >= bitint_prec_large)
6948 : {
6949 35790 : if (bitmap_bit_p (names, SSA_NAME_VERSION (var)))
6950 30743 : use (live, var);
6951 : else
6952 5047 : worklist.safe_push (var);
6953 : }
6954 : }
6955 :
6956 86562 : while (worklist.length () > 0)
6957 : {
6958 8035 : tree s = worklist.pop ();
6959 16806 : FOR_EACH_SSA_TREE_OPERAND (var, SSA_NAME_DEF_STMT (s), iter, SSA_OP_USE)
6960 : {
6961 8771 : tree type = TREE_TYPE (var);
6962 8771 : if (TREE_CODE (type) == COMPLEX_TYPE)
6963 1673 : type = TREE_TYPE (type);
6964 8771 : if (TREE_CODE (type) == BITINT_TYPE
6965 8771 : && bitint_precision_kind (type) >= bitint_prec_large)
6966 : {
6967 8113 : if (bitmap_bit_p (names, SSA_NAME_VERSION (var)))
6968 5125 : use (live, var);
6969 : else
6970 2988 : worklist.safe_push (var);
6971 : }
6972 : }
6973 : }
6974 :
 : /* Deferred def processing (see muldiv_p above). */
6975 78527 : if (muldiv_p)
6976 150 : def (live, lhs, graph);
6977 : }
6978 :
6979 : /* If STMT is .{ADD,SUB,MUL}_OVERFLOW with INTEGER_CST arguments,
6980 : return the largest bitint_prec_kind of them, otherwise return
6981 : bitint_prec_small. */
6982 :
6983 : static bitint_prec_kind
6984 190629 : arith_overflow_arg_kind (gimple *stmt)
6985 : {
6986 190629 : bitint_prec_kind ret = bitint_prec_small;
           : /* Only internal-function calls can be the arithmetic overflow
           : ifns; anything else keeps the bitint_prec_small default.  */
6987 190629 : if (is_gimple_call (stmt) && gimple_call_internal_p (stmt))
6988 88121 : switch (gimple_call_internal_fn (stmt))
6989 : {
6990 : case IFN_ADD_OVERFLOW:
6991 : case IFN_SUB_OVERFLOW:
6992 : case IFN_MUL_OVERFLOW:
           : /* These ifns take exactly two arguments; inspect both.  */
6993 223827 : for (int i = 0; i < 2; ++i)
6994 : {
6995 149218 : tree a = gimple_call_arg (stmt, i);
           : /* Only INTEGER_CST arguments of BITINT_TYPE are of
           : interest here; SSA_NAME arguments are discovered by
           : the callers through other means.  */
6996 149218 : if (TREE_CODE (a) == INTEGER_CST
6997 149218 : && TREE_CODE (TREE_TYPE (a)) == BITINT_TYPE)
6998 : {
           : /* Keep the widest precision kind seen so far.  */
6999 5928 : bitint_prec_kind kind = bitint_precision_kind (TREE_TYPE (a));
7000 149218 : ret = MAX (ret, kind);
7001 : }
7002 : }
7003 : break;
7004 : default:
           : /* Other internal functions have no _BitInt constant
           : arguments that need lowering consideration here.  */
7005 : break;
7006 : }
7007 190629 : return ret;
7008 : }
7009 :
7010 : /* Entry point for _BitInt(N) operation lowering during optimization. */
7011 :
7012 : static unsigned int
7013 1475305 : gimple_lower_bitint (void)
7014 : {
7015 1475305 : small_max_prec = mid_min_prec = large_min_prec = huge_min_prec = 0;
7016 1475305 : limb_prec = abi_limb_prec = 0;
7017 1475305 : bitint_big_endian = false;
7018 :
7019 1475305 : unsigned int i;
7020 62944133 : for (i = 0; i < num_ssa_names; ++i)
7021 : {
7022 61476050 : tree s = ssa_name (i);
7023 61476050 : if (s == NULL)
7024 13101252 : continue;
7025 48374798 : tree type = TREE_TYPE (s);
7026 48374798 : if (TREE_CODE (type) == COMPLEX_TYPE)
7027 : {
7028 179983 : if (arith_overflow_arg_kind (SSA_NAME_DEF_STMT (s))
7029 : != bitint_prec_small)
7030 : break;
7031 179870 : type = TREE_TYPE (type);
7032 : }
7033 48374685 : if (TREE_CODE (type) == BITINT_TYPE
7034 48374685 : && bitint_precision_kind (type) != bitint_prec_small)
7035 : break;
7036 : /* We need to also rewrite stores of large/huge _BitInt INTEGER_CSTs
7037 : into memory. Such functions could have no large/huge SSA_NAMEs. */
7038 48367641 : if (SSA_NAME_IS_VIRTUAL_OPERAND (s))
7039 : {
7040 21099714 : gimple *g = SSA_NAME_DEF_STMT (s);
7041 21099714 : if (is_gimple_assign (g) && gimple_store_p (g))
7042 : {
7043 10688505 : tree t = gimple_assign_rhs1 (g);
7044 10688505 : if (TREE_CODE (TREE_TYPE (t)) == BITINT_TYPE
7045 10688505 : && (bitint_precision_kind (TREE_TYPE (t))
7046 : >= bitint_prec_large))
7047 : break;
7048 : }
7049 : }
7050 : /* Similarly, e.g. with -frounding-math casts from _BitInt INTEGER_CSTs
7051 : to floating point types need to be rewritten. */
7052 27267927 : else if (SCALAR_FLOAT_TYPE_P (type))
7053 : {
7054 2293286 : gimple *g = SSA_NAME_DEF_STMT (s);
7055 2293286 : if (is_gimple_assign (g) && gimple_assign_rhs_code (g) == FLOAT_EXPR)
7056 : {
7057 127687 : tree t = gimple_assign_rhs1 (g);
7058 127687 : if (TREE_CODE (t) == INTEGER_CST
7059 110 : && TREE_CODE (TREE_TYPE (t)) == BITINT_TYPE
7060 127688 : && (bitint_precision_kind (TREE_TYPE (t))
7061 : != bitint_prec_small))
7062 : break;
7063 : }
7064 : }
7065 : }
7066 2950610 : if (i == num_ssa_names)
7067 : return 0;
7068 :
7069 7222 : basic_block bb;
7070 7222 : auto_vec<gimple *, 4> switch_statements;
7071 45631 : FOR_EACH_BB_FN (bb, cfun)
7072 : {
7073 114489 : if (gswitch *swtch = safe_dyn_cast <gswitch *> (*gsi_last_bb (bb)))
7074 : {
7075 23 : tree idx = gimple_switch_index (swtch);
7076 23 : if (TREE_CODE (TREE_TYPE (idx)) != BITINT_TYPE
7077 23 : || bitint_precision_kind (TREE_TYPE (idx)) < bitint_prec_large)
7078 12 : continue;
7079 :
7080 11 : if (optimize)
7081 6 : group_case_labels_stmt (swtch);
7082 11 : if (gimple_switch_num_labels (swtch) == 1)
7083 : {
7084 1 : single_succ_edge (bb)->flags |= EDGE_FALLTHRU;
7085 1 : gimple_stmt_iterator gsi = gsi_for_stmt (swtch);
7086 1 : gsi_remove (&gsi, true);
7087 : }
7088 : else
7089 10 : switch_statements.safe_push (swtch);
7090 : }
7091 : }
7092 :
7093 7222 : if (!switch_statements.is_empty ())
7094 : {
7095 10 : bool expanded = false;
7096 10 : gimple *stmt;
7097 10 : unsigned int j;
7098 10 : i = 0;
7099 20 : FOR_EACH_VEC_ELT (switch_statements, j, stmt)
7100 : {
7101 10 : gswitch *swtch = as_a<gswitch *> (stmt);
7102 10 : tree_switch_conversion::switch_decision_tree dt (swtch);
7103 10 : expanded |= dt.analyze_switch_statement ();
7104 10 : }
7105 :
7106 10 : if (expanded)
7107 : {
7108 10 : free_dominance_info (CDI_DOMINATORS);
7109 10 : free_dominance_info (CDI_POST_DOMINATORS);
7110 10 : mark_virtual_operands_for_renaming (cfun);
7111 10 : cleanup_tree_cfg (TODO_update_ssa);
7112 : }
7113 : }
7114 :
7115 7222 : struct bitint_large_huge large_huge;
7116 7222 : bool has_large_huge_parm_result = false;
7117 7222 : bool has_large_huge = false;
7118 7222 : unsigned int ret = 0, first_large_huge = ~0U;
7119 7222 : bool edge_insertions = false;
7120 125197 : for (; i < num_ssa_names; ++i)
7121 : {
7122 117975 : tree s = ssa_name (i);
7123 117975 : if (s == NULL)
7124 2742 : continue;
7125 115233 : tree type = TREE_TYPE (s);
7126 115233 : if (TREE_CODE (type) == COMPLEX_TYPE)
7127 : {
7128 5338 : if (arith_overflow_arg_kind (SSA_NAME_DEF_STMT (s))
7129 : >= bitint_prec_large)
7130 1957 : has_large_huge = true;
7131 5338 : type = TREE_TYPE (type);
7132 : }
7133 115233 : if (TREE_CODE (type) == BITINT_TYPE
7134 115233 : && bitint_precision_kind (type) >= bitint_prec_large)
7135 : {
7136 35707 : if (first_large_huge == ~0U)
7137 5679 : first_large_huge = i;
7138 35707 : gimple *stmt = SSA_NAME_DEF_STMT (s), *g;
7139 35707 : gimple_stmt_iterator gsi;
7140 35707 : tree_code rhs_code;
7141 : /* Unoptimize certain constructs to simpler alternatives to
7142 : avoid having to lower all of them. */
7143 35707 : if (is_gimple_assign (stmt) && gimple_bb (stmt))
7144 22085 : switch (rhs_code = gimple_assign_rhs_code (stmt))
7145 : {
7146 : default:
7147 : break;
7148 354 : case MULT_EXPR:
7149 354 : case TRUNC_DIV_EXPR:
7150 354 : case EXACT_DIV_EXPR:
7151 354 : case TRUNC_MOD_EXPR:
7152 354 : if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (s))
7153 : {
7154 2 : location_t loc = gimple_location (stmt);
7155 2 : gsi = gsi_for_stmt (stmt);
7156 2 : tree rhs1 = gimple_assign_rhs1 (stmt);
7157 2 : tree rhs2 = gimple_assign_rhs2 (stmt);
7158 : /* For multiplication and division with (ab)
7159 : lhs and one or both operands force the operands
7160 : into new SSA_NAMEs to avoid coalescing failures. */
7161 2 : if (TREE_CODE (rhs1) == SSA_NAME
7162 2 : && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (rhs1))
7163 : {
7164 2 : first_large_huge = 0;
7165 2 : tree t = make_ssa_name (TREE_TYPE (rhs1));
7166 2 : g = gimple_build_assign (t, SSA_NAME, rhs1);
7167 2 : gsi_insert_before (&gsi, g, GSI_SAME_STMT);
7168 2 : gimple_set_location (g, loc);
7169 2 : gimple_assign_set_rhs1 (stmt, t);
7170 2 : if (rhs1 == rhs2)
7171 : {
7172 0 : gimple_assign_set_rhs2 (stmt, t);
7173 0 : rhs2 = t;
7174 : }
7175 2 : update_stmt (stmt);
7176 : }
7177 2 : if (TREE_CODE (rhs2) == SSA_NAME
7178 2 : && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (rhs2))
7179 : {
7180 0 : first_large_huge = 0;
7181 0 : tree t = make_ssa_name (TREE_TYPE (rhs2));
7182 0 : g = gimple_build_assign (t, SSA_NAME, rhs2);
7183 0 : gsi_insert_before (&gsi, g, GSI_SAME_STMT);
7184 0 : gimple_set_location (g, loc);
7185 0 : gimple_assign_set_rhs2 (stmt, t);
7186 0 : update_stmt (stmt);
7187 : }
7188 : }
7189 : break;
7190 3 : case LROTATE_EXPR:
7191 3 : case RROTATE_EXPR:
7192 3 : {
7193 3 : first_large_huge = 0;
7194 3 : location_t loc = gimple_location (stmt);
7195 3 : gsi = gsi_for_stmt (stmt);
7196 3 : tree rhs1 = gimple_assign_rhs1 (stmt);
7197 3 : tree type = TREE_TYPE (rhs1);
7198 3 : tree n = gimple_assign_rhs2 (stmt), m;
7199 3 : tree p = build_int_cst (TREE_TYPE (n),
7200 3 : TYPE_PRECISION (type));
7201 3 : if (TREE_CODE (n) == INTEGER_CST)
7202 : {
7203 0 : if (integer_zerop (n))
7204 : m = n;
7205 : else
7206 0 : m = fold_build2 (MINUS_EXPR, TREE_TYPE (n), p, n);
7207 : }
7208 : else
7209 : {
7210 3 : tree tem = make_ssa_name (TREE_TYPE (n));
7211 3 : g = gimple_build_assign (tem, MINUS_EXPR, p, n);
7212 3 : gsi_insert_before (&gsi, g, GSI_SAME_STMT);
7213 3 : gimple_set_location (g, loc);
7214 3 : m = make_ssa_name (TREE_TYPE (n));
7215 3 : g = gimple_build_assign (m, TRUNC_MOD_EXPR, tem, p);
7216 3 : gsi_insert_before (&gsi, g, GSI_SAME_STMT);
7217 3 : gimple_set_location (g, loc);
7218 : }
7219 3 : if (!TYPE_UNSIGNED (type))
7220 : {
7221 0 : tree utype = build_bitint_type (TYPE_PRECISION (type),
7222 : 1);
7223 0 : if (TREE_CODE (rhs1) == INTEGER_CST)
7224 0 : rhs1 = fold_convert (utype, rhs1);
7225 : else
7226 : {
7227 0 : tree t = make_ssa_name (type);
7228 0 : g = gimple_build_assign (t, NOP_EXPR, rhs1);
7229 0 : gsi_insert_before (&gsi, g, GSI_SAME_STMT);
7230 0 : gimple_set_location (g, loc);
7231 : }
7232 : }
7233 4 : g = gimple_build_assign (make_ssa_name (TREE_TYPE (rhs1)),
7234 : rhs_code == LROTATE_EXPR
7235 : ? LSHIFT_EXPR : RSHIFT_EXPR,
7236 : rhs1, n);
7237 3 : gsi_insert_before (&gsi, g, GSI_SAME_STMT);
7238 3 : gimple_set_location (g, loc);
7239 3 : tree op1 = gimple_assign_lhs (g);
7240 4 : g = gimple_build_assign (make_ssa_name (TREE_TYPE (rhs1)),
7241 : rhs_code == LROTATE_EXPR
7242 : ? RSHIFT_EXPR : LSHIFT_EXPR,
7243 : rhs1, m);
7244 3 : gsi_insert_before (&gsi, g, GSI_SAME_STMT);
7245 3 : gimple_set_location (g, loc);
7246 3 : tree op2 = gimple_assign_lhs (g);
7247 3 : tree lhs = gimple_assign_lhs (stmt);
7248 3 : if (!TYPE_UNSIGNED (type))
7249 : {
7250 0 : g = gimple_build_assign (make_ssa_name (TREE_TYPE (op1)),
7251 : BIT_IOR_EXPR, op1, op2);
7252 0 : gsi_insert_before (&gsi, g, GSI_SAME_STMT);
7253 0 : gimple_set_location (g, loc);
7254 0 : g = gimple_build_assign (lhs, NOP_EXPR,
7255 : gimple_assign_lhs (g));
7256 : }
7257 : else
7258 3 : g = gimple_build_assign (lhs, BIT_IOR_EXPR, op1, op2);
7259 3 : gsi_replace (&gsi, g, true);
7260 3 : gimple_set_location (g, loc);
7261 : }
7262 3 : break;
7263 21 : case ABS_EXPR:
7264 21 : case ABSU_EXPR:
7265 21 : case MIN_EXPR:
7266 21 : case MAX_EXPR:
7267 21 : case COND_EXPR:
7268 21 : first_large_huge = 0;
7269 21 : gsi = gsi_for_stmt (stmt);
7270 21 : tree lhs = gimple_assign_lhs (stmt);
7271 21 : tree rhs1 = gimple_assign_rhs1 (stmt), rhs2 = NULL_TREE;
7272 21 : location_t loc = gimple_location (stmt);
7273 21 : if (rhs_code == ABS_EXPR)
7274 4 : g = gimple_build_cond (LT_EXPR, rhs1,
7275 4 : build_zero_cst (TREE_TYPE (rhs1)),
7276 : NULL_TREE, NULL_TREE);
7277 17 : else if (rhs_code == ABSU_EXPR)
7278 : {
7279 8 : rhs2 = make_ssa_name (TREE_TYPE (lhs));
7280 8 : g = gimple_build_assign (rhs2, NOP_EXPR, rhs1);
7281 8 : gsi_insert_before (&gsi, g, GSI_SAME_STMT);
7282 8 : gimple_set_location (g, loc);
7283 8 : g = gimple_build_cond (LT_EXPR, rhs1,
7284 8 : build_zero_cst (TREE_TYPE (rhs1)),
7285 : NULL_TREE, NULL_TREE);
7286 8 : rhs1 = rhs2;
7287 : }
7288 9 : else if (rhs_code == MIN_EXPR || rhs_code == MAX_EXPR)
7289 : {
7290 9 : rhs2 = gimple_assign_rhs2 (stmt);
7291 9 : if (TREE_CODE (rhs1) == INTEGER_CST)
7292 0 : std::swap (rhs1, rhs2);
7293 9 : g = gimple_build_cond (LT_EXPR, rhs1, rhs2,
7294 : NULL_TREE, NULL_TREE);
7295 9 : if (rhs_code == MAX_EXPR)
7296 5 : std::swap (rhs1, rhs2);
7297 : }
7298 : else
7299 : {
7300 0 : g = gimple_build_cond (NE_EXPR, rhs1,
7301 0 : build_zero_cst (TREE_TYPE (rhs1)),
7302 : NULL_TREE, NULL_TREE);
7303 0 : rhs1 = gimple_assign_rhs2 (stmt);
7304 0 : rhs2 = gimple_assign_rhs3 (stmt);
7305 : }
7306 21 : gsi_insert_before (&gsi, g, GSI_SAME_STMT);
7307 21 : gimple_set_location (g, loc);
7308 21 : edge e1 = split_block (gsi_bb (gsi), g);
7309 21 : edge e2 = split_block (e1->dest, (gimple *) NULL);
7310 21 : edge e3 = make_edge (e1->src, e2->dest, EDGE_FALSE_VALUE);
7311 21 : e3->probability = profile_probability::even ();
7312 21 : e1->flags = EDGE_TRUE_VALUE;
7313 21 : e1->probability = e3->probability.invert ();
7314 21 : if (dom_info_available_p (CDI_DOMINATORS))
7315 13 : set_immediate_dominator (CDI_DOMINATORS, e2->dest, e1->src);
7316 21 : if (rhs_code == ABS_EXPR || rhs_code == ABSU_EXPR)
7317 : {
7318 12 : gsi = gsi_after_labels (e1->dest);
7319 12 : g = gimple_build_assign (make_ssa_name (TREE_TYPE (rhs1)),
7320 : NEGATE_EXPR, rhs1);
7321 12 : gsi_insert_before (&gsi, g, GSI_SAME_STMT);
7322 12 : gimple_set_location (g, loc);
7323 12 : rhs2 = gimple_assign_lhs (g);
7324 12 : std::swap (rhs1, rhs2);
7325 : }
7326 21 : gsi = gsi_for_stmt (stmt);
7327 21 : gsi_remove (&gsi, true);
7328 21 : gphi *phi = create_phi_node (lhs, e2->dest);
7329 21 : add_phi_arg (phi, rhs1, e2, UNKNOWN_LOCATION);
7330 21 : add_phi_arg (phi, rhs2, e3, UNKNOWN_LOCATION);
7331 21 : break;
7332 : }
7333 : }
7334 : /* We need to also rewrite stores of large/huge _BitInt INTEGER_CSTs
7335 : into memory. Such functions could have no large/huge SSA_NAMEs. */
7336 79526 : else if (SSA_NAME_IS_VIRTUAL_OPERAND (s))
7337 : {
7338 50678 : gimple *g = SSA_NAME_DEF_STMT (s);
7339 50678 : if (is_gimple_assign (g) && gimple_store_p (g))
7340 : {
7341 16286 : tree t = gimple_assign_rhs1 (g);
7342 16286 : if (TREE_CODE (TREE_TYPE (t)) == BITINT_TYPE
7343 16286 : && (bitint_precision_kind (TREE_TYPE (t))
7344 : >= bitint_prec_large))
7345 : has_large_huge = true;
7346 : }
7347 : }
7348 : /* Similarly, e.g. with -frounding-math casts from _BitInt INTEGER_CSTs
7349 : to floating point types need to be rewritten. */
7350 28848 : else if (SCALAR_FLOAT_TYPE_P (type))
7351 : {
7352 684 : gimple *g = SSA_NAME_DEF_STMT (s);
7353 684 : if (is_gimple_assign (g) && gimple_assign_rhs_code (g) == FLOAT_EXPR)
7354 : {
7355 177 : tree t = gimple_assign_rhs1 (g);
7356 177 : if (TREE_CODE (t) == INTEGER_CST
7357 1 : && TREE_CODE (TREE_TYPE (t)) == BITINT_TYPE
7358 178 : && (bitint_precision_kind (TREE_TYPE (t))
7359 : >= bitint_prec_large))
7360 : has_large_huge = true;
7361 : }
7362 : }
7363 : }
7364 101887 : for (i = first_large_huge; i < num_ssa_names; ++i)
7365 : {
7366 94665 : tree s = ssa_name (i);
7367 94665 : if (s == NULL)
7368 2431 : continue;
7369 92234 : tree type = TREE_TYPE (s);
7370 92234 : if (TREE_CODE (type) == COMPLEX_TYPE)
7371 4109 : type = TREE_TYPE (type);
7372 92234 : if (TREE_CODE (type) == BITINT_TYPE
7373 92234 : && bitint_precision_kind (type) >= bitint_prec_large)
7374 : {
7375 35707 : use_operand_p use_p;
7376 35707 : gimple *use_stmt;
7377 35707 : has_large_huge = true;
7378 37369 : if (optimize
7379 52372 : && optimizable_arith_overflow (SSA_NAME_DEF_STMT (s)))
7380 6418 : continue;
7381 : /* Ignore large/huge _BitInt SSA_NAMEs which have single use in
7382 : the same bb and could be handled in the same loop with the
7383 : immediate use. */
7384 34045 : if (optimize
7385 15003 : && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (s)
7386 14981 : && single_imm_use (s, &use_p, &use_stmt)
7387 48458 : && gimple_bb (SSA_NAME_DEF_STMT (s)) == gimple_bb (use_stmt))
7388 : {
7389 10142 : if (mergeable_op (SSA_NAME_DEF_STMT (s)))
7390 : {
7391 2156 : if (mergeable_op (use_stmt))
7392 1870 : continue;
7393 286 : tree_code cmp_code = comparison_op (use_stmt, NULL, NULL);
7394 286 : if (cmp_code == EQ_EXPR || cmp_code == NE_EXPR)
7395 26 : continue;
7396 260 : if (gimple_assign_cast_p (use_stmt))
7397 : {
7398 117 : tree lhs = gimple_assign_lhs (use_stmt);
7399 234 : if (INTEGRAL_TYPE_P (TREE_TYPE (lhs))
7400 : /* Don't merge with VIEW_CONVERT_EXPRs to
7401 : huge INTEGER_TYPEs used sometimes in memcpy
7402 : expansion. */
7403 222 : && (TREE_CODE (TREE_TYPE (lhs)) != INTEGER_TYPE
7404 8 : || (TYPE_PRECISION (TREE_TYPE (lhs))
7405 16 : <= MAX_FIXED_MODE_SIZE)))
7406 105 : continue;
7407 : }
7408 143 : else if (gimple_store_p (use_stmt)
7409 0 : && is_gimple_assign (use_stmt)
7410 0 : && !gimple_has_volatile_ops (use_stmt)
7411 143 : && !stmt_ends_bb_p (use_stmt))
7412 0 : continue;
7413 : }
7414 8141 : if (gimple_assign_cast_p (SSA_NAME_DEF_STMT (s)))
7415 : {
7416 824 : tree rhs1 = gimple_assign_rhs1 (SSA_NAME_DEF_STMT (s));
7417 824 : if (TREE_CODE (rhs1) == VIEW_CONVERT_EXPR)
7418 : {
7419 17 : rhs1 = TREE_OPERAND (rhs1, 0);
7420 17 : if (!INTEGRAL_TYPE_P (TREE_TYPE (rhs1))
7421 13 : && !POINTER_TYPE_P (TREE_TYPE (rhs1))
7422 13 : && gimple_store_p (use_stmt))
7423 6 : continue;
7424 : }
7425 1636 : if (INTEGRAL_TYPE_P (TREE_TYPE (rhs1))
7426 729 : && ((is_gimple_assign (use_stmt)
7427 682 : && (gimple_assign_rhs_code (use_stmt)
7428 : != COMPLEX_EXPR))
7429 47 : || gimple_code (use_stmt) == GIMPLE_COND)
7430 719 : && (!gimple_store_p (use_stmt)
7431 122 : || (is_gimple_assign (use_stmt)
7432 122 : && !gimple_has_volatile_ops (use_stmt)
7433 122 : && !stmt_ends_bb_p (use_stmt)))
7434 1537 : && (TREE_CODE (rhs1) != SSA_NAME
7435 719 : || !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (rhs1)))
7436 : {
7437 719 : if (is_gimple_assign (use_stmt))
7438 682 : switch (gimple_assign_rhs_code (use_stmt))
7439 : {
7440 54 : case TRUNC_DIV_EXPR:
7441 54 : case EXACT_DIV_EXPR:
7442 54 : case TRUNC_MOD_EXPR:
7443 54 : case FLOAT_EXPR:
7444 : /* For division, modulo and casts to floating
7445 : point, avoid representing unsigned operands
7446 : using negative prec if they were sign-extended
7447 : from narrower precision. */
7448 54 : if (TYPE_UNSIGNED (TREE_TYPE (s))
7449 28 : && !TYPE_UNSIGNED (TREE_TYPE (rhs1))
7450 63 : && (TYPE_PRECISION (TREE_TYPE (s))
7451 9 : > TYPE_PRECISION (TREE_TYPE (rhs1))))
7452 8 : goto force_name;
7453 : /* FALLTHRU */
7454 107 : case MULT_EXPR:
7455 107 : if (TREE_CODE (TREE_TYPE (rhs1)) != BITINT_TYPE
7456 107 : || (bitint_precision_kind (TREE_TYPE (rhs1))
7457 : < bitint_prec_large))
7458 42 : continue;
7459 : /* Uses which use handle_operand_addr can't
7460 : deal with nested casts. */
7461 65 : if (TREE_CODE (rhs1) == SSA_NAME
7462 65 : && gimple_assign_cast_p
7463 65 : (SSA_NAME_DEF_STMT (rhs1))
7464 43 : && has_single_use (rhs1)
7465 108 : && (gimple_bb (SSA_NAME_DEF_STMT (rhs1))
7466 43 : == gimple_bb (SSA_NAME_DEF_STMT (s))))
7467 43 : goto force_name;
7468 : break;
7469 0 : case VIEW_CONVERT_EXPR:
7470 0 : {
7471 0 : tree lhs = gimple_assign_lhs (use_stmt);
7472 : /* Don't merge with VIEW_CONVERT_EXPRs to
7473 : non-integral types. */
7474 0 : if (!INTEGRAL_TYPE_P (TREE_TYPE (lhs)))
7475 0 : goto force_name;
7476 : /* Don't merge with VIEW_CONVERT_EXPRs to
7477 : huge INTEGER_TYPEs used sometimes in memcpy
7478 : expansion. */
7479 0 : if (TREE_CODE (TREE_TYPE (lhs)) == INTEGER_TYPE
7480 0 : && (TYPE_PRECISION (TREE_TYPE (lhs))
7481 0 : > MAX_FIXED_MODE_SIZE))
7482 0 : goto force_name;
7483 : }
7484 : break;
7485 : default:
7486 : break;
7487 : }
7488 626 : if (TREE_CODE (TREE_TYPE (rhs1)) != BITINT_TYPE
7489 626 : || (bitint_precision_kind (TREE_TYPE (rhs1))
7490 : < bitint_prec_large))
7491 224 : continue;
7492 402 : if ((TYPE_PRECISION (TREE_TYPE (rhs1))
7493 402 : >= TYPE_PRECISION (TREE_TYPE (s)))
7494 402 : && mergeable_op (use_stmt))
7495 60 : continue;
7496 : /* Prevent merging a widening non-mergeable cast
7497 : on result of some narrower mergeable op
7498 : together with later mergeable operations. E.g.
7499 : result of _BitInt(223) addition shouldn't be
7500 : sign-extended to _BitInt(513) and have another
7501 : _BitInt(513) added to it, as handle_plus_minus
7502 : with its PHI node handling inside of handle_cast
7503 : will not work correctly. An exception is if
7504 : use_stmt is a store, this is handled directly
7505 : in lower_mergeable_stmt. */
7506 677 : if (TREE_CODE (rhs1) != SSA_NAME
7507 342 : || !has_single_use (rhs1)
7508 256 : || (gimple_bb (SSA_NAME_DEF_STMT (rhs1))
7509 256 : != gimple_bb (SSA_NAME_DEF_STMT (s)))
7510 207 : || !mergeable_op (SSA_NAME_DEF_STMT (rhs1))
7511 420 : || gimple_store_p (use_stmt))
7512 335 : continue;
7513 7 : if ((TYPE_PRECISION (TREE_TYPE (rhs1))
7514 7 : < TYPE_PRECISION (TREE_TYPE (s)))
7515 9 : && gimple_assign_cast_p (SSA_NAME_DEF_STMT (rhs1)))
7516 : {
7517 : /* Another exception is if the widening cast is
7518 : from mergeable same precision cast from something
7519 : not mergeable. */
7520 0 : tree rhs2
7521 0 : = gimple_assign_rhs1 (SSA_NAME_DEF_STMT (rhs1));
7522 0 : if (TREE_CODE (TREE_TYPE (rhs2)) == BITINT_TYPE
7523 0 : && (TYPE_PRECISION (TREE_TYPE (rhs1))
7524 0 : == TYPE_PRECISION (TREE_TYPE (rhs2))))
7525 : {
7526 0 : if (TREE_CODE (rhs2) != SSA_NAME
7527 0 : || !has_single_use (rhs2)
7528 0 : || (gimple_bb (SSA_NAME_DEF_STMT (rhs2))
7529 0 : != gimple_bb (SSA_NAME_DEF_STMT (s)))
7530 0 : || !mergeable_op (SSA_NAME_DEF_STMT (rhs2)))
7531 0 : continue;
7532 : }
7533 : }
7534 : }
7535 : }
7536 7423 : if (is_gimple_assign (SSA_NAME_DEF_STMT (s)))
7537 5365 : switch (gimple_assign_rhs_code (SSA_NAME_DEF_STMT (s)))
7538 : {
7539 1780 : case REALPART_EXPR:
7540 1780 : case IMAGPART_EXPR:
7541 1780 : {
7542 1780 : gimple *ds = SSA_NAME_DEF_STMT (s);
7543 1780 : tree rhs1 = gimple_assign_rhs1 (ds);
7544 1780 : rhs1 = TREE_OPERAND (rhs1, 0);
7545 1780 : if (TREE_CODE (rhs1) == SSA_NAME)
7546 : {
7547 1780 : gimple *g = SSA_NAME_DEF_STMT (rhs1);
7548 1780 : if (optimizable_arith_overflow (g))
7549 : {
7550 1648 : if (gimple_assign_rhs_code (ds) == IMAGPART_EXPR)
7551 1638 : continue;
7552 10 : if (gimple_store_p (use_stmt))
7553 : {
7554 : /* Punt if the cast use of IMAGPART_EXPR stmt
7555 : appears before the store use_stmt, because
7556 : optimizable arith overflow can't be
7557 : lowered at the store location in that case.
7558 : See PR121828. */
7559 10 : gimple_stmt_iterator gsi
7560 10 : = gsi_for_stmt (use_stmt);
7561 10 : unsigned int cnt = 0;
7562 12 : do
7563 : {
7564 12 : gsi_prev_nondebug (&gsi);
7565 12 : if (gsi_end_p (gsi))
7566 : break;
7567 12 : gimple *g2 = gsi_stmt (gsi);
7568 12 : if (g2 == ds)
7569 : break;
7570 3 : if (++cnt == 64)
7571 : break;
7572 3 : if (!gimple_assign_cast_p (g2))
7573 2 : continue;
7574 1 : tree rhs2 = gimple_assign_rhs1 (g2);
7575 1 : if (TREE_CODE (rhs2) != SSA_NAME)
7576 0 : continue;
7577 1 : gimple *g3 = SSA_NAME_DEF_STMT (rhs2);
7578 1 : if (!is_gimple_assign (g3))
7579 0 : continue;
7580 1 : if (gimple_assign_rhs_code (g3)
7581 : != IMAGPART_EXPR)
7582 0 : continue;
7583 1 : rhs2 = gimple_assign_rhs1 (g3);
7584 1 : rhs2 = TREE_OPERAND (rhs2, 0);
7585 1 : if (rhs2 != rhs1)
7586 0 : continue;
7587 : cnt = 64;
7588 : break;
7589 : }
7590 : while (1);
7591 10 : if (cnt == 64)
7592 : break;
7593 : }
7594 : }
7595 : }
7596 : }
7597 : /* FALLTHRU */
7598 817 : case LSHIFT_EXPR:
7599 817 : case RSHIFT_EXPR:
7600 817 : case MULT_EXPR:
7601 817 : case TRUNC_DIV_EXPR:
7602 817 : case EXACT_DIV_EXPR:
7603 817 : case TRUNC_MOD_EXPR:
7604 817 : case FIX_TRUNC_EXPR:
7605 817 : if (gimple_store_p (use_stmt)
7606 442 : && is_gimple_assign (use_stmt)
7607 442 : && !gimple_has_volatile_ops (use_stmt)
7608 1259 : && !stmt_ends_bb_p (use_stmt))
7609 : {
7610 442 : tree lhs = gimple_assign_lhs (use_stmt);
7611 : /* As multiply/division passes address of the lhs
7612 : to library function and that assumes it can extend
7613 : it to whole number of limbs, avoid merging those
7614 : with bit-field stores. Don't allow it for
7615 : shifts etc. either, so that the bit-field store
7616 : handling doesn't have to be done everywhere. */
7617 442 : if (TREE_CODE (lhs) == COMPONENT_REF
7618 442 : && DECL_BIT_FIELD_TYPE (TREE_OPERAND (lhs, 1)))
7619 : break;
7620 439 : continue;
7621 439 : }
7622 : break;
7623 : default:
7624 : break;
7625 : }
7626 : }
7627 :
7628 : /* Also ignore uninitialized uses. */
7629 29249 : if (SSA_NAME_IS_DEFAULT_DEF (s)
7630 29249 : && (!SSA_NAME_VAR (s) || VAR_P (SSA_NAME_VAR (s))))
7631 11 : continue;
7632 :
7633 29289 : force_name:
7634 29289 : if (!large_huge.m_names)
7635 5576 : large_huge.m_names = BITMAP_ALLOC (NULL);
7636 29289 : bitmap_set_bit (large_huge.m_names, SSA_NAME_VERSION (s));
7637 29289 : if (has_single_use (s))
7638 : {
7639 : tree s2 = s;
7640 : /* The coalescing hook special cases SSA_NAME copies.
7641 : Make sure not to mark in m_single_use_names single
7642 : use SSA_NAMEs copied from non-single use SSA_NAMEs. */
7643 25635 : while (gimple_assign_copy_p (SSA_NAME_DEF_STMT (s2)))
7644 : {
7645 919 : s2 = gimple_assign_rhs1 (SSA_NAME_DEF_STMT (s2));
7646 919 : if (TREE_CODE (s2) != SSA_NAME)
7647 : break;
7648 27 : if (!has_single_use (s2))
7649 : {
7650 : s2 = NULL_TREE;
7651 : break;
7652 : }
7653 : }
7654 25628 : if (s2)
7655 : {
7656 25608 : if (!large_huge.m_single_use_names)
7657 5463 : large_huge.m_single_use_names = BITMAP_ALLOC (NULL);
7658 25608 : bitmap_set_bit (large_huge.m_single_use_names,
7659 25608 : SSA_NAME_VERSION (s));
7660 : }
7661 : }
7662 29289 : if (SSA_NAME_VAR (s)
7663 7182 : && ((TREE_CODE (SSA_NAME_VAR (s)) == PARM_DECL
7664 5341 : && SSA_NAME_IS_DEFAULT_DEF (s))
7665 1878 : || TREE_CODE (SSA_NAME_VAR (s)) == RESULT_DECL))
7666 : has_large_huge_parm_result = true;
7667 29289 : if (optimize
7668 10247 : && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (s)
7669 10225 : && gimple_assign_load_p (SSA_NAME_DEF_STMT (s))
7670 6042 : && !gimple_has_volatile_ops (SSA_NAME_DEF_STMT (s))
7671 32261 : && !stmt_ends_bb_p (SSA_NAME_DEF_STMT (s)))
7672 : {
7673 2972 : use_operand_p use_p;
7674 2972 : imm_use_iterator iter;
7675 2972 : bool optimizable_load = true;
7676 8978 : FOR_EACH_IMM_USE_FAST (use_p, iter, s)
7677 : {
7678 3132 : gimple *use_stmt = USE_STMT (use_p);
7679 3132 : if (is_gimple_debug (use_stmt))
7680 0 : continue;
7681 3132 : if (gimple_code (use_stmt) == GIMPLE_PHI
7682 3120 : || is_gimple_call (use_stmt)
7683 3035 : || gimple_code (use_stmt) == GIMPLE_ASM
7684 6166 : || (is_gimple_assign (use_stmt)
7685 1902 : && (gimple_assign_rhs_code (use_stmt)
7686 : == COMPLEX_EXPR)))
7687 : {
7688 : optimizable_load = false;
7689 : break;
7690 : }
7691 2972 : }
7692 :
7693 2972 : ssa_op_iter oi;
7694 4018 : FOR_EACH_SSA_USE_OPERAND (use_p, SSA_NAME_DEF_STMT (s),
7695 : oi, SSA_OP_USE)
7696 : {
7697 1046 : tree s2 = USE_FROM_PTR (use_p);
7698 1046 : if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (s2))
7699 : {
7700 : optimizable_load = false;
7701 : break;
7702 : }
7703 : }
7704 :
7705 2972 : if (optimizable_load && !stmt_ends_bb_p (SSA_NAME_DEF_STMT (s)))
7706 : {
7707 2874 : if (!large_huge.m_loads)
7708 302 : large_huge.m_loads = BITMAP_ALLOC (NULL);
7709 2874 : bitmap_set_bit (large_huge.m_loads, SSA_NAME_VERSION (s));
7710 : }
7711 : }
7712 : }
7713 : /* We need to also rewrite stores of large/huge _BitInt INTEGER_CSTs
7714 : into memory. Such functions could have no large/huge SSA_NAMEs. */
7715 56527 : else if (SSA_NAME_IS_VIRTUAL_OPERAND (s))
7716 : {
7717 39904 : gimple *g = SSA_NAME_DEF_STMT (s);
7718 39904 : if (is_gimple_assign (g) && gimple_store_p (g))
7719 : {
7720 13647 : tree t = gimple_assign_rhs1 (g);
7721 13647 : if (TREE_CODE (TREE_TYPE (t)) == BITINT_TYPE
7722 13647 : && bitint_precision_kind (TREE_TYPE (t)) >= bitint_prec_large)
7723 : has_large_huge = true;
7724 : }
7725 : }
7726 : }
7727 :
7728 7222 : if (large_huge.m_names || has_large_huge)
7729 : {
7730 5754 : ret = TODO_update_ssa_only_virtuals | TODO_cleanup_cfg;
7731 5754 : calculate_dominance_info (CDI_DOMINATORS);
7732 5754 : if (optimize)
7733 2991 : enable_ranger (cfun);
7734 5754 : if (large_huge.m_loads)
7735 : {
7736 302 : basic_block entry = ENTRY_BLOCK_PTR_FOR_FN (cfun);
7737 302 : entry->aux = NULL;
7738 604 : bitint_dom_walker (large_huge.m_names,
7739 302 : large_huge.m_loads).walk (entry);
7740 302 : bitmap_and_compl_into (large_huge.m_names, large_huge.m_loads);
7741 302 : clear_aux_for_blocks ();
7742 302 : BITMAP_FREE (large_huge.m_loads);
7743 : }
7744 5754 : large_huge.m_limb_type = build_nonstandard_integer_type (limb_prec, 1);
7745 5754 : large_huge.m_limb_size
7746 5754 : = tree_to_uhwi (TYPE_SIZE_UNIT (large_huge.m_limb_type));
7747 : }
7748 7222 : if (large_huge.m_names)
7749 : {
7750 5576 : large_huge.m_map
7751 11152 : = init_var_map (num_ssa_names, NULL, large_huge.m_names);
7752 5576 : coalesce_ssa_name (large_huge.m_map);
7753 5576 : partition_view_normal (large_huge.m_map);
7754 5576 : if (dump_file && (dump_flags & TDF_DETAILS))
7755 : {
7756 0 : fprintf (dump_file, "After Coalescing:\n");
7757 0 : dump_var_map (dump_file, large_huge.m_map);
7758 : }
7759 5576 : large_huge.m_vars
7760 5576 : = XCNEWVEC (tree, num_var_partitions (large_huge.m_map));
7761 5576 : bitmap_iterator bi;
7762 5576 : if (has_large_huge_parm_result)
7763 19506 : EXECUTE_IF_SET_IN_BITMAP (large_huge.m_names, 0, i, bi)
7764 : {
7765 15361 : tree s = ssa_name (i);
7766 15361 : if (SSA_NAME_VAR (s)
7767 6102 : && ((TREE_CODE (SSA_NAME_VAR (s)) == PARM_DECL
7768 5341 : && SSA_NAME_IS_DEFAULT_DEF (s))
7769 798 : || TREE_CODE (SSA_NAME_VAR (s)) == RESULT_DECL))
7770 : {
7771 5304 : int p = var_to_partition (large_huge.m_map, s);
7772 5304 : if (large_huge.m_vars[p] == NULL_TREE)
7773 : {
7774 5304 : large_huge.m_vars[p] = SSA_NAME_VAR (s);
7775 5304 : mark_addressable (SSA_NAME_VAR (s));
7776 : }
7777 : }
7778 : }
7779 5576 : tree atype = NULL_TREE;
7780 5576 : if (dump_file && (dump_flags & TDF_DETAILS))
7781 0 : fprintf (dump_file, "Mapping SSA_NAMEs to decls:\n");
7782 32519 : EXECUTE_IF_SET_IN_BITMAP (large_huge.m_names, 0, i, bi)
7783 : {
7784 26943 : tree s = ssa_name (i);
7785 26943 : int p = var_to_partition (large_huge.m_map, s);
7786 26943 : if (large_huge.m_vars[p] == NULL_TREE)
7787 : {
7788 18796 : if (atype == NULL_TREE
7789 32864 : || !tree_int_cst_equal (TYPE_SIZE (atype),
7790 14068 : TYPE_SIZE (TREE_TYPE (s))))
7791 : {
7792 7670 : unsigned HOST_WIDE_INT nelts
7793 7670 : = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (s))) / limb_prec;
7794 7670 : atype = build_array_type_nelts (large_huge.m_limb_type,
7795 7670 : nelts);
7796 : }
7797 18796 : large_huge.m_vars[p] = create_tmp_var (atype, "bitint");
7798 18796 : mark_addressable (large_huge.m_vars[p]);
7799 : }
7800 26943 : if (dump_file && (dump_flags & TDF_DETAILS))
7801 : {
7802 0 : print_generic_expr (dump_file, s, TDF_SLIM);
7803 0 : fprintf (dump_file, " -> ");
7804 0 : print_generic_expr (dump_file, large_huge.m_vars[p], TDF_SLIM);
7805 0 : fprintf (dump_file, "\n");
7806 : }
7807 : }
7808 : }
7809 :
7810 45925 : FOR_EACH_BB_REVERSE_FN (bb, cfun)
7811 : {
7812 38703 : gimple_stmt_iterator prev;
7813 195696 : for (gimple_stmt_iterator gsi = gsi_last_bb (bb); !gsi_end_p (gsi);
7814 118290 : gsi = prev)
7815 : {
7816 118290 : prev = gsi;
7817 118290 : gsi_prev (&prev);
7818 118290 : ssa_op_iter iter;
7819 118290 : gimple *stmt = gsi_stmt (gsi);
7820 118290 : if (is_gimple_debug (stmt))
7821 80449 : continue;
7822 113585 : bitint_prec_kind kind = bitint_prec_small;
7823 113585 : tree t;
7824 341864 : FOR_EACH_SSA_TREE_OPERAND (t, stmt, iter, SSA_OP_ALL_OPERANDS)
7825 228279 : if (TREE_CODE (TREE_TYPE (t)) == BITINT_TYPE)
7826 : {
7827 78545 : bitint_prec_kind this_kind
7828 78545 : = bitint_precision_kind (TREE_TYPE (t));
7829 228487 : kind = MAX (kind, this_kind);
7830 : }
7831 113585 : if (is_gimple_assign (stmt) && gimple_store_p (stmt))
7832 : {
7833 16309 : t = gimple_assign_rhs1 (stmt);
7834 16309 : if (TREE_CODE (TREE_TYPE (t)) == BITINT_TYPE)
7835 : {
7836 14114 : bitint_prec_kind this_kind
7837 14114 : = bitint_precision_kind (TREE_TYPE (t));
7838 14114 : kind = MAX (kind, this_kind);
7839 : }
7840 : }
7841 113585 : if (is_gimple_assign (stmt)
7842 113585 : && gimple_assign_rhs_code (stmt) == FLOAT_EXPR)
7843 : {
7844 179 : t = gimple_assign_rhs1 (stmt);
7845 179 : if (TREE_CODE (TREE_TYPE (t)) == BITINT_TYPE
7846 179 : && TREE_CODE (t) == INTEGER_CST)
7847 : {
7848 1 : bitint_prec_kind this_kind
7849 1 : = bitint_precision_kind (TREE_TYPE (t));
7850 1 : kind = MAX (kind, this_kind);
7851 : }
7852 : }
7853 113585 : if (is_gimple_call (stmt))
7854 : {
7855 25569 : t = gimple_call_lhs (stmt);
7856 25569 : if (t && TREE_CODE (TREE_TYPE (t)) == COMPLEX_TYPE)
7857 : {
7858 5308 : bitint_prec_kind this_kind = arith_overflow_arg_kind (stmt);
7859 5308 : kind = MAX (kind, this_kind);
7860 5308 : if (TREE_CODE (TREE_TYPE (TREE_TYPE (t))) == BITINT_TYPE)
7861 : {
7862 5188 : this_kind
7863 5188 : = bitint_precision_kind (TREE_TYPE (TREE_TYPE (t)));
7864 5188 : kind = MAX (kind, this_kind);
7865 : }
7866 : }
7867 : }
7868 113263 : if (kind == bitint_prec_small)
7869 44520 : continue;
7870 69065 : switch (gimple_code (stmt))
7871 : {
7872 11029 : case GIMPLE_CALL:
7873 : /* For now. We'll need to handle some internal functions and
7874 : perhaps some builtins. */
7875 11029 : if (kind == bitint_prec_middle)
7876 2280 : continue;
7877 : break;
7878 4 : case GIMPLE_ASM:
7879 4 : if (kind == bitint_prec_middle)
7880 1 : continue;
7881 : break;
7882 1124 : case GIMPLE_RETURN:
7883 1124 : continue;
7884 48328 : case GIMPLE_ASSIGN:
7885 48328 : if (gimple_clobber_p (stmt))
7886 3509 : continue;
7887 44819 : if (kind >= bitint_prec_large)
7888 : break;
7889 8729 : if (gimple_assign_single_p (stmt))
7890 : /* No need to lower copies, loads or stores. */
7891 5784 : continue;
7892 2945 : if (gimple_assign_cast_p (stmt))
7893 : {
7894 2379 : tree lhs = gimple_assign_lhs (stmt);
7895 2379 : tree rhs = gimple_assign_rhs1 (stmt);
7896 4758 : if (INTEGRAL_TYPE_P (TREE_TYPE (lhs))
7897 2379 : && INTEGRAL_TYPE_P (TREE_TYPE (rhs))
7898 4752 : && (TYPE_PRECISION (TREE_TYPE (lhs))
7899 2373 : == TYPE_PRECISION (TREE_TYPE (rhs))))
7900 : /* No need to lower casts to same precision. */
7901 28 : continue;
7902 : }
7903 : break;
7904 : default:
7905 : break;
7906 1124 : }
7907 :
7908 11497 : if (kind == bitint_prec_middle)
7909 : {
7910 5040 : tree type = NULL_TREE;
7911 : /* Middle _BitInt(N) is rewritten to casts to INTEGER_TYPEs
7912 : with the same precision and back. */
7913 5040 : unsigned int nops = gimple_num_ops (stmt);
7914 16921 : for (unsigned int i = is_gimple_assign (stmt) ? 1 : 0;
7915 16921 : i < nops; ++i)
7916 11881 : if (tree op = gimple_op (stmt, i))
7917 : {
7918 7643 : tree nop = maybe_cast_middle_bitint (&gsi, op, type);
7919 7643 : if (nop != op)
7920 6763 : gimple_set_op (stmt, i, nop);
7921 880 : else if (COMPARISON_CLASS_P (op))
7922 : {
7923 0 : TREE_OPERAND (op, 0)
7924 0 : = maybe_cast_middle_bitint (&gsi,
7925 0 : TREE_OPERAND (op, 0),
7926 : type);
7927 0 : TREE_OPERAND (op, 1)
7928 0 : = maybe_cast_middle_bitint (&gsi,
7929 0 : TREE_OPERAND (op, 1),
7930 : type);
7931 : }
7932 880 : else if (TREE_CODE (op) == CASE_LABEL_EXPR)
7933 : {
7934 24 : CASE_LOW (op)
7935 24 : = maybe_cast_middle_bitint (&gsi, CASE_LOW (op),
7936 : type);
7937 48 : CASE_HIGH (op)
7938 48 : = maybe_cast_middle_bitint (&gsi, CASE_HIGH (op),
7939 : type);
7940 : }
7941 : }
7942 5040 : if (tree lhs = gimple_get_lhs (stmt))
7943 2917 : if (TREE_CODE (TREE_TYPE (lhs)) == BITINT_TYPE
7944 2917 : && (bitint_precision_kind (TREE_TYPE (lhs))
7945 : == bitint_prec_middle))
7946 : {
7947 1368 : int prec = TYPE_PRECISION (TREE_TYPE (lhs));
7948 1368 : int uns = TYPE_UNSIGNED (TREE_TYPE (lhs));
7949 1368 : type = build_nonstandard_integer_type (prec, uns);
7950 1368 : tree lhs2 = make_ssa_name (type);
7951 1368 : gimple_set_lhs (stmt, lhs2);
7952 1368 : gimple *g = gimple_build_assign (lhs, NOP_EXPR, lhs2);
7953 1368 : if (stmt_ends_bb_p (stmt))
7954 : {
7955 4 : edge e = find_fallthru_edge (gsi_bb (gsi)->succs);
7956 4 : gsi_insert_on_edge (e, g);
7957 4 : edge_insertions = true;
7958 : }
7959 : else
7960 1364 : gsi_insert_after (&gsi, g, GSI_SAME_STMT);
7961 : }
7962 5040 : update_stmt (stmt);
7963 5040 : continue;
7964 5040 : }
7965 :
7966 51299 : if (tree lhs = gimple_get_lhs (stmt))
7967 44708 : if (TREE_CODE (lhs) == SSA_NAME)
7968 : {
7969 35782 : tree type = TREE_TYPE (lhs);
7970 35782 : if (TREE_CODE (type) == COMPLEX_TYPE)
7971 4184 : type = TREE_TYPE (type);
7972 44535 : if (TREE_CODE (type) == BITINT_TYPE
7973 30417 : && bitint_precision_kind (type) >= bitint_prec_large
7974 65997 : && (large_huge.m_names == NULL
7975 30056 : || !bitmap_bit_p (large_huge.m_names,
7976 30056 : SSA_NAME_VERSION (lhs))))
7977 8753 : continue;
7978 : }
7979 :
7980 42546 : large_huge.lower_stmt (stmt);
7981 : }
7982 :
7983 38703 : tree atype = NULL_TREE;
7984 47239 : for (gphi_iterator gsi = gsi_start_phis (bb); !gsi_end_p (gsi);
7985 8536 : gsi_next (&gsi))
7986 : {
7987 8536 : gphi *phi = gsi.phi ();
7988 8536 : tree lhs = gimple_phi_result (phi);
7989 8536 : if (TREE_CODE (TREE_TYPE (lhs)) != BITINT_TYPE
7990 8536 : || bitint_precision_kind (TREE_TYPE (lhs)) < bitint_prec_large)
7991 8393 : continue;
7992 143 : int p1 = var_to_partition (large_huge.m_map, lhs);
7993 143 : gcc_assert (large_huge.m_vars[p1] != NULL_TREE);
7994 : tree v1 = large_huge.m_vars[p1];
7995 544 : for (unsigned i = 0; i < gimple_phi_num_args (phi); ++i)
7996 : {
7997 401 : tree arg = gimple_phi_arg_def (phi, i);
7998 401 : edge e = gimple_phi_arg_edge (phi, i);
7999 401 : gimple *g;
8000 401 : switch (TREE_CODE (arg))
8001 : {
8002 74 : case INTEGER_CST:
8003 74 : if (integer_zerop (arg) && VAR_P (v1))
8004 : {
8005 45 : tree zero = build_zero_cst (TREE_TYPE (v1));
8006 45 : g = gimple_build_assign (v1, zero);
8007 45 : gsi_insert_on_edge (e, g);
8008 45 : edge_insertions = true;
8009 136 : break;
8010 : }
8011 29 : int ext;
8012 29 : unsigned int min_prec, prec, rem;
8013 29 : tree c;
8014 29 : prec = TYPE_PRECISION (TREE_TYPE (arg));
8015 29 : rem = prec % (2 * limb_prec);
8016 29 : min_prec = bitint_min_cst_precision (arg, ext);
8017 29 : if (min_prec > prec - rem - 2 * limb_prec
8018 12 : && min_prec > (unsigned) limb_prec)
8019 : /* Constant which has enough significant bits that it
8020 : isn't worth trying to save .rodata space by extending
8021 : from smaller number. */
8022 : min_prec = prec;
8023 : else
8024 : {
8025 20 : min_prec = CEIL (min_prec, limb_prec) * limb_prec;
8026 20 : if (min_prec > (unsigned) limb_prec
8027 3 : && abi_limb_prec > limb_prec)
8028 : {
8029 : /* For targets with ABI limb precision higher than
8030 : limb precision round to ABI limb precision,
8031 : otherwise c can contain padding bits. */
8032 0 : min_prec
8033 0 : = CEIL (min_prec, abi_limb_prec) * abi_limb_prec;
8034 0 : if (min_prec > prec - rem - 2 * limb_prec)
8035 9 : min_prec = prec;
8036 : }
8037 : }
8038 29 : if (min_prec == 0)
8039 : c = NULL_TREE;
8040 26 : else if (min_prec == prec)
8041 9 : c = tree_output_constant_def (arg);
8042 17 : else if (min_prec == (unsigned) limb_prec)
8043 14 : c = fold_convert (large_huge.m_limb_type, arg);
8044 : else
8045 : {
8046 3 : tree ctype = build_bitint_type (min_prec, 1);
8047 3 : c = tree_output_constant_def (fold_convert (ctype, arg));
8048 : }
8049 26 : if (c)
8050 : {
8051 26 : if (VAR_P (v1) && min_prec == prec)
8052 : {
8053 8 : tree v2 = build1 (VIEW_CONVERT_EXPR,
8054 8 : TREE_TYPE (v1), c);
8055 8 : g = gimple_build_assign (v1, v2);
8056 8 : gsi_insert_on_edge (e, g);
8057 8 : edge_insertions = true;
8058 8 : break;
8059 : }
8060 18 : if (TREE_CODE (TREE_TYPE (c)) == INTEGER_TYPE)
8061 : {
8062 14 : if (bitint_big_endian)
8063 : {
8064 0 : tree ptype = build_pointer_type (TREE_TYPE (v1));
8065 0 : tree sz1 = TYPE_SIZE_UNIT (TREE_TYPE (v1));
8066 0 : tree sz2 = TYPE_SIZE_UNIT (TREE_TYPE (c));
8067 0 : tree off = build_int_cst (ptype,
8068 0 : tree_to_uhwi (sz1)
8069 0 : - tree_to_uhwi (sz2));
8070 0 : tree vd = build2 (MEM_REF, TREE_TYPE (c),
8071 : build_fold_addr_expr (v1),
8072 : off);
8073 0 : g = gimple_build_assign (vd, c);
8074 : }
8075 : else
8076 14 : g = gimple_build_assign (build1 (VIEW_CONVERT_EXPR,
8077 14 : TREE_TYPE (c),
8078 : v1), c);
8079 : }
8080 : else
8081 : {
8082 4 : unsigned HOST_WIDE_INT nelts
8083 4 : = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (c)))
8084 4 : / limb_prec;
8085 4 : tree vtype
8086 8 : = build_array_type_nelts (large_huge.m_limb_type,
8087 4 : nelts);
8088 4 : tree vd;
8089 4 : if (bitint_big_endian)
8090 : {
8091 0 : tree ptype = build_pointer_type (TREE_TYPE (v1));
8092 0 : tree sz1 = TYPE_SIZE_UNIT (TREE_TYPE (v1));
8093 0 : tree sz2 = TYPE_SIZE_UNIT (vtype);
8094 0 : tree off = build_int_cst (ptype,
8095 0 : tree_to_uhwi (sz1)
8096 0 : - tree_to_uhwi (sz2));
8097 0 : vd = build2 (MEM_REF, vtype,
8098 : build_fold_addr_expr (v1), off);
8099 : }
8100 : else
8101 4 : vd = build1 (VIEW_CONVERT_EXPR, vtype, v1);
8102 4 : g = gimple_build_assign (vd,
8103 : build1 (VIEW_CONVERT_EXPR,
8104 : vtype, c));
8105 : }
8106 18 : gsi_insert_on_edge (e, g);
8107 18 : if (min_prec == prec)
8108 : {
8109 : edge_insertions = true;
8110 : break;
8111 : }
8112 : }
8113 20 : if (ext == 0)
8114 : {
8115 14 : unsigned HOST_WIDE_INT nelts
8116 14 : = (tree_to_uhwi (TYPE_SIZE (TREE_TYPE (v1)))
8117 14 : - min_prec) / limb_prec;
8118 14 : tree vtype
8119 28 : = build_array_type_nelts (large_huge.m_limb_type,
8120 14 : nelts);
8121 14 : tree ptype = build_pointer_type (TREE_TYPE (v1));
8122 14 : tree off;
8123 14 : if (c && !bitint_big_endian)
8124 13 : off = fold_convert (ptype,
8125 : TYPE_SIZE_UNIT (TREE_TYPE (c)));
8126 : else
8127 1 : off = build_zero_cst (ptype);
8128 14 : tree vd = build2 (MEM_REF, vtype,
8129 : build_fold_addr_expr (v1), off);
8130 14 : g = gimple_build_assign (vd, build_zero_cst (vtype));
8131 : }
8132 : else
8133 : {
8134 6 : tree vd = v1;
8135 6 : if (c && !bitint_big_endian)
8136 : {
8137 4 : tree ptype = build_pointer_type (TREE_TYPE (v1));
8138 4 : tree off
8139 4 : = fold_convert (ptype,
8140 : TYPE_SIZE_UNIT (TREE_TYPE (c)));
8141 4 : vd = build2 (MEM_REF, large_huge.m_limb_type,
8142 : build_fold_addr_expr (v1), off);
8143 : }
8144 6 : vd = build_fold_addr_expr (vd);
8145 6 : unsigned HOST_WIDE_INT nbytes
8146 6 : = tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (v1)));
8147 6 : if (c)
8148 4 : nbytes
8149 4 : -= tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (c)));
8150 6 : tree fn = builtin_decl_implicit (BUILT_IN_MEMSET);
8151 6 : g = gimple_build_call (fn, 3, vd,
8152 : integer_minus_one_node,
8153 : build_int_cst (sizetype,
8154 6 : nbytes));
8155 : }
8156 20 : gsi_insert_on_edge (e, g);
8157 20 : edge_insertions = true;
8158 20 : break;
8159 0 : default:
8160 0 : gcc_unreachable ();
8161 327 : case SSA_NAME:
8162 327 : if (gimple_code (SSA_NAME_DEF_STMT (arg)) == GIMPLE_NOP)
8163 : {
8164 9 : if (large_huge.m_names == NULL
8165 18 : || !bitmap_bit_p (large_huge.m_names,
8166 9 : SSA_NAME_VERSION (arg)))
8167 310 : continue;
8168 : }
8169 327 : int p2 = var_to_partition (large_huge.m_map, arg);
8170 327 : if (p1 == p2)
8171 310 : continue;
8172 17 : gcc_assert (large_huge.m_vars[p2] != NULL_TREE);
8173 17 : tree v2 = large_huge.m_vars[p2];
8174 17 : if (VAR_P (v1) && VAR_P (v2))
8175 17 : g = gimple_build_assign (v1, v2);
8176 0 : else if (VAR_P (v1))
8177 0 : g = gimple_build_assign (v1, build1 (VIEW_CONVERT_EXPR,
8178 0 : TREE_TYPE (v1), v2));
8179 0 : else if (VAR_P (v2))
8180 0 : g = gimple_build_assign (build1 (VIEW_CONVERT_EXPR,
8181 0 : TREE_TYPE (v2), v1), v2);
8182 : else
8183 : {
8184 0 : if (atype == NULL_TREE
8185 0 : || !tree_int_cst_equal (TYPE_SIZE (atype),
8186 0 : TYPE_SIZE (TREE_TYPE (lhs))))
8187 : {
8188 0 : unsigned HOST_WIDE_INT nelts
8189 0 : = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (lhs)))
8190 0 : / limb_prec;
8191 0 : atype
8192 0 : = build_array_type_nelts (large_huge.m_limb_type,
8193 0 : nelts);
8194 : }
8195 0 : g = gimple_build_assign (build1 (VIEW_CONVERT_EXPR,
8196 : atype, v1),
8197 : build1 (VIEW_CONVERT_EXPR,
8198 : atype, v2));
8199 : }
8200 17 : gsi_insert_on_edge (e, g);
8201 17 : edge_insertions = true;
8202 17 : break;
8203 : }
8204 : }
8205 : }
8206 : }
8207 :
8208 7222 : if (large_huge.m_names || has_large_huge)
8209 : {
8210 5754 : gimple *nop = NULL;
8211 382607 : for (i = 0; i < num_ssa_names; ++i)
8212 : {
8213 376853 : tree s = ssa_name (i);
8214 376853 : if (s == NULL_TREE)
8215 15308 : continue;
8216 361545 : tree type = TREE_TYPE (s);
8217 361545 : if (TREE_CODE (type) == COMPLEX_TYPE)
8218 17439 : type = TREE_TYPE (type);
8219 361545 : if (TREE_CODE (type) == BITINT_TYPE
8220 361545 : && bitint_precision_kind (type) >= bitint_prec_large)
8221 : {
8222 40377 : if (large_huge.m_preserved
8223 44851 : && bitmap_bit_p (large_huge.m_preserved,
8224 6809 : SSA_NAME_VERSION (s)))
8225 2335 : continue;
8226 35707 : gimple *g = SSA_NAME_DEF_STMT (s);
8227 35707 : if (gimple_code (g) == GIMPLE_NOP)
8228 : {
8229 9565 : if (SSA_NAME_VAR (s))
8230 5325 : set_ssa_default_def (cfun, SSA_NAME_VAR (s), NULL_TREE);
8231 9565 : release_ssa_name (s);
8232 9565 : continue;
8233 : }
8234 26142 : if (gimple_bb (g) == NULL)
8235 : {
8236 0 : release_ssa_name (s);
8237 0 : continue;
8238 : }
8239 26142 : if (gimple_code (g) != GIMPLE_ASM)
8240 : {
8241 26141 : gimple_stmt_iterator gsi = gsi_for_stmt (g);
8242 26141 : bool save_vta = flag_var_tracking_assignments;
8243 26141 : flag_var_tracking_assignments = false;
8244 26141 : gsi_remove (&gsi, true);
8245 26141 : flag_var_tracking_assignments = save_vta;
8246 : }
8247 26142 : if (nop == NULL)
8248 4819 : nop = gimple_build_nop ();
8249 26142 : SSA_NAME_DEF_STMT (s) = nop;
8250 26142 : release_ssa_name (s);
8251 : }
8252 : }
8253 5754 : if (optimize)
8254 2991 : disable_ranger (cfun);
8255 : }
8256 :
8257 7222 : if (edge_insertions)
8258 33 : gsi_commit_edge_inserts ();
8259 :
8260 : /* Fix up arguments of ECF_RETURNS_TWICE calls. Those were temporarily
8261 : inserted before the call, but that is invalid IL, so move them to the
8262 : right place and add corresponding PHIs. */
8263 7222 : if (!large_huge.m_returns_twice_calls.is_empty ())
8264 : {
8265 9 : auto_vec<gimple *, 16> arg_stmts;
8266 29 : while (!large_huge.m_returns_twice_calls.is_empty ())
8267 : {
8268 11 : gimple *stmt = large_huge.m_returns_twice_calls.pop ();
8269 11 : gimple_stmt_iterator gsi = gsi_after_labels (gimple_bb (stmt));
8270 36 : while (gsi_stmt (gsi) != stmt)
8271 : {
8272 25 : if (is_gimple_debug (gsi_stmt (gsi)))
8273 2 : gsi_next (&gsi);
8274 : else
8275 : {
8276 23 : arg_stmts.safe_push (gsi_stmt (gsi));
8277 23 : gsi_remove (&gsi, false);
8278 : }
8279 : }
8280 11 : gimple *g;
8281 11 : basic_block bb = NULL;
8282 11 : edge e = NULL, ead = NULL;
8283 34 : FOR_EACH_VEC_ELT (arg_stmts, i, g)
8284 : {
8285 23 : gsi_safe_insert_before (&gsi, g);
8286 23 : if (i == 0)
8287 : {
8288 11 : bb = gimple_bb (stmt);
8289 11 : gcc_checking_assert (EDGE_COUNT (bb->preds) == 2);
8290 11 : e = EDGE_PRED (bb, 0);
8291 11 : ead = EDGE_PRED (bb, 1);
8292 11 : if ((ead->flags & EDGE_ABNORMAL) == 0)
8293 0 : std::swap (e, ead);
8294 11 : gcc_checking_assert ((e->flags & EDGE_ABNORMAL) == 0
8295 : && (ead->flags & EDGE_ABNORMAL));
8296 : }
8297 23 : tree lhs = gimple_assign_lhs (g);
8298 23 : tree arg = lhs;
8299 23 : gphi *phi = create_phi_node (copy_ssa_name (arg), bb);
8300 23 : add_phi_arg (phi, arg, e, UNKNOWN_LOCATION);
8301 23 : tree var = create_tmp_reg (TREE_TYPE (arg));
8302 23 : suppress_warning (var, OPT_Wuninitialized);
8303 23 : arg = get_or_create_ssa_default_def (cfun, var);
8304 23 : SSA_NAME_OCCURS_IN_ABNORMAL_PHI (arg) = 1;
8305 23 : add_phi_arg (phi, arg, ead, UNKNOWN_LOCATION);
8306 23 : arg = gimple_phi_result (phi);
8307 23 : SSA_NAME_OCCURS_IN_ABNORMAL_PHI (arg) = 1;
8308 23 : imm_use_iterator iter;
8309 23 : gimple *use_stmt;
8310 92 : FOR_EACH_IMM_USE_STMT (use_stmt, iter, lhs)
8311 : {
8312 46 : if (use_stmt == phi)
8313 23 : continue;
8314 23 : gcc_checking_assert (use_stmt == stmt);
8315 23 : use_operand_p use_p;
8316 69 : FOR_EACH_IMM_USE_ON_STMT (use_p, iter)
8317 23 : SET_USE (use_p, arg);
8318 23 : }
8319 : }
8320 11 : update_stmt (stmt);
8321 11 : arg_stmts.truncate (0);
8322 : }
8323 9 : }
8324 :
8325 7222 : return ret;
8326 7222 : }
8327 :
namespace {

/* Pass descriptor for the "bitintlower" pass.  It requires SSA form
   (PROP_ssa) and provides PROP_gimple_lbitint so that the lowering is
   recorded as done and the -O0 fallback pass below does not run again.  */
const pass_data pass_data_lower_bitint =
{
  GIMPLE_PASS, /* type */
  "bitintlower", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  PROP_ssa, /* properties_required */
  PROP_gimple_lbitint, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

/* Gimple pass wrapper which dispatches to gimple_lower_bitint for each
   function; all the real work happens there.  */
class pass_lower_bitint : public gimple_opt_pass
{
public:
  pass_lower_bitint (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_lower_bitint, ctxt)
  {}

  /* opt_pass methods: */
  /* clone lets the pass manager instantiate this pass more than once
     in the pipeline.  */
  opt_pass * clone () final override { return new pass_lower_bitint (m_ctxt); }
  unsigned int execute (function *) final override
  {
    return gimple_lower_bitint ();
  }

}; // class pass_lower_bitint

} // anon namespace
8360 :
8361 : gimple_opt_pass *
8362 288047 : make_pass_lower_bitint (gcc::context *ctxt)
8363 : {
8364 288047 : return new pass_lower_bitint (ctxt);
8365 : }
8366 :
8367 :
namespace {

/* Pass descriptor for the "bitintlower0" fallback pass.  Unlike the
   "bitintlower" pass above it only requires a CFG (PROP_cfg), not SSA,
   and likewise provides PROP_gimple_lbitint.  */
const pass_data pass_data_lower_bitint_O0 =
{
  GIMPLE_PASS, /* type */
  "bitintlower0", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  PROP_cfg, /* properties_required */
  PROP_gimple_lbitint, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

/* Fallback gimple pass wrapper; its gate fires only if lowering has
   not already been performed (PROP_gimple_lbitint not yet set).  */
class pass_lower_bitint_O0 : public gimple_opt_pass
{
public:
  pass_lower_bitint_O0 (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_lower_bitint_O0, ctxt)
  {}

  /* opt_pass methods: */
  bool gate (function *fun) final override
  {
    /* With errors, normal optimization passes are not run.  If we don't
       lower bitint operations at all, rtl expansion will abort.  */
    return !(fun->curr_properties & PROP_gimple_lbitint);
  }

  unsigned int execute (function *) final override
  {
    return gimple_lower_bitint ();
  }

}; // class pass_lower_bitint_O0

} // anon namespace
8406 :
8407 : gimple_opt_pass *
8408 288047 : make_pass_lower_bitint_O0 (gcc::context *ctxt)
8409 : {
8410 288047 : return new pass_lower_bitint_O0 (ctxt);
8411 : }
|