Line data Source code
1 : /* Lower _BitInt(N) operations to scalar operations.
2 : Copyright (C) 2023-2026 Free Software Foundation, Inc.
3 : Contributed by Jakub Jelinek <jakub@redhat.com>.
4 :
5 : This file is part of GCC.
6 :
7 : GCC is free software; you can redistribute it and/or modify it
8 : under the terms of the GNU General Public License as published by the
9 : Free Software Foundation; either version 3, or (at your option) any
10 : later version.
11 :
12 : GCC is distributed in the hope that it will be useful, but WITHOUT
13 : ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 : FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 : for more details.
16 :
17 : You should have received a copy of the GNU General Public License
18 : along with GCC; see the file COPYING3. If not see
19 : <http://www.gnu.org/licenses/>. */
20 :
21 : #include "config.h"
22 : #include "system.h"
23 : #include "coretypes.h"
24 : #include "backend.h"
25 : #include "rtl.h"
26 : #include "tree.h"
27 : #include "gimple.h"
28 : #include "cfghooks.h"
29 : #include "tree-pass.h"
30 : #include "ssa.h"
31 : #include "fold-const.h"
32 : #include "gimplify.h"
33 : #include "gimple-iterator.h"
34 : #include "tree-cfg.h"
35 : #include "tree-dfa.h"
36 : #include "cfgloop.h"
37 : #include "cfganal.h"
38 : #include "target.h"
39 : #include "tree-ssa-live.h"
40 : #include "tree-ssa-coalesce.h"
41 : #include "domwalk.h"
42 : #include "memmodel.h"
43 : #include "optabs.h"
44 : #include "varasm.h"
45 : #include "gimple-range.h"
46 : #include "value-range.h"
47 : #include "langhooks.h"
48 : #include "gimplify-me.h"
49 : #include "diagnostic-core.h"
50 : #include "tree-eh.h"
51 : #include "tree-pretty-print.h"
52 : #include "alloc-pool.h"
53 : #include "tree-into-ssa.h"
54 : #include "tree-cfgcleanup.h"
55 : #include "tree-switch-conversion.h"
56 : #include "ubsan.h"
57 : #include "stor-layout.h"
58 : #include "gimple-lower-bitint.h"
59 :
/* Split BITINT_TYPE precisions in 4 categories.  Small _BitInt, where
   target hook says it is a single limb, middle _BitInt which per ABI
   does not, but there is some INTEGER_TYPE in which arithmetics can be
   performed (operations on such _BitInt are lowered to casts to that
   arithmetic type and cast back; e.g. on x86_64 limb is DImode, but
   target supports TImode, so _BitInt(65) to _BitInt(128) are middle
   ones), large _BitInt which should be handled by straight line code and
   finally huge _BitInt which should be handled by loops over the limbs.  */
68 :
enum bitint_prec_kind {
  bitint_prec_small,
  bitint_prec_middle,
  bitint_prec_large,
  bitint_prec_huge
};

/* Caches to speed up bitint_precision_kind.  */

/* Largest precision known to be small resp. smallest precisions known
   to be middle, large and huge; 0 when not yet determined.  */
static int small_max_prec, mid_min_prec, large_min_prec, huge_min_prec;
/* Precision of a limb and of an ABI limb; 0 until first initialized
   from the target hook in bitint_precision_kind.  */
static int limb_prec, abi_limb_prec;
/* Whether limbs are stored most significant limb first in memory
   (from bitint_info::big_endian).  */
static bool bitint_big_endian;
/* How bits above the precision are defined per ABI
   (from bitint_info::extended).  */
static enum bitint_ext bitint_extended;
82 :
/* Categorize _BitInt(PREC) as small, middle, large or huge.
   On a cache miss this also initializes limb_prec, abi_limb_prec,
   bitint_big_endian and bitint_extended from the target hook and
   updates the cached precision boundaries.  */

static bitint_prec_kind
bitint_precision_kind (int prec)
{
  /* Fast paths: answer from the cached boundaries when they are
     already known (0 means not yet determined).  */
  if (prec <= small_max_prec)
    return bitint_prec_small;
  if (huge_min_prec && prec >= huge_min_prec)
    return bitint_prec_huge;
  if (large_min_prec && prec >= large_min_prec)
    return bitint_prec_large;
  if (mid_min_prec && prec >= mid_min_prec)
    return bitint_prec_middle;

  /* Cache miss: ask the target how _BitInt(PREC) is laid out.  */
  struct bitint_info info;
  bool ok = targetm.c.bitint_type_info (prec, &info);
  gcc_assert (ok);
  scalar_int_mode limb_mode = as_a <scalar_int_mode> (info.limb_mode);
  if (prec <= GET_MODE_PRECISION (limb_mode))
    {
      /* Fits into a single limb; remember the largest such precision.  */
      small_max_prec = prec;
      return bitint_prec_small;
    }
  bitint_big_endian = info.big_endian;
  bitint_extended = info.extended;
  /* When the limb and ABI limb modes match, treat full extension as
     partial extension.  */
  if (info.limb_mode == info.abi_limb_mode && bitint_extended == bitint_ext_full)
    bitint_extended = bitint_ext_partial;
  if (!large_min_prec
      && GET_MODE_PRECISION (limb_mode) <= MAX_FIXED_MODE_SIZE)
    /* Large means precision above the largest supported integer mode.  */
    large_min_prec = MAX_FIXED_MODE_SIZE + 1;
  if (!limb_prec)
    limb_prec = GET_MODE_PRECISION (limb_mode);
  if (!abi_limb_prec)
    abi_limb_prec
      = GET_MODE_PRECISION (as_a <scalar_int_mode> (info.abi_limb_mode));
  /* For bitint_ext_full with different limb_mode from abi_limb_mode we
     currently support only abi_limb_mode twice the precision of
     limb_mode, and don't support big endian in that case either.  */
  gcc_assert (bitint_extended != bitint_ext_full
	      || (abi_limb_prec == 2 * limb_prec
		  && !bitint_big_endian));
  if (!huge_min_prec)
    {
      /* Huge starts at 4 limbs, unless fewer than 4 limbs still fit
	 into some supported integer mode.  */
      if (4 * limb_prec >= MAX_FIXED_MODE_SIZE)
	huge_min_prec = 4 * limb_prec;
      else
	huge_min_prec = MAX_FIXED_MODE_SIZE + 1;
    }
  if (prec <= MAX_FIXED_MODE_SIZE)
    {
      /* Middle: some INTEGER_TYPE can hold it; remember the smallest
	 such precision seen so far.  */
      if (!mid_min_prec || prec < mid_min_prec)
	mid_min_prec = prec;
      return bitint_prec_middle;
    }
  if (huge_min_prec && prec >= huge_min_prec)
    return bitint_prec_huge;
  return bitint_prec_large;
}
141 :
142 : /* Same for a TYPE. */
143 :
144 : static bitint_prec_kind
145 469666 : bitint_precision_kind (tree type)
146 : {
147 469666 : return bitint_precision_kind (TYPE_PRECISION (type));
148 : }
149 :
150 : /* Return minimum precision needed to describe INTEGER_CST
151 : CST. All bits above that precision up to precision of
152 : TREE_TYPE (CST) are cleared if EXT is set to 0, or set
153 : if EXT is set to -1. */
154 :
155 : static unsigned
156 5308 : bitint_min_cst_precision (tree cst, int &ext)
157 : {
158 5308 : ext = tree_int_cst_sgn (cst) < 0 ? -1 : 0;
159 5308 : wide_int w = wi::to_wide (cst);
160 5308 : unsigned min_prec = wi::min_precision (w, TYPE_SIGN (TREE_TYPE (cst)));
161 : /* For signed values, we don't need to count the sign bit,
162 : we'll use constant 0 or -1 for the upper bits. */
163 5308 : if (!TYPE_UNSIGNED (TREE_TYPE (cst)))
164 3234 : --min_prec;
165 : else
166 : {
167 : /* For unsigned values, also try signed min_precision
168 : in case the constant has lots of most significant bits set. */
169 2074 : unsigned min_prec2 = wi::min_precision (w, SIGNED) - 1;
170 2074 : if (min_prec2 < min_prec)
171 : {
172 990 : ext = -1;
173 990 : return min_prec2;
174 : }
175 : }
176 : return min_prec;
177 5308 : }
178 :
179 : namespace {
180 :
181 : /* If OP is middle _BitInt, cast it to corresponding INTEGER_TYPE
182 : cached in TYPE and return it. */
183 :
184 : tree
185 7760 : maybe_cast_middle_bitint (gimple_stmt_iterator *gsi, tree op, tree &type)
186 : {
187 7760 : if (op == NULL_TREE
188 7732 : || TREE_CODE (TREE_TYPE (op)) != BITINT_TYPE
189 14615 : || bitint_precision_kind (TREE_TYPE (op)) != bitint_prec_middle)
190 908 : return op;
191 :
192 6852 : int prec = TYPE_PRECISION (TREE_TYPE (op));
193 6852 : int uns = TYPE_UNSIGNED (TREE_TYPE (op));
194 6852 : if (type == NULL_TREE
195 2534 : || TYPE_PRECISION (type) != prec
196 9386 : || TYPE_UNSIGNED (type) != uns)
197 4318 : type = build_nonstandard_integer_type (prec, uns);
198 :
199 6852 : if (TREE_CODE (op) != SSA_NAME)
200 : {
201 2346 : tree nop = fold_convert (type, op);
202 2346 : if (is_gimple_val (nop))
203 : return nop;
204 : }
205 :
206 4506 : tree nop = make_ssa_name (type);
207 4506 : gimple *g = gimple_build_assign (nop, NOP_EXPR, op);
208 4506 : gsi_insert_before (gsi, g, GSI_SAME_STMT);
209 4506 : return nop;
210 : }
211 :
/* Return true if STMT can be handled in a loop from least to most
   significant limb together with its dependencies.  */

bool
mergeable_op (gimple *stmt)
{
  if (!is_gimple_assign (stmt))
    return false;
  switch (gimple_assign_rhs_code (stmt))
    {
    case PLUS_EXPR:
    case MINUS_EXPR:
    case NEGATE_EXPR:
    case BIT_AND_EXPR:
    case BIT_IOR_EXPR:
    case BIT_XOR_EXPR:
    case BIT_NOT_EXPR:
    case SSA_NAME:
    case INTEGER_CST:
    case BIT_FIELD_REF:
      /* These can always be processed limb by limb from the least
	 significant limb upwards (or are plain copies/constants).  */
      return true;
    case LSHIFT_EXPR:
      {
	/* Left shift is mergeable only when the shift count is a
	   known constant smaller than one limb.  */
	tree cnt = gimple_assign_rhs2 (stmt);
	if (tree_fits_uhwi_p (cnt)
	    && tree_to_uhwi (cnt) < (unsigned HOST_WIDE_INT) limb_prec)
	  return true;
      }
      break;
    CASE_CONVERT:
    case VIEW_CONVERT_EXPR:
      {
	/* Casts between large/huge _BitInt SSA_NAMEs with the same
	   number of limbs.  Narrowing (or same precision) is always
	   mergeable; widening within the same limb count only in the
	   cases checked below (presumably so the sign/zero extension
	   doesn't fall in the middle of the 2-limbs-per-iteration
	   loop used for huge _BitInt — TODO confirm).  */
	tree lhs_type = TREE_TYPE (gimple_assign_lhs (stmt));
	tree rhs_type = TREE_TYPE (gimple_assign_rhs1 (stmt));
	if (TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME
	    && TREE_CODE (lhs_type) == BITINT_TYPE
	    && TREE_CODE (rhs_type) == BITINT_TYPE
	    && bitint_precision_kind (lhs_type) >= bitint_prec_large
	    && bitint_precision_kind (rhs_type) >= bitint_prec_large
	    && (CEIL (TYPE_PRECISION (lhs_type), limb_prec)
		== CEIL (TYPE_PRECISION (rhs_type), limb_prec)))
	  {
	    if (TYPE_PRECISION (rhs_type) >= TYPE_PRECISION (lhs_type))
	      return true;
	    if ((unsigned) TYPE_PRECISION (lhs_type) % (2 * limb_prec) != 0)
	      return true;
	    if (bitint_precision_kind (lhs_type) == bitint_prec_large)
	      return true;
	  }
	break;
      }
    default:
      break;
    }
  return false;
}
268 :
269 : /* Return non-zero if stmt is .{ADD,SUB,MUL}_OVERFLOW call with
270 : _Complex large/huge _BitInt lhs which has at most two immediate uses,
271 : at most one use in REALPART_EXPR stmt in the same bb and exactly one
272 : IMAGPART_EXPR use in the same bb with a single use which casts it to
273 : non-BITINT_TYPE integral type. If there is a REALPART_EXPR use,
274 : return 2. Such cases (most common uses of those builtins) can be
275 : optimized by marking their lhs and lhs of IMAGPART_EXPR and maybe lhs
276 : of REALPART_EXPR as not needed to be backed up by a stack variable.
277 : For .UBSAN_CHECK_{ADD,SUB,MUL} return 3. */
278 :
279 : int
280 20463 : optimizable_arith_overflow (gimple *stmt)
281 : {
282 20463 : bool is_ubsan = false;
283 20463 : if (!is_gimple_call (stmt) || !gimple_call_internal_p (stmt))
284 : return false;
285 4955 : switch (gimple_call_internal_fn (stmt))
286 : {
287 : case IFN_ADD_OVERFLOW:
288 : case IFN_SUB_OVERFLOW:
289 : case IFN_MUL_OVERFLOW:
290 : break;
291 48 : case IFN_UBSAN_CHECK_ADD:
292 48 : case IFN_UBSAN_CHECK_SUB:
293 48 : case IFN_UBSAN_CHECK_MUL:
294 48 : is_ubsan = true;
295 48 : break;
296 : default:
297 : return 0;
298 : }
299 4955 : tree lhs = gimple_call_lhs (stmt);
300 4955 : if (!lhs)
301 : return 0;
302 4955 : if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
303 : return 0;
304 4955 : tree type = is_ubsan ? TREE_TYPE (lhs) : TREE_TYPE (TREE_TYPE (lhs));
305 4955 : if (TREE_CODE (type) != BITINT_TYPE
306 4955 : || bitint_precision_kind (type) < bitint_prec_large)
307 0 : return 0;
308 :
309 4955 : if (is_ubsan)
310 : {
311 48 : use_operand_p use_p;
312 48 : gimple *use_stmt;
313 48 : if (!single_imm_use (lhs, &use_p, &use_stmt)
314 48 : || gimple_bb (use_stmt) != gimple_bb (stmt)
315 48 : || !gimple_store_p (use_stmt)
316 48 : || !is_gimple_assign (use_stmt)
317 48 : || gimple_has_volatile_ops (use_stmt)
318 96 : || stmt_ends_bb_p (use_stmt))
319 0 : return 0;
320 : return 3;
321 : }
322 :
323 4907 : imm_use_iterator ui;
324 4907 : use_operand_p use_p;
325 4907 : int seen = 0;
326 4907 : gimple *realpart = NULL, *cast = NULL;
327 19347 : FOR_EACH_IMM_USE_FAST (use_p, ui, lhs)
328 : {
329 9537 : gimple *g = USE_STMT (use_p);
330 9537 : if (is_gimple_debug (g))
331 0 : continue;
332 9537 : if (!is_gimple_assign (g) || gimple_bb (g) != gimple_bb (stmt))
333 : return 0;
334 9537 : if (gimple_assign_rhs_code (g) == REALPART_EXPR)
335 : {
336 4630 : if ((seen & 1) != 0)
337 : return 0;
338 4630 : seen |= 1;
339 4630 : realpart = g;
340 : }
341 4907 : else if (gimple_assign_rhs_code (g) == IMAGPART_EXPR)
342 : {
343 4907 : if ((seen & 2) != 0)
344 4 : return 0;
345 4907 : seen |= 2;
346 :
347 4907 : use_operand_p use2_p;
348 4907 : gimple *use_stmt;
349 4907 : tree lhs2 = gimple_assign_lhs (g);
350 4907 : if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs2))
351 : return 0;
352 4907 : if (!single_imm_use (lhs2, &use2_p, &use_stmt)
353 4907 : || gimple_bb (use_stmt) != gimple_bb (stmt)
354 9814 : || !gimple_assign_cast_p (use_stmt))
355 : return 0;
356 :
357 4907 : lhs2 = gimple_assign_lhs (use_stmt);
358 9814 : if (!INTEGRAL_TYPE_P (TREE_TYPE (lhs2))
359 9814 : || TREE_CODE (TREE_TYPE (lhs2)) == BITINT_TYPE)
360 : return 0;
361 4903 : cast = use_stmt;
362 : }
363 : else
364 : return 0;
365 4 : }
366 4903 : if ((seen & 2) == 0)
367 : return 0;
368 4903 : if (seen == 3)
369 : {
370 : /* Punt if the cast stmt appears before realpart stmt, because
371 : if both appear, the lowering wants to emit all the code
372 : at the location of realpart stmt. */
373 4630 : gimple_stmt_iterator gsi = gsi_for_stmt (realpart);
374 4630 : unsigned int cnt = 0;
375 4633 : do
376 : {
377 4633 : gsi_prev_nondebug (&gsi);
378 4633 : if (gsi_end_p (gsi) || gsi_stmt (gsi) == cast)
379 : return 0;
380 4630 : if (gsi_stmt (gsi) == stmt)
381 : return 2;
382 : /* If realpart is too far from stmt, punt as well.
383 : Usually it will appear right after it. */
384 3 : if (++cnt == 32)
385 : return 0;
386 : }
387 : while (1);
388 : }
389 : return 1;
390 : }
391 :
392 : /* If STMT is some kind of comparison (GIMPLE_COND, comparison assignment)
393 : comparing large/huge _BitInt types, return the comparison code and if
394 : non-NULL fill in the comparison operands to *POP1 and *POP2. */
395 :
396 : tree_code
397 35258 : comparison_op (gimple *stmt, tree *pop1, tree *pop2)
398 : {
399 35258 : tree op1 = NULL_TREE, op2 = NULL_TREE;
400 35258 : tree_code code = ERROR_MARK;
401 35258 : if (gimple_code (stmt) == GIMPLE_COND)
402 : {
403 6507 : code = gimple_cond_code (stmt);
404 6507 : op1 = gimple_cond_lhs (stmt);
405 6507 : op2 = gimple_cond_rhs (stmt);
406 : }
407 28751 : else if (is_gimple_assign (stmt))
408 : {
409 28736 : code = gimple_assign_rhs_code (stmt);
410 28736 : op1 = gimple_assign_rhs1 (stmt);
411 28736 : if (TREE_CODE_CLASS (code) == tcc_comparison
412 28736 : || TREE_CODE_CLASS (code) == tcc_binary)
413 2170 : op2 = gimple_assign_rhs2 (stmt);
414 : }
415 35258 : if (TREE_CODE_CLASS (code) != tcc_comparison)
416 : return ERROR_MARK;
417 7281 : tree type = TREE_TYPE (op1);
418 7281 : if (TREE_CODE (type) != BITINT_TYPE
419 7281 : || bitint_precision_kind (type) < bitint_prec_large)
420 0 : return ERROR_MARK;
421 7281 : if (pop1)
422 : {
423 7219 : *pop1 = op1;
424 7219 : *pop2 = op2;
425 : }
426 : return code;
427 : }
428 :
/* Class used during large/huge _BitInt lowering containing all the
   state for the methods.  */

struct bitint_large_huge
{
  /* All pointer/vector members start empty; they are filled in lazily
     during lowering and released by the destructor.  */
  bitint_large_huge ()
    : m_names (NULL), m_loads (NULL), m_preserved (NULL),
      m_single_use_names (NULL), m_map (NULL), m_vars (NULL),
      m_limb_type (NULL_TREE), m_data (vNULL),
      m_returns_twice_calls (vNULL) {}

  ~bitint_large_huge ();

  void insert_before (gimple *);
  tree limb_access_type (tree, tree);
  tree limb_access (tree, tree, tree, bool, bool = false);
  tree build_bit_field_ref (tree, tree, unsigned HOST_WIDE_INT,
			    unsigned HOST_WIDE_INT);
  void if_then (gimple *, profile_probability, edge &, edge &);
  void if_then_else (gimple *, profile_probability, edge &, edge &);
  void if_then_if_then_else (gimple *g, gimple *,
			     profile_probability, profile_probability,
			     edge &, edge &, edge &);
  tree handle_operand (tree, tree);
  tree prepare_data_in_out (tree, tree, tree *, tree = NULL_TREE);
  tree add_cast (tree, tree);
  tree handle_plus_minus (tree_code, tree, tree, tree);
  tree handle_lshift (tree, tree, tree);
  tree handle_cast (tree, tree, tree);
  tree handle_bit_field_ref (tree, tree);
  tree handle_load (gimple *, tree);
  tree handle_stmt (gimple *, tree);
  tree handle_operand_addr (tree, gimple *, int *, int *);
  tree create_loop (tree, tree *);
  tree lower_mergeable_stmt (gimple *, tree_code &, tree, tree);
  tree lower_comparison_stmt (gimple *, tree_code &, tree, tree);
  void lower_shift_stmt (tree, gimple *);
  void lower_muldiv_stmt (tree, gimple *);
  void lower_float_conv_stmt (tree, gimple *);
  tree arith_overflow_extract_bits (unsigned int, unsigned int, tree,
				    unsigned int, bool);
  void finish_arith_overflow (tree, tree, tree, tree, tree, tree, gimple *,
			      unsigned, tree_code);
  void lower_addsub_overflow (tree, gimple *);
  void lower_mul_overflow (tree, gimple *);
  void lower_cplxpart_stmt (tree, gimple *);
  void lower_complexexpr_stmt (gimple *);
  void lower_bit_query (gimple *);
  void lower_call (tree, gimple *);
  void lower_asm (gimple *);
  void lower_stmt (gimple *);

  /* Bitmap of large/huge _BitInt SSA_NAMEs except those can be
     merged with their uses.  */
  bitmap m_names;
  /* Subset of those for lhs of load statements.  These will be
     cleared in m_names if the loads will be mergeable with all
     their uses.  */
  bitmap m_loads;
  /* Bitmap of large/huge _BitInt SSA_NAMEs that should survive
     to later passes (arguments or return values of calls).  */
  bitmap m_preserved;
  /* Subset of m_names which have a single use.  As the lowering
     can replace various original statements with their lowered
     form even before it is done iterating over all basic blocks,
     testing has_single_use for the purpose of emitting clobbers
     doesn't work properly.  */
  bitmap m_single_use_names;
  /* Used for coalescing/partitioning of large/huge _BitInt SSA_NAMEs
     set in m_names.  */
  var_map m_map;
  /* Mapping of the partitions to corresponding decls.  */
  tree *m_vars;
  /* Unsigned integer type with limb precision.  */
  tree m_limb_type;
  /* Its TYPE_SIZE_UNIT.  */
  unsigned HOST_WIDE_INT m_limb_size;
  /* Location of a gimple stmt which is being currently lowered.  */
  location_t m_loc;
  /* Current stmt iterator where code is being lowered currently.  */
  gimple_stmt_iterator m_gsi;
  /* Statement after which any clobbers should be added if non-NULL.  */
  gimple *m_after_stmt;
  /* Set when creating loops to the loop header bb and its preheader.  */
  basic_block m_bb, m_preheader_bb;
  /* Stmt iterator after which initialization statements should be emitted.  */
  gimple_stmt_iterator m_init_gsi;
  /* Decl into which a mergeable statement stores result.  */
  tree m_lhs;
  /* handle_operand/handle_stmt can be invoked in various ways.

     lower_mergeable_stmt for large _BitInt calls those with constant
     idx only, expanding to straight line code, for huge _BitInt
     emits a loop from least significant limb upwards, where each loop
     iteration handles 2 limbs, plus there can be up to one full limb
     and one partial limb processed after the loop, where handle_operand
     and/or handle_stmt are called with constant idx.  m_upwards_2limb
     is set for this case, false otherwise.  m_upwards is true if it
     is either large or huge _BitInt handled by lower_mergeable_stmt,
     i.e. indexes always increase.

     Another way is used by lower_comparison_stmt, which walks limbs
     from most significant to least significant, partial limb if any
     processed first with constant idx and then loop processing a single
     limb per iteration with non-constant idx.

     Another way is used in lower_shift_stmt, where for LSHIFT_EXPR
     destination limbs are processed from most significant to least
     significant or for RSHIFT_EXPR the other way around, in loops or
     straight line code, but idx usually is non-constant (so from
     handle_operand/handle_stmt POV random access).  The LSHIFT_EXPR
     handling there can access even partial limbs using non-constant
     idx (then m_var_msb should be true, for all the other cases
     including lower_mergeable_stmt/lower_comparison_stmt that is
     not the case and so m_var_msb should be false).

     m_first should be set the first time handle_operand/handle_stmt
     is called and clear when it is called for some other limb with
     the same argument.  If the lowering of an operand (e.g. INTEGER_CST)
     or statement (e.g. +/-/<< with < limb_prec constant) needs some
     state between the different calls, when m_first is true it should
     push some trees to m_data vector and also make sure m_data_cnt is
     incremented by how many trees were pushed, and when m_first is
     false, it can use the m_data[m_data_cnt] etc. data or update them,
     just needs to bump m_data_cnt by the same amount as when it was
     called with m_first set.  The toplevel calls to
     handle_operand/handle_stmt should set m_data_cnt to 0 and truncate
     m_data vector when setting m_first to true.

     m_cast_conditional and m_bitfld_load are used when handling a
     bit-field load inside of a widening cast.  handle_cast sometimes
     needs to do runtime comparisons and handle_operand only conditionally
     or even in two separate conditional blocks for one idx (once with
     constant index after comparing the runtime one for equality with the
     constant).  In these cases, m_cast_conditional is set to true and
     the bit-field load then communicates its m_data_cnt to handle_cast
     using m_bitfld_load.  */
  bool m_first;
  bool m_var_msb;
  unsigned m_upwards_2limb;
  bool m_upwards;
  bool m_cast_conditional;
  unsigned m_bitfld_load;
  vec<tree> m_data;
  unsigned int m_data_cnt;
  vec<gimple *> m_returns_twice_calls;
};
576 :
577 7159 : bitint_large_huge::~bitint_large_huge ()
578 : {
579 7159 : BITMAP_FREE (m_names);
580 7159 : BITMAP_FREE (m_loads);
581 7159 : BITMAP_FREE (m_preserved);
582 7159 : BITMAP_FREE (m_single_use_names);
583 7159 : if (m_map)
584 5522 : delete_var_map (m_map);
585 7159 : XDELETEVEC (m_vars);
586 7159 : m_data.release ();
587 7159 : m_returns_twice_calls.release ();
588 7159 : }
589 :
/* Insert gimple statement G before current location (m_gsi)
   and set its gimple_location to m_loc, the location of the
   statement currently being lowered.  */

void
bitint_large_huge::insert_before (gimple *g)
{
  gimple_set_location (g, m_loc);
  gsi_insert_before (&m_gsi, g, GSI_SAME_STMT);
}
599 :
/* Return type for accessing limb IDX of BITINT_TYPE TYPE.
   This is normally m_limb_type, except for a partial most
   significant limb if any.  IDX must be a constant;
   a NULL TYPE means a full limb is accessed.  */

tree
bitint_large_huge::limb_access_type (tree type, tree idx)
{
  if (type == NULL_TREE)
    return m_limb_type;
  unsigned HOST_WIDE_INT i = tree_to_uhwi (idx);
  unsigned int prec = TYPE_PRECISION (type);
  /* IDX must be within the limbs of TYPE (for bitint_ext_full with
     wider ABI limbs, within the padded ABI size).  */
  gcc_assert (i * limb_prec < prec
	      || (bitint_extended == bitint_ext_full
		  && abi_limb_prec > limb_prec
		  && i * limb_prec
		     < CEIL (prec, abi_limb_prec) * abi_limb_prec));
  /* The partial limb, if PREC isn't a limb multiple, is limb 0 for
     big endian and the most significant limb otherwise.  */
  if (bitint_big_endian
      ? (i != 0 || (prec % limb_prec) == 0)
      : (i + 1) * limb_prec <= prec)
    return m_limb_type;
  else
    return build_nonstandard_integer_type (prec % limb_prec,
					   TYPE_UNSIGNED (type));
}
624 :
/* Return a tree how to access limb IDX of VAR corresponding to BITINT_TYPE
   TYPE.  If WRITE_P is true, it will be a store, otherwise a read.
   When ABI_LOAD_P is true and the ABI extends partial limbs, the access
   uses the (possibly narrower) limb access type directly.  */

tree
bitint_large_huge::limb_access (tree type, tree var, tree idx, bool write_p,
				bool abi_load_p)
{
  /* Type of the value accessed; a non-constant IDX always accesses
     a full limb.  */
  tree atype = (tree_fits_uhwi_p (idx)
		? limb_access_type (type, idx) : m_limb_type);
  /* Type used for the memory access itself.  */
  tree ltype = (bitint_extended && abi_load_p) ? atype : m_limb_type;
  addr_space_t as = TYPE_ADDR_SPACE (TREE_TYPE (var));
  tree ret;
  if (DECL_P (var) && tree_fits_uhwi_p (idx))
    {
      /* Decl with constant index: build a MEM_REF at a constant byte
	 offset from &VAR.  */
      if (as != TYPE_ADDR_SPACE (ltype))
	ltype = build_qualified_type (ltype, TYPE_QUALS (ltype)
				      | ENCODE_QUAL_ADDR_SPACE (as));
      tree ptype = build_pointer_type (strip_array_types (TREE_TYPE (var)));
      unsigned HOST_WIDE_INT off = tree_to_uhwi (idx) * m_limb_size;
      if (bitint_big_endian)
	/* A narrower-than-limb access reads the least significant end,
	   which for big endian sits at the end of the limb.  */
	off += m_limb_size - tree_to_uhwi (TYPE_SIZE_UNIT (ltype));
      ret = build2 (MEM_REF, ltype,
		    build_fold_addr_expr (var),
		    build_int_cst (ptype, off));
      TREE_THIS_VOLATILE (ret) = TREE_THIS_VOLATILE (var);
      TREE_SIDE_EFFECTS (ret) = TREE_SIDE_EFFECTS (var);
    }
  else if (TREE_CODE (var) == MEM_REF && tree_fits_uhwi_p (idx))
    {
      /* Existing MEM_REF with constant index: fold the limb offset
	 into the MEM_REF's offset operand.  */
      if (as != TYPE_ADDR_SPACE (ltype))
	ltype = build_qualified_type (ltype, TYPE_QUALS (ltype)
				      | ENCODE_QUAL_ADDR_SPACE (as));
      unsigned HOST_WIDE_INT off = tree_to_uhwi (idx) * m_limb_size;
      if (bitint_big_endian)
	off += m_limb_size - tree_to_uhwi (TYPE_SIZE_UNIT (ltype));
      ret
	= build2 (MEM_REF, ltype, unshare_expr (TREE_OPERAND (var, 0)),
		  size_binop (PLUS_EXPR, TREE_OPERAND (var, 1),
			      build_int_cst (TREE_TYPE (TREE_OPERAND (var, 1)),
					     off)));
      TREE_THIS_VOLATILE (ret) = TREE_THIS_VOLATILE (var);
      TREE_SIDE_EFFECTS (ret) = TREE_SIDE_EFFECTS (var);
      TREE_THIS_NOTRAP (ret) = TREE_THIS_NOTRAP (var);
    }
  else
    {
      /* Otherwise (e.g. non-constant IDX) view the object as an array
	 of limbs and build an ARRAY_REF.  */
      ltype = m_limb_type;
      if (as != TYPE_ADDR_SPACE (ltype))
	ltype = build_qualified_type (ltype, TYPE_QUALS (ltype)
				      | ENCODE_QUAL_ADDR_SPACE (as));
      var = unshare_expr (var);
      if (TREE_CODE (TREE_TYPE (var)) != ARRAY_TYPE
	  || !useless_type_conversion_p (m_limb_type,
					 TREE_TYPE (TREE_TYPE (var))))
	{
	  unsigned HOST_WIDE_INT nelts
	    = CEIL (tree_to_uhwi (TYPE_SIZE (TREE_TYPE (var))), limb_prec);
	  tree atype = build_array_type_nelts (ltype, nelts);
	  var = build1 (VIEW_CONVERT_EXPR, atype, var);
	}
      ret = build4 (ARRAY_REF, ltype, var, idx, NULL_TREE, NULL_TREE);
    }
  if (!write_p && !useless_type_conversion_p (atype, ltype))
    {
      /* Reads of a partial limb through a full-limb access need a
	 narrowing conversion to the limb access type.  */
      gimple *g = gimple_build_assign (make_ssa_name (m_limb_type), ret);
      insert_before (g);
      ret = gimple_assign_lhs (g);
      ret = build1 (NOP_EXPR, atype, ret);
    }
  return ret;
}
698 :
/* Build a BIT_FIELD_REF to access BITSIZE bits with FTYPE type at
   offset BITPOS inside of OBJ.  */

tree
bitint_large_huge::build_bit_field_ref (tree ftype, tree obj,
					unsigned HOST_WIDE_INT bitsize,
					unsigned HOST_WIDE_INT bitpos)
{
  /* BIT_FIELD_REF requires a mode-precision base; view a
     non-mode-precision integral OBJ as an array of limbs first.  */
  if (INTEGRAL_TYPE_P (TREE_TYPE (obj))
      && !type_has_mode_precision_p (TREE_TYPE (obj)))
    {
      unsigned HOST_WIDE_INT nelts
	= CEIL (tree_to_uhwi (TYPE_SIZE (TREE_TYPE (obj))), limb_prec);
      tree ltype = m_limb_type;
      addr_space_t as = TYPE_ADDR_SPACE (TREE_TYPE (obj));
      if (as != TYPE_ADDR_SPACE (ltype))
	ltype = build_qualified_type (ltype, TYPE_QUALS (ltype)
				      | ENCODE_QUAL_ADDR_SPACE (as));
      tree atype = build_array_type_nelts (ltype, nelts);
      obj = build1 (VIEW_CONVERT_EXPR, atype, obj);
    }
  return build3 (BIT_FIELD_REF, ftype, obj, bitsize_int (bitsize),
		 bitsize_int (bitpos));
}
723 :
/* Emit a half diamond,
   if (COND)
   |\
   | \
   |  \
   | new_bb1
   |  /
   | /
   |/
   or if (COND) new_bb1;
   PROB is the probability that the condition is true.
   Updates m_gsi to start of new_bb1.
   Sets EDGE_TRUE to edge from new_bb1 to successor and
   EDGE_FALSE to the EDGE_FALSE_VALUE edge from if (COND) bb.  */

void
bitint_large_huge::if_then (gimple *cond, profile_probability prob,
			    edge &edge_true, edge &edge_false)
{
  insert_before (cond);
  /* Split twice: e1->dest becomes new_bb1, e2->dest the join bb.  */
  edge e1 = split_block (gsi_bb (m_gsi), cond);
  edge e2 = split_block (e1->dest, (gimple *) NULL);
  /* The false edge bypasses new_bb1 straight to the join bb.  */
  edge e3 = make_edge (e1->src, e2->dest, EDGE_FALSE_VALUE);
  e1->flags = EDGE_TRUE_VALUE;
  e1->probability = prob;
  e3->probability = prob.invert ();
  set_immediate_dominator (CDI_DOMINATORS, e2->dest, e1->src);
  edge_true = e2;
  edge_false = e3;
  m_gsi = gsi_after_labels (e1->dest);
}
755 :
/* Emit a full diamond,
   if (COND)
     /\
    /  \
   /    \
   new_bb1 new_bb2
   \    /
    \  /
     \/
   or if (COND) new_bb2; else new_bb1;
   PROB is the probability that the condition is true.
   Updates m_gsi to start of new_bb2.
   Sets EDGE_TRUE to edge from new_bb1 to successor and
   EDGE_FALSE to the EDGE_FALSE_VALUE edge from if (COND) bb.  */

void
bitint_large_huge::if_then_else (gimple *cond, profile_probability prob,
				 edge &edge_true, edge &edge_false)
{
  insert_before (cond);
  /* e1->dest becomes new_bb1 (false arm), e2->dest the join bb.  */
  edge e1 = split_block (gsi_bb (m_gsi), cond);
  edge e2 = split_block (e1->dest, (gimple *) NULL);
  /* BB is new_bb2, the true arm.  */
  basic_block bb = create_empty_bb (e1->dest);
  add_bb_to_loop (bb, e1->dest->loop_father);
  edge e3 = make_edge (e1->src, bb, EDGE_TRUE_VALUE);
  e1->flags = EDGE_FALSE_VALUE;
  e3->probability = prob;
  e1->probability = prob.invert ();
  bb->count = e1->src->count.apply_probability (prob);
  set_immediate_dominator (CDI_DOMINATORS, bb, e1->src);
  set_immediate_dominator (CDI_DOMINATORS, e2->dest, e1->src);
  edge_true = make_single_succ_edge (bb, e2->dest, EDGE_FALLTHRU);
  edge_false = e2;
  m_gsi = gsi_after_labels (bb);
}
791 :
/* Emit a half diamond with full diamond in it
   if (COND1)
   |\
   | \
   |  \
   | if (COND2)
   | /  \
   |/    \
   |new_bb1 new_bb2
   | |    /
   \ |   /
    \|  /
     \ | /
      \|/
   or if (COND1) { if (COND2) new_bb2; else new_bb1; }
   PROB1 is the probability that the condition 1 is true.
   PROB2 is the probability that the condition 2 is true.
   Updates m_gsi to start of new_bb1.
   Sets EDGE_TRUE_TRUE to edge from new_bb2 to successor,
   EDGE_TRUE_FALSE to edge from new_bb1 to successor and
   EDGE_FALSE to the EDGE_FALSE_VALUE edge from if (COND1) bb.
   If COND2 is NULL, this is equivalent to
   if_then (COND1, PROB1, EDGE_TRUE_FALSE, EDGE_FALSE);
   EDGE_TRUE_TRUE = NULL;  */

void
bitint_large_huge::if_then_if_then_else (gimple *cond1, gimple *cond2,
					 profile_probability prob1,
					 profile_probability prob2,
					 edge &edge_true_true,
					 edge &edge_true_false,
					 edge &edge_false)
{
  edge e2, e3, e4 = NULL;
  /* Outer half diamond for COND1.  */
  if_then (cond1, prob1, e2, e3);
  if (cond2 == NULL)
    {
      edge_true_true = NULL;
      edge_true_false = e2;
      edge_false = e3;
      return;
    }
  /* Inner full diamond for COND2; BB is new_bb2, the COND2-true arm.  */
  insert_before (cond2);
  e2 = split_block (gsi_bb (m_gsi), cond2);
  basic_block bb = create_empty_bb (e2->dest);
  add_bb_to_loop (bb, e2->dest->loop_father);
  e4 = make_edge (e2->src, bb, EDGE_TRUE_VALUE);
  set_immediate_dominator (CDI_DOMINATORS, bb, e2->src);
  e4->probability = prob2;
  e2->flags = EDGE_FALSE_VALUE;
  e2->probability = prob2.invert ();
  bb->count = e2->src->count.apply_probability (prob2);
  e4 = make_single_succ_edge (bb, e3->dest, EDGE_FALLTHRU);
  e2 = find_edge (e2->dest, e3->dest);
  edge_true_true = e4;
  edge_true_false = e2;
  edge_false = e3;
  m_gsi = gsi_after_labels (e2->src);
}
851 :
852 : /* Emit code to access limb IDX from OP. */
853 :
tree
bitint_large_huge::handle_operand (tree op, tree idx)
{
  switch (TREE_CODE (op))
    {
    case SSA_NAME:
      /* Names not recorded in m_names don't have a backing partition
	 variable; they are either default defs or defined by a statement
	 that is lowered inline as part of the current statement.  */
      if (m_names == NULL
	  || !bitmap_bit_p (m_names, SSA_NAME_VERSION (op)))
	{
	  if (SSA_NAME_IS_DEFAULT_DEF (op))
	    {
	      /* Uninitialized operand: create (once, when m_first) a
		 single limb-sized anonymous default def, remember it in
		 m_data and return that same limb for every index.  */
	      if (m_first)
		{
		  tree v = create_tmp_reg (m_limb_type);
		  if (SSA_NAME_VAR (op) && VAR_P (SSA_NAME_VAR (op)))
		    {
		      /* Preserve the user-visible name/location for
			 diagnostics and debug info.  */
		      DECL_NAME (v) = DECL_NAME (SSA_NAME_VAR (op));
		      DECL_SOURCE_LOCATION (v)
			= DECL_SOURCE_LOCATION (SSA_NAME_VAR (op));
		    }
		  v = get_or_create_ssa_default_def (cfun, v);
		  m_data.safe_push (v);
		}
	      tree ret = m_data[m_data_cnt];
	      m_data_cnt++;
	      if (tree_fits_uhwi_p (idx))
		{
		  /* At a constant index the limb access type may be
		     narrower (partial most significant limb).  */
		  tree type = limb_access_type (TREE_TYPE (op), idx);
		  ret = add_cast (type, ret);
		}
	      return ret;
	    }
	  /* Lower the defining statement inline, temporarily switching
	     the emitted-statement location to that of the def stmt.  */
	  location_t loc_save = m_loc;
	  m_loc = gimple_location (SSA_NAME_DEF_STMT (op));
	  tree ret = handle_stmt (SSA_NAME_DEF_STMT (op), idx);
	  m_loc = loc_save;
	  return ret;
	}
      int p;
      gimple *g;
      tree t;
      /* Name with a backing variable: load limb IDX from the variable of
	 its coalesced partition.  */
      p = var_to_partition (m_map, op);
      gcc_assert (m_vars[p] != NULL_TREE);
      t = limb_access (TREE_TYPE (op), m_vars[p], idx, false);
      g = gimple_build_assign (make_ssa_name (TREE_TYPE (t)), t);
      insert_before (g);
      t = gimple_assign_lhs (g);
      if (m_first
	  && m_single_use_names
	  && m_vars[p] != m_lhs
	  && m_after_stmt
	  && bitmap_bit_p (m_single_use_names, SSA_NAME_VERSION (op)))
	{
	  /* This was the only use of the name; clobber its backing
	     variable right after the current statement so the storage
	     can be reused.  Done only once (m_first).  */
	  tree clobber = build_clobber (TREE_TYPE (m_vars[p]),
					CLOBBER_STORAGE_END);
	  g = gimple_build_assign (m_vars[p], clobber);
	  gimple_stmt_iterator gsi = gsi_for_stmt (m_after_stmt);
	  gsi_insert_after (&gsi, g, GSI_SAME_STMT);
	}
      return t;
    case INTEGER_CST:
      if (tree_fits_uhwi_p (idx))
	{
	  /* Constant limb at a compile-time-known index: extract it
	     without emitting any code.  Two m_data slots are still
	     pushed/skipped so m_data_cnt stays in sync with the
	     variable-index path below.  */
	  tree c, type = limb_access_type (TREE_TYPE (op), idx);
	  unsigned HOST_WIDE_INT i = tree_to_uhwi (idx);
	  if (m_first)
	    {
	      m_data.safe_push (NULL_TREE);
	      m_data.safe_push (NULL_TREE);
	    }
	  if (bitint_big_endian)
	    i = CEIL (TYPE_PRECISION (TREE_TYPE (op)), limb_prec) - 1 - i;
	  if (limb_prec != HOST_BITS_PER_WIDE_INT)
	    {
	      /* Limb size differs from HOST_WIDE_INT: extract the bits
		 via wide_int shifting.  */
	      wide_int w = wi::rshift (wi::to_wide (op), i * limb_prec,
				       TYPE_SIGN (TREE_TYPE (op)));
	      c = wide_int_to_tree (type,
				    wide_int::from (w, TYPE_PRECISION (type),
						    UNSIGNED));
	    }
	  else if (i >= TREE_INT_CST_EXT_NUNITS (op))
	    /* Past the stored elements: the value is all sign bits.  */
	    c = build_int_cst (type,
			       tree_int_cst_sgn (op) < 0 ? -1 : 0);
	  else
	    c = build_int_cst (type, TREE_INT_CST_ELT (op, i));
	  m_data_cnt += 2;
	  return c;
	}
      /* Variable index into a constant.  On the first visit pick a
	 materialization strategy and record it in the two m_data slots;
	 later visits replay the recorded choice.  */
      if (m_first
	  || (m_data[m_data_cnt] == NULL_TREE
	      && m_data[m_data_cnt + 1] == NULL_TREE))
	{
	  unsigned int prec = TYPE_PRECISION (TREE_TYPE (op));
	  unsigned int rem = prec % ((m_upwards_2limb ? 2 : 1) * limb_prec);
	  int ext;
	  unsigned min_prec = bitint_min_cst_precision (op, ext);
	  if (m_first)
	    {
	      m_data.safe_push (NULL_TREE);
	      m_data.safe_push (NULL_TREE);
	    }
	  if (integer_zerop (op))
	    {
	      /* All limbs are 0; every index yields the same constant.  */
	      tree c = build_zero_cst (m_limb_type);
	      m_data[m_data_cnt] = c;
	      m_data[m_data_cnt + 1] = c;
	    }
	  else if (integer_all_onesp (op))
	    {
	      /* All limbs are ~0.  */
	      tree c = build_all_ones_cst (m_limb_type);
	      m_data[m_data_cnt] = c;
	      m_data[m_data_cnt + 1] = c;
	    }
	  else if (m_upwards_2limb && min_prec <= (unsigned) limb_prec)
	    {
	      /* Single limb constant.  Use a phi with that limb from
		 the preheader edge and 0 or -1 constant from the other edge
		 and for the second limb in the loop.  */
	      tree out;
	      gcc_assert (m_first);
	      m_data.pop ();
	      m_data.pop ();
	      prepare_data_in_out (fold_convert (m_limb_type, op), idx, &out,
				   build_int_cst (m_limb_type, ext));
	    }
	  else if (min_prec > prec - rem - 2 * limb_prec)
	    {
	      /* Constant which has enough significant bits that it isn't
		 worth trying to save .rodata space by extending from smaller
		 number.  */
	      tree type;
	      if (m_var_msb)
		type = TREE_TYPE (op);
	      else
		/* If we have a guarantee the most significant partial limb
		   (if any) will be only accessed through handle_operand
		   with INTEGER_CST idx, we don't need to include the partial
		   limb in .rodata.  */
		type = build_bitint_type (prec - rem, 1);
	      tree c = tree_output_constant_def (fold_convert (type, op));
	      /* NULL in the second slot marks the plain .rodata-access
		 strategy handled after this if chain.  */
	      m_data[m_data_cnt] = c;
	      m_data[m_data_cnt + 1] = NULL_TREE;
	    }
	  else if (m_upwards_2limb)
	    {
	      /* Constant with smaller number of bits.  Trade conditional
		 code for .rodata space by extending from smaller number.  */
	      min_prec = CEIL (min_prec, 2 * limb_prec) * (2 * limb_prec);
	      tree type = build_bitint_type (min_prec, 1);
	      tree c = tree_output_constant_def (fold_convert (type, op));
	      tree ridx = idx;
	      if (bitint_big_endian)
		{
		  /* For big-endian limb order rebase the index into the
		     narrower .rodata constant.  */
		  ridx = make_ssa_name (sizetype);
		  g = gimple_build_assign (ridx, PLUS_EXPR, idx,
					   size_int (min_prec / limb_prec
						     - ((HOST_WIDE_INT)
							CEIL (prec,
							      limb_prec))));
		  insert_before (g);
		}
	      tree ridx2 = make_ssa_name (sizetype);
	      g = gimple_build_assign (ridx2, PLUS_EXPR, ridx,
				       bitint_big_endian
				       ? size_int (-1) : size_one_node);
	      insert_before (g);
	      /* Guard: only read from the constant while the index is
		 within its limbs; otherwise use the extension value.  */
	      if (bitint_big_endian)
		g = gimple_build_cond (GE_EXPR, idx,
				       size_int (CEIL (prec, limb_prec)
						 - min_prec / limb_prec),
				       NULL_TREE, NULL_TREE);
	      else
		g = gimple_build_cond (LT_EXPR, idx,
				       size_int (min_prec / limb_prec),
				       NULL_TREE, NULL_TREE);
	      edge edge_true, edge_false;
	      if_then (g, (min_prec >= (prec - rem) / 2
			   ? profile_probability::likely ()
			   : profile_probability::unlikely ()),
		       edge_true, edge_false);
	      /* Load both limbs of the current 2-limb iteration.  */
	      tree c1 = limb_access (TREE_TYPE (op), c, ridx, false);
	      g = gimple_build_assign (make_ssa_name (TREE_TYPE (c1)), c1);
	      insert_before (g);
	      c1 = gimple_assign_lhs (g);
	      tree c2 = limb_access (TREE_TYPE (op), c, ridx2, false);
	      g = gimple_build_assign (make_ssa_name (TREE_TYPE (c2)), c2);
	      insert_before (g);
	      c2 = gimple_assign_lhs (g);
	      /* The extension limb (0 or -1) used past min_prec.  */
	      tree c3 = build_int_cst (m_limb_type, ext);
	      m_gsi = gsi_after_labels (edge_true->dest);
	      m_data[m_data_cnt] = make_ssa_name (m_limb_type);
	      m_data[m_data_cnt + 1] = make_ssa_name (m_limb_type);
	      gphi *phi = create_phi_node (m_data[m_data_cnt],
					   edge_true->dest);
	      add_phi_arg (phi, c1, edge_true, UNKNOWN_LOCATION);
	      add_phi_arg (phi, c3, edge_false, UNKNOWN_LOCATION);
	      phi = create_phi_node (m_data[m_data_cnt + 1], edge_true->dest);
	      add_phi_arg (phi, c2, edge_true, UNKNOWN_LOCATION);
	      add_phi_arg (phi, c3, edge_false, UNKNOWN_LOCATION);
	    }
	  else
	    {
	      /* Constant with smaller number of bits.  Trade conditional
		 code for .rodata space by extending from smaller number.
		 Version for loops with random access to the limbs or
		 downwards loops.  */
	      min_prec = CEIL (min_prec, limb_prec) * limb_prec;
	      tree c;
	      if (min_prec <= (unsigned) limb_prec)
		c = fold_convert (m_limb_type, op);
	      else
		{
		  tree type = build_bitint_type (min_prec, 1);
		  c = tree_output_constant_def (fold_convert (type, op));
		}
	      /* integer_type_node in the second slot marks this guarded
		 random-access strategy, replayed below.  */
	      m_data[m_data_cnt] = c;
	      m_data[m_data_cnt + 1] = integer_type_node;
	    }
	  t = m_data[m_data_cnt];
	}
      else
	t = m_data[m_data_cnt + 1];
      if (m_data[m_data_cnt + 1] == NULL_TREE)
	{
	  /* Plain strategy: unconditionally index the .rodata copy.  */
	  tree ridx = idx;
	  unsigned int prec = TYPE_PRECISION (TREE_TYPE (op));
	  tree c = m_data[m_data_cnt];
	  unsigned int min_prec = TYPE_PRECISION (TREE_TYPE (c));
	  if (bitint_big_endian
	      && CEIL (min_prec, limb_prec) != CEIL (prec, limb_prec))
	    {
	      /* Rebase the limb index if the constant was stored with
		 fewer limbs (big-endian limb order only).  */
	      ridx = make_ssa_name (sizetype);
	      g = gimple_build_assign (ridx, PLUS_EXPR, idx,
				       size_int (CEIL (min_prec, limb_prec)
						 - ((HOST_WIDE_INT)
						    CEIL (prec, limb_prec))));
	      insert_before (g);
	    }
	  t = limb_access (TREE_TYPE (op), c, ridx, false);
	  g = gimple_build_assign (make_ssa_name (TREE_TYPE (t)), t);
	  insert_before (g);
	  t = gimple_assign_lhs (g);
	}
      else if (m_data[m_data_cnt + 1] == integer_type_node)
	{
	  /* Guarded strategy: read the narrow constant while in range,
	     otherwise produce the 0/-1 extension limb, merged by a phi.  */
	  unsigned int prec = TYPE_PRECISION (TREE_TYPE (op));
	  unsigned rem = prec % ((m_upwards_2limb ? 2 : 1) * limb_prec);
	  int ext = wi::neg_p (wi::to_wide (op)) ? -1 : 0;
	  tree c = m_data[m_data_cnt];
	  unsigned min_prec = TYPE_PRECISION (TREE_TYPE (c));
	  if (bitint_big_endian)
	    g = gimple_build_cond (GE_EXPR, idx,
				   size_int (CEIL (prec, limb_prec)
					     - min_prec / limb_prec),
				   NULL_TREE, NULL_TREE);
	  else
	    g = gimple_build_cond (LT_EXPR, idx,
				   size_int (min_prec / limb_prec),
				   NULL_TREE, NULL_TREE);
	  edge edge_true, edge_false;
	  if_then (g, (min_prec >= (prec - rem) / 2
		       ? profile_probability::likely ()
		       : profile_probability::unlikely ()),
		   edge_true, edge_false);
	  if (min_prec > (unsigned) limb_prec)
	    {
	      /* Multi-limb .rodata constant: load limb IDX from it.
		 A single-limb constant (c itself a limb value) needs no
		 load.  */
	      tree ridx = idx;
	      if (bitint_big_endian)
		{
		  ridx = make_ssa_name (sizetype);
		  g = gimple_build_assign (ridx, PLUS_EXPR, idx,
					   size_int (min_prec / limb_prec
						     - ((HOST_WIDE_INT)
							CEIL (prec,
							      limb_prec))));
		  insert_before (g);
		}
	      c = limb_access (TREE_TYPE (op), c, ridx, false);
	      g = gimple_build_assign (make_ssa_name (TREE_TYPE (c)), c);
	      insert_before (g);
	      c = gimple_assign_lhs (g);
	    }
	  tree c2 = build_int_cst (m_limb_type, ext);
	  m_gsi = gsi_after_labels (edge_true->dest);
	  t = make_ssa_name (m_limb_type);
	  gphi *phi = create_phi_node (t, edge_true->dest);
	  add_phi_arg (phi, c, edge_true, UNKNOWN_LOCATION);
	  add_phi_arg (phi, c2, edge_false, UNKNOWN_LOCATION);
	}
      m_data_cnt += 2;
      return t;
    default:
      gcc_unreachable ();
    }
}
1149 :
1150 : /* Helper method, add a PHI node with VAL from preheader edge if
1151 : inside of a loop and m_first. Keep state in a pair of m_data
1152 : elements. If VAL_OUT is non-NULL, use that as PHI argument from
1153 : the latch edge, otherwise create a new SSA_NAME for it and let
1154 : caller initialize it. */
1155 :
tree
bitint_large_huge::prepare_data_in_out (tree val, tree idx, tree *data_out,
					tree val_out)
{
  if (!m_first)
    {
      /* Replay: the pair was recorded on the first visit; slot 0 is the
	 in-value, slot 1 the out-value (NULL when IDX was constant and
	 no loop-carried state exists).  */
      *data_out = tree_fits_uhwi_p (idx) ? NULL_TREE : m_data[m_data_cnt + 1];
      return m_data[m_data_cnt];
    }

  *data_out = NULL_TREE;
  if (tree_fits_uhwi_p (idx))
    {
      /* Constant index means straight line code; no PHI is needed, just
	 record VAL so later visits return the same tree.  */
      m_data.safe_push (val);
      m_data.safe_push (NULL_TREE);
      return val;
    }

  /* Variable index: we are inside the loop with header m_bb.  Create
     a PHI merging VAL from the preheader with OUT from the latch.  */
  tree in = make_ssa_name (TREE_TYPE (val));
  gphi *phi = create_phi_node (in, m_bb);
  edge e1 = find_edge (m_preheader_bb, m_bb);
  edge e2 = EDGE_PRED (m_bb, 0);
  /* The latch edge is whichever predecessor edge isn't the preheader.  */
  if (e1 == e2)
    e2 = EDGE_PRED (m_bb, 1);
  add_phi_arg (phi, val, e1, UNKNOWN_LOCATION);
  /* If the caller didn't supply the latch value, make a fresh SSA name
     the caller is expected to define later.  */
  tree out = val_out ? val_out : make_ssa_name (TREE_TYPE (val));
  add_phi_arg (phi, out, e2, UNKNOWN_LOCATION);
  m_data.safe_push (in);
  m_data.safe_push (out);
  return in;
}
1187 :
1188 : /* Return VAL cast to TYPE. If VAL is INTEGER_CST, just
1189 : convert it without emitting any code, otherwise emit
1190 : the conversion statement before the current location. */
1191 :
1192 : tree
1193 37501 : bitint_large_huge::add_cast (tree type, tree val)
1194 : {
1195 37501 : if (TREE_CODE (val) == INTEGER_CST)
1196 4494 : return fold_convert (type, val);
1197 :
1198 33007 : tree lhs = make_ssa_name (type);
1199 33007 : gimple *g = gimple_build_assign (lhs, NOP_EXPR, val);
1200 33007 : insert_before (g);
1201 33007 : return lhs;
1202 : }
1203 :
1204 : /* Helper of handle_stmt method, handle PLUS_EXPR or MINUS_EXPR. */
1205 :
tree
bitint_large_huge::handle_plus_minus (tree_code code, tree rhs1, tree rhs2,
				      tree idx)
{
  tree lhs, data_out, ctype;
  tree rhs1_type = TREE_TYPE (rhs1);
  gimple *g;
  /* data_in/data_out is the carry (or borrow) propagated between limbs;
     it starts as 0 on the first limb.  */
  tree data_in = prepare_data_in_out (build_zero_cst (m_limb_type), idx,
				      &data_out);

  /* Preferred strategy: the target has an add/sub-with-carry pattern
     (uaddc5/usubc5), so use the IFN_UADDC/IFN_USUBC internal fns which
     take the carry-in and return a complex { result, carry-out }.  */
  if (optab_handler (code == PLUS_EXPR ? uaddc5_optab : usubc5_optab,
		     TYPE_MODE (m_limb_type)) != CODE_FOR_nothing)
    {
      ctype = build_complex_type (m_limb_type);
      if (!types_compatible_p (rhs1_type, m_limb_type))
	{
	  /* Partial most significant limb: widen operands to the full
	     limb type, going through the unsigned variant first for
	     signed types to get zero/sign handling right.  */
	  if (!TYPE_UNSIGNED (rhs1_type))
	    {
	      tree type = unsigned_type_for (rhs1_type);
	      rhs1 = add_cast (type, rhs1);
	      rhs2 = add_cast (type, rhs2);
	    }
	  rhs1 = add_cast (m_limb_type, rhs1);
	  rhs2 = add_cast (m_limb_type, rhs2);
	}
      lhs = make_ssa_name (ctype);
      g = gimple_build_call_internal (code == PLUS_EXPR
				      ? IFN_UADDC : IFN_USUBC,
				      3, rhs1, rhs2, data_in);
      gimple_call_set_lhs (g, lhs);
      insert_before (g);
      if (data_out == NULL_TREE)
	data_out = make_ssa_name (m_limb_type);
      /* Carry-out is the imaginary part of the complex result.  */
      g = gimple_build_assign (data_out, IMAGPART_EXPR,
			       build1 (IMAGPART_EXPR, m_limb_type, lhs));
      insert_before (g);
    }
  /* Fallback for full limbs without a carry pattern: chain two
     IFN_ADD_OVERFLOW/IFN_SUB_OVERFLOW calls (operands, then carry-in)
     and sum the two overflow bits into the carry-out.  */
  else if (types_compatible_p (rhs1_type, m_limb_type))
    {
      ctype = build_complex_type (m_limb_type);
      lhs = make_ssa_name (ctype);
      g = gimple_build_call_internal (code == PLUS_EXPR
				      ? IFN_ADD_OVERFLOW : IFN_SUB_OVERFLOW,
				      2, rhs1, rhs2);
      gimple_call_set_lhs (g, lhs);
      insert_before (g);
      if (data_out == NULL_TREE)
	data_out = make_ssa_name (m_limb_type);
      if (!integer_zerop (data_in))
	{
	  rhs1 = make_ssa_name (m_limb_type);
	  g = gimple_build_assign (rhs1, REALPART_EXPR,
				   build1 (REALPART_EXPR, m_limb_type, lhs));
	  insert_before (g);
	  rhs2 = make_ssa_name (m_limb_type);
	  g = gimple_build_assign (rhs2, IMAGPART_EXPR,
				   build1 (IMAGPART_EXPR, m_limb_type, lhs));
	  insert_before (g);
	  /* Second overflow op folds the incoming carry into the
	     intermediate result.  */
	  lhs = make_ssa_name (ctype);
	  g = gimple_build_call_internal (code == PLUS_EXPR
					  ? IFN_ADD_OVERFLOW
					  : IFN_SUB_OVERFLOW,
					  2, rhs1, data_in);
	  gimple_call_set_lhs (g, lhs);
	  insert_before (g);
	  data_in = make_ssa_name (m_limb_type);
	  g = gimple_build_assign (data_in, IMAGPART_EXPR,
				   build1 (IMAGPART_EXPR, m_limb_type, lhs));
	  insert_before (g);
	  /* At most one of the two overflow bits can be set, so their
	     sum is the combined carry-out.  */
	  g = gimple_build_assign (data_out, PLUS_EXPR, rhs2, data_in);
	  insert_before (g);
	}
      else
	{
	  /* Known-zero carry-in: the first op's overflow bit is the
	     whole carry-out.  */
	  g = gimple_build_assign (data_out, IMAGPART_EXPR,
				   build1 (IMAGPART_EXPR, m_limb_type, lhs));
	  insert_before (g);
	}
    }
  else
    {
      /* Partial (most significant) limb without a carry pattern: do
	 plain arithmetic and add in the carry; no carry-out is produced
	 (NULL is stored in m_data), as no further limb follows.  */
      tree in = add_cast (rhs1_type, data_in);
      lhs = make_ssa_name (rhs1_type);
      g = gimple_build_assign (lhs, code, rhs1, rhs2);
      insert_before (g);
      rhs1 = make_ssa_name (rhs1_type);
      g = gimple_build_assign (rhs1, code, lhs, in);
      insert_before (g);
      m_data[m_data_cnt] = NULL_TREE;
      m_data_cnt += 2;
      return rhs1;
    }
  /* Common tail for the complex-returning strategies: extract the
     result limb and narrow it back if the operands were widened.  */
  rhs1 = make_ssa_name (m_limb_type);
  g = gimple_build_assign (rhs1, REALPART_EXPR,
			   build1 (REALPART_EXPR, m_limb_type, lhs));
  insert_before (g);
  if (!types_compatible_p (rhs1_type, m_limb_type))
    rhs1 = add_cast (rhs1_type, rhs1);
  m_data[m_data_cnt] = data_out;
  m_data_cnt += 2;
  return rhs1;
}
1308 :
1309 : /* Helper function for handle_stmt method, handle LSHIFT_EXPR by
1310 : count in [0, limb_prec - 1] range. */
1311 :
tree
bitint_large_huge::handle_lshift (tree rhs1, tree rhs2, tree idx)
{
  unsigned HOST_WIDE_INT cnt = tree_to_uhwi (rhs2);
  gcc_checking_assert (cnt < (unsigned) limb_prec);
  /* Shift by 0 within a limb is an identity; no state needed.  */
  if (cnt == 0)
    return rhs1;

  tree lhs, data_out, rhs1_type = TREE_TYPE (rhs1);
  gimple *g;
  /* data_in is the previous (less significant) limb carried between
     iterations; 0 before the first limb.  */
  tree data_in = prepare_data_in_out (build_zero_cst (m_limb_type), idx,
				      &data_out);

  if (!integer_zerop (data_in))
    {
      /* Bits shifted out of the previous limb become the low CNT bits
	 of this limb.  */
      lhs = make_ssa_name (m_limb_type);
      g = gimple_build_assign (lhs, RSHIFT_EXPR, data_in,
			       build_int_cst (unsigned_type_node,
					      limb_prec - cnt));
      insert_before (g);
      if (!types_compatible_p (rhs1_type, m_limb_type))
	lhs = add_cast (rhs1_type, lhs);
      data_in = lhs;
    }
  if (types_compatible_p (rhs1_type, m_limb_type))
    {
      /* Full limb: save the unshifted limb as carry for the next
	 iteration.  (A partial most significant limb has no successor,
	 so nothing is saved then.)  */
      if (data_out == NULL_TREE)
	data_out = make_ssa_name (m_limb_type);
      g = gimple_build_assign (data_out, rhs1);
      insert_before (g);
    }
  if (cnt < (unsigned) TYPE_PRECISION (rhs1_type))
    {
      lhs = make_ssa_name (rhs1_type);
      g = gimple_build_assign (lhs, LSHIFT_EXPR, rhs1, rhs2);
      insert_before (g);
      /* OR in the bits carried over from the previous limb.  */
      if (!integer_zerop (data_in))
	{
	  rhs1 = lhs;
	  lhs = make_ssa_name (rhs1_type);
	  g = gimple_build_assign (lhs, BIT_IOR_EXPR, rhs1, data_in);
	  insert_before (g);
	}
    }
  else
    /* The shift count covers the whole (partial) limb; the result is
       just the carried-in bits.  */
    lhs = data_in;
  m_data[m_data_cnt] = data_out;
  m_data_cnt += 2;
  return lhs;
}
1362 :
1363 : /* Helper function for handle_stmt method, handle an integral
1364 : to integral conversion. */
1365 :
1366 : tree
1367 7455 : bitint_large_huge::handle_cast (tree lhs_type, tree rhs1, tree idx)
1368 : {
1369 7455 : tree rhs_type = TREE_TYPE (rhs1);
1370 7455 : gimple *g;
1371 7455 : if ((TREE_CODE (rhs1) == SSA_NAME || TREE_CODE (rhs1) == INTEGER_CST)
1372 7455 : && TREE_CODE (lhs_type) == BITINT_TYPE
1373 7455 : && TREE_CODE (rhs_type) == BITINT_TYPE
1374 6466 : && bitint_precision_kind (lhs_type) >= bitint_prec_large
1375 13921 : && bitint_precision_kind (rhs_type) >= bitint_prec_large)
1376 : {
1377 5877 : if (TYPE_PRECISION (rhs_type) >= TYPE_PRECISION (lhs_type)
1378 : /* If lhs has bigger precision than rhs, we can use
1379 : the simple case only if there is a guarantee that
1380 : the most significant limb is handled in straight
1381 : line code. If m_var_msb (on left shifts) or
1382 : if m_upwards_2limb * limb_prec is equal to
1383 : lhs precision or if not m_upwards_2limb and lhs_type
1384 : has precision which is multiple of limb_prec that is
1385 : not the case. */
1386 5877 : || (!m_var_msb
1387 1481 : && (CEIL (TYPE_PRECISION (lhs_type), limb_prec)
1388 1481 : == CEIL (TYPE_PRECISION (rhs_type), limb_prec))
1389 346 : && ((!m_upwards_2limb
1390 182 : && (TYPE_PRECISION (lhs_type) % limb_prec != 0))
1391 243 : || (m_upwards_2limb
1392 328 : && (m_upwards_2limb * limb_prec
1393 164 : < TYPE_PRECISION (lhs_type))))))
1394 : {
1395 4637 : tree ridx = idx;
1396 4637 : if (bitint_big_endian
1397 4637 : && (CEIL (TYPE_PRECISION (lhs_type), limb_prec)
1398 0 : != CEIL (TYPE_PRECISION (rhs_type), limb_prec)))
1399 : {
1400 0 : HOST_WIDE_INT diff = CEIL (TYPE_PRECISION (rhs_type), limb_prec);
1401 0 : diff -= CEIL (TYPE_PRECISION (lhs_type), limb_prec);
1402 0 : if (tree_fits_uhwi_p (idx))
1403 0 : ridx = size_int (tree_to_uhwi (idx) + diff);
1404 : else
1405 : {
1406 0 : tree t = make_ssa_name (sizetype);
1407 0 : g = gimple_build_assign (t, PLUS_EXPR, idx, size_int (diff));
1408 0 : insert_before (g);
1409 0 : ridx = t;
1410 : }
1411 : }
1412 4637 : rhs1 = handle_operand (rhs1, ridx);
1413 4637 : if (tree_fits_uhwi_p (idx))
1414 : {
1415 2372 : tree type = limb_access_type (lhs_type, idx);
1416 2372 : if (!types_compatible_p (type, TREE_TYPE (rhs1)))
1417 1241 : rhs1 = add_cast (type, rhs1);
1418 : }
1419 4637 : return rhs1;
1420 : }
1421 1240 : tree t;
1422 : /* Indexes lower than this don't need any special processing. */
1423 1240 : unsigned low = ((unsigned) TYPE_PRECISION (rhs_type)
1424 1240 : - !TYPE_UNSIGNED (rhs_type)) / limb_prec;
1425 : /* Indexes >= than this always contain an extension. */
1426 1240 : unsigned high = CEIL ((unsigned) TYPE_PRECISION (rhs_type), limb_prec);
1427 1240 : unsigned lcnt = CEIL ((unsigned) TYPE_PRECISION (lhs_type), limb_prec);
1428 1240 : unsigned lowe = bitint_big_endian ? lcnt - 1 - low : low;
1429 1240 : bool save_first = m_first;
1430 1240 : if (m_first)
1431 : {
1432 405 : m_data.safe_push (NULL_TREE);
1433 405 : m_data.safe_push (NULL_TREE);
1434 405 : m_data.safe_push (NULL_TREE);
1435 405 : if (TYPE_UNSIGNED (rhs_type))
1436 : /* No need to keep state between iterations. */
1437 : ;
1438 192 : else if (m_upwards && !m_upwards_2limb)
1439 : /* We need to keep state between iterations, but
1440 : not within any loop, everything is straight line
1441 : code with only increasing indexes. */
1442 : ;
1443 152 : else if (!m_upwards_2limb)
1444 : {
1445 3 : unsigned save_data_cnt = m_data_cnt;
1446 3 : gimple_stmt_iterator save_gsi = m_gsi;
1447 3 : m_gsi = m_init_gsi;
1448 3 : if (gsi_end_p (m_gsi))
1449 0 : m_gsi = gsi_after_labels (gsi_bb (m_gsi));
1450 : else
1451 3 : gsi_next (&m_gsi);
1452 3 : m_data_cnt = save_data_cnt + 3;
1453 3 : t = handle_operand (rhs1, size_int (bitint_big_endian
1454 : ? high - 1 - low : low));
1455 3 : m_first = false;
1456 3 : m_data[save_data_cnt + 2]
1457 3 : = build_int_cst (NULL_TREE, m_data_cnt);
1458 3 : m_data_cnt = save_data_cnt;
1459 3 : t = add_cast (signed_type_for (m_limb_type), t);
1460 3 : tree lpm1 = build_int_cst (unsigned_type_node, limb_prec - 1);
1461 3 : tree n = make_ssa_name (TREE_TYPE (t));
1462 3 : g = gimple_build_assign (n, RSHIFT_EXPR, t, lpm1);
1463 3 : insert_before (g);
1464 3 : m_data[save_data_cnt + 1] = add_cast (m_limb_type, n);
1465 3 : m_init_gsi = m_gsi;
1466 3 : if (gsi_end_p (m_init_gsi))
1467 0 : m_init_gsi = gsi_last_bb (gsi_bb (m_init_gsi));
1468 : else
1469 3 : gsi_prev (&m_init_gsi);
1470 3 : m_gsi = save_gsi;
1471 : }
1472 149 : else if (m_upwards_2limb * limb_prec < TYPE_PRECISION (rhs_type))
1473 : /* We need to keep state between iterations, but
1474 : fortunately not within the loop, only afterwards. */
1475 : ;
1476 : else
1477 : {
1478 145 : tree out;
1479 145 : m_data.truncate (m_data_cnt);
1480 145 : prepare_data_in_out (build_zero_cst (m_limb_type), idx, &out);
1481 145 : m_data.safe_push (NULL_TREE);
1482 : }
1483 : }
1484 :
1485 1240 : unsigned save_data_cnt = m_data_cnt;
1486 1240 : m_data_cnt += 3;
1487 1240 : if (!tree_fits_uhwi_p (idx))
1488 : {
1489 670 : if (m_upwards_2limb
1490 650 : && low >= m_upwards_2limb - m_first)
1491 : {
1492 158 : if (bitint_big_endian
1493 158 : && (CEIL (TYPE_PRECISION (lhs_type), limb_prec)
1494 0 : != CEIL (TYPE_PRECISION (rhs_type), limb_prec)))
1495 : {
1496 0 : HOST_WIDE_INT diff
1497 0 : = CEIL (TYPE_PRECISION (rhs_type), limb_prec);
1498 0 : diff -= CEIL (TYPE_PRECISION (lhs_type), limb_prec);
1499 0 : tree t = make_ssa_name (sizetype);
1500 0 : g = gimple_build_assign (t, PLUS_EXPR, idx, size_int (diff));
1501 0 : insert_before (g);
1502 0 : idx = t;
1503 : }
1504 158 : rhs1 = handle_operand (rhs1, idx);
1505 158 : if (m_first)
1506 131 : m_data[save_data_cnt + 2]
1507 262 : = build_int_cst (NULL_TREE, m_data_cnt);
1508 158 : m_first = save_first;
1509 158 : return rhs1;
1510 : }
1511 1289 : bool single_comparison
1512 512 : = low == high || (m_upwards_2limb && (low & 1) == m_first);
1513 265 : tree idxc = idx;
1514 265 : if (!single_comparison
1515 265 : && m_upwards_2limb
1516 245 : && !m_first
1517 112 : && low + 1 == m_upwards_2limb)
1518 : /* In this case we know that idx <= low always,
1519 : so effectively we just needs a single comparison,
1520 : idx < low or idx == low, but we'd need to emit different
1521 : code for the 2 branches than single_comparison normally
1522 : emits. So, instead of special-casing that, emit a
1523 : low <= low comparison which cfg cleanup will clean up
1524 : at the end of the pass. */
1525 89 : idxc = size_int (lowe);
1526 512 : if (bitint_big_endian)
1527 0 : g = gimple_build_cond (single_comparison ? GT_EXPR : GE_EXPR,
1528 0 : idxc, size_int (lowe),
1529 : NULL_TREE, NULL_TREE);
1530 : else
1531 777 : g = gimple_build_cond (single_comparison ? LT_EXPR : LE_EXPR,
1532 512 : idxc, size_int (low), NULL_TREE, NULL_TREE);
1533 512 : edge edge_true_true, edge_true_false, edge_false;
1534 777 : if_then_if_then_else (g, (single_comparison ? NULL
1535 265 : : gimple_build_cond (EQ_EXPR, idx,
1536 265 : size_int (lowe),
1537 : NULL_TREE,
1538 : NULL_TREE)),
1539 : profile_probability::likely (),
1540 : profile_probability::unlikely (),
1541 : edge_true_true, edge_true_false, edge_false);
1542 512 : bool save_cast_conditional = m_cast_conditional;
1543 512 : m_cast_conditional = true;
1544 512 : m_bitfld_load = 0;
1545 512 : tree t1 = idx, t2 = NULL_TREE;
1546 512 : if (bitint_big_endian
1547 512 : && (CEIL (TYPE_PRECISION (lhs_type), limb_prec)
1548 0 : != CEIL (TYPE_PRECISION (rhs_type), limb_prec)))
1549 : {
1550 0 : HOST_WIDE_INT diff = CEIL (TYPE_PRECISION (rhs_type), limb_prec);
1551 0 : diff -= CEIL (TYPE_PRECISION (lhs_type), limb_prec);
1552 0 : t1 = make_ssa_name (sizetype);
1553 0 : g = gimple_build_assign (t1, PLUS_EXPR, idx, size_int (diff));
1554 0 : insert_before (g);
1555 : }
1556 512 : t1 = handle_operand (rhs1, t1);
1557 512 : if (m_first)
1558 199 : m_data[save_data_cnt + 2]
1559 398 : = build_int_cst (NULL_TREE, m_data_cnt);
1560 512 : tree ext = NULL_TREE;
1561 512 : tree bitfld = NULL_TREE;
1562 512 : if (!single_comparison)
1563 : {
1564 265 : m_gsi = gsi_after_labels (edge_true_true->src);
1565 265 : m_first = false;
1566 265 : m_data_cnt = save_data_cnt + 3;
1567 265 : if (m_bitfld_load)
1568 : {
1569 4 : bitfld = m_data[m_bitfld_load];
1570 4 : m_data[m_bitfld_load] = m_data[m_bitfld_load + 2];
1571 4 : m_bitfld_load = 0;
1572 : }
1573 265 : t2 = handle_operand (rhs1, size_int (bitint_big_endian
1574 : ? high - 1 - low : low));
1575 265 : if (!useless_type_conversion_p (m_limb_type, TREE_TYPE (t2)))
1576 220 : t2 = add_cast (m_limb_type, t2);
1577 265 : if (!TYPE_UNSIGNED (rhs_type) && m_upwards_2limb)
1578 : {
1579 145 : ext = add_cast (signed_type_for (m_limb_type), t2);
1580 290 : tree lpm1 = build_int_cst (unsigned_type_node,
1581 145 : limb_prec - 1);
1582 145 : tree n = make_ssa_name (TREE_TYPE (ext));
1583 145 : g = gimple_build_assign (n, RSHIFT_EXPR, ext, lpm1);
1584 145 : insert_before (g);
1585 145 : ext = add_cast (m_limb_type, n);
1586 : }
1587 : }
1588 512 : tree t3;
1589 512 : if (TYPE_UNSIGNED (rhs_type))
1590 262 : t3 = build_zero_cst (m_limb_type);
1591 250 : else if (m_upwards_2limb && (save_first || ext != NULL_TREE))
1592 167 : t3 = m_data[save_data_cnt];
1593 : else
1594 83 : t3 = m_data[save_data_cnt + 1];
1595 512 : m_gsi = gsi_after_labels (edge_true_false->dest);
1596 512 : t = make_ssa_name (m_limb_type);
1597 512 : gphi *phi = create_phi_node (t, edge_true_false->dest);
1598 512 : add_phi_arg (phi, t1, edge_true_false, UNKNOWN_LOCATION);
1599 512 : add_phi_arg (phi, t3, edge_false, UNKNOWN_LOCATION);
1600 512 : if (edge_true_true)
1601 265 : add_phi_arg (phi, t2, edge_true_true, UNKNOWN_LOCATION);
1602 512 : if (ext)
1603 : {
1604 145 : tree t4 = make_ssa_name (m_limb_type);
1605 145 : phi = create_phi_node (t4, edge_true_false->dest);
1606 145 : add_phi_arg (phi, build_zero_cst (m_limb_type), edge_true_false,
1607 : UNKNOWN_LOCATION);
1608 145 : add_phi_arg (phi, m_data[save_data_cnt], edge_false,
1609 : UNKNOWN_LOCATION);
1610 145 : add_phi_arg (phi, ext, edge_true_true, UNKNOWN_LOCATION);
1611 145 : if (!save_cast_conditional)
1612 : {
1613 135 : g = gimple_build_assign (m_data[save_data_cnt + 1], t4);
1614 135 : insert_before (g);
1615 : }
1616 : else
1617 10 : for (basic_block bb = gsi_bb (m_gsi);;)
1618 : {
1619 10 : edge e1 = single_succ_edge (bb);
1620 10 : edge e2 = find_edge (e1->dest, m_bb), e3;
1621 10 : tree t5 = (e2 ? m_data[save_data_cnt + 1]
1622 10 : : make_ssa_name (m_limb_type));
1623 10 : phi = create_phi_node (t5, e1->dest);
1624 10 : edge_iterator ei;
1625 30 : FOR_EACH_EDGE (e3, ei, e1->dest->preds)
1626 30 : add_phi_arg (phi, (e3 == e1 ? t4
1627 10 : : build_zero_cst (m_limb_type)),
1628 : e3, UNKNOWN_LOCATION);
1629 10 : if (e2)
1630 : break;
1631 0 : t4 = t5;
1632 0 : bb = e1->dest;
1633 0 : }
1634 : }
1635 512 : if (m_bitfld_load)
1636 : {
1637 8 : tree t4;
1638 8 : if (!save_first && !save_cast_conditional)
1639 2 : t4 = m_data[m_bitfld_load + 1];
1640 : else
1641 6 : t4 = make_ssa_name (m_limb_type);
1642 8 : phi = create_phi_node (t4, edge_true_false->dest);
1643 12 : add_phi_arg (phi,
1644 4 : edge_true_true ? bitfld : m_data[m_bitfld_load],
1645 : edge_true_false, UNKNOWN_LOCATION);
1646 8 : add_phi_arg (phi, m_data[m_bitfld_load + 2],
1647 : edge_false, UNKNOWN_LOCATION);
1648 8 : if (edge_true_true)
1649 4 : add_phi_arg (phi, m_data[m_bitfld_load], edge_true_true,
1650 : UNKNOWN_LOCATION);
1651 8 : if (save_cast_conditional)
1652 4 : for (basic_block bb = gsi_bb (m_gsi);;)
1653 : {
1654 4 : edge e1 = single_succ_edge (bb);
1655 4 : edge e2 = find_edge (e1->dest, m_bb), e3;
1656 4 : tree t5 = ((e2 && !save_first) ? m_data[m_bitfld_load + 1]
1657 4 : : make_ssa_name (m_limb_type));
1658 4 : phi = create_phi_node (t5, e1->dest);
1659 4 : edge_iterator ei;
1660 14 : FOR_EACH_EDGE (e3, ei, e1->dest->preds)
1661 16 : add_phi_arg (phi, (e3 == e1 ? t4
1662 6 : : build_zero_cst (m_limb_type)),
1663 : e3, UNKNOWN_LOCATION);
1664 4 : t4 = t5;
1665 4 : if (e2)
1666 : break;
1667 0 : bb = e1->dest;
1668 0 : }
1669 8 : m_data[m_bitfld_load] = t4;
1670 8 : m_data[m_bitfld_load + 2] = t4;
1671 8 : m_bitfld_load = 0;
1672 : }
1673 512 : m_cast_conditional = save_cast_conditional;
1674 512 : m_first = save_first;
1675 512 : return t;
1676 : }
1677 : else
1678 : {
1679 570 : unsigned tidx = tree_to_uhwi (idx);
1680 570 : if (bitint_big_endian)
1681 0 : tidx = lcnt - 1 - tidx;
1682 570 : if (tidx < low)
1683 : {
1684 152 : t = handle_operand (rhs1, (bitint_big_endian
1685 0 : ? size_int (high - 1 - tidx) : idx));
1686 152 : if (m_first)
1687 71 : m_data[save_data_cnt + 2]
1688 142 : = build_int_cst (NULL_TREE, m_data_cnt);
1689 : }
1690 418 : else if (tidx < high)
1691 : {
1692 68 : t = handle_operand (rhs1, size_int (bitint_big_endian
1693 : ? high - 1 - low : low));
1694 68 : if (m_first)
1695 1 : m_data[save_data_cnt + 2]
1696 2 : = build_int_cst (NULL_TREE, m_data_cnt);
1697 68 : if (!useless_type_conversion_p (m_limb_type, TREE_TYPE (t)))
1698 60 : t = add_cast (m_limb_type, t);
1699 68 : tree ext = NULL_TREE;
1700 68 : if (!TYPE_UNSIGNED (rhs_type) && m_upwards)
1701 : {
1702 44 : ext = add_cast (signed_type_for (m_limb_type), t);
1703 88 : tree lpm1 = build_int_cst (unsigned_type_node,
1704 44 : limb_prec - 1);
1705 44 : tree n = make_ssa_name (TREE_TYPE (ext));
1706 44 : g = gimple_build_assign (n, RSHIFT_EXPR, ext, lpm1);
1707 44 : insert_before (g);
1708 44 : ext = add_cast (m_limb_type, n);
1709 44 : m_data[save_data_cnt + 1] = ext;
1710 : }
1711 : }
1712 : else
1713 : {
1714 350 : if (TYPE_UNSIGNED (rhs_type) && m_first)
1715 : {
1716 0 : handle_operand (rhs1, (bitint_big_endian
1717 0 : ? size_int (high - 1)
1718 : : size_zero_node));
1719 0 : m_data[save_data_cnt + 2]
1720 0 : = build_int_cst (NULL_TREE, m_data_cnt);
1721 : }
1722 : else
1723 350 : m_data_cnt = tree_to_uhwi (m_data[save_data_cnt + 2]);
1724 350 : if (TYPE_UNSIGNED (rhs_type))
1725 184 : t = build_zero_cst (m_limb_type);
1726 166 : else if (m_bb
1727 16 : && m_data[save_data_cnt]
1728 179 : && ((tidx & 1) == 0 || tidx != low + 1))
1729 : t = m_data[save_data_cnt];
1730 : else
1731 160 : t = m_data[save_data_cnt + 1];
1732 : }
1733 570 : tree type = limb_access_type (lhs_type, idx);
1734 570 : if (!useless_type_conversion_p (type, m_limb_type))
1735 291 : t = add_cast (type, t);
1736 570 : m_first = save_first;
1737 570 : return t;
1738 : }
1739 : }
1740 1578 : else if (TREE_CODE (lhs_type) == BITINT_TYPE
1741 1578 : && bitint_precision_kind (lhs_type) >= bitint_prec_large
1742 3156 : && INTEGRAL_TYPE_P (rhs_type))
1743 : {
1744 : /* Add support for 3 or more limbs filled in from normal integral
1745 : type if this assert fails. If no target chooses limb mode smaller
1746 : than half of largest supported normal integral type, this will not
1747 : be needed. */
1748 1578 : gcc_assert (TYPE_PRECISION (rhs_type) <= 2 * limb_prec);
1749 1578 : tree r1 = NULL_TREE, r2 = NULL_TREE, rext = NULL_TREE;
1750 1578 : if (m_first)
1751 : {
1752 576 : gimple_stmt_iterator save_gsi = m_gsi;
1753 576 : m_gsi = m_init_gsi;
1754 576 : if (gsi_end_p (m_gsi))
1755 56 : m_gsi = gsi_after_labels (gsi_bb (m_gsi));
1756 : else
1757 520 : gsi_next (&m_gsi);
1758 576 : if (TREE_CODE (rhs_type) == BITINT_TYPE
1759 576 : && bitint_precision_kind (rhs_type) == bitint_prec_middle)
1760 : {
1761 63 : tree type = NULL_TREE;
1762 63 : rhs1 = maybe_cast_middle_bitint (&m_gsi, rhs1, type);
1763 63 : rhs_type = TREE_TYPE (rhs1);
1764 : }
1765 576 : r1 = rhs1;
1766 576 : if (!useless_type_conversion_p (m_limb_type, TREE_TYPE (rhs1)))
1767 508 : r1 = add_cast (m_limb_type, rhs1);
1768 576 : if (TYPE_PRECISION (rhs_type) > limb_prec)
1769 : {
1770 109 : g = gimple_build_assign (make_ssa_name (rhs_type),
1771 : RSHIFT_EXPR, rhs1,
1772 : build_int_cst (unsigned_type_node,
1773 109 : limb_prec));
1774 109 : insert_before (g);
1775 109 : r2 = add_cast (m_limb_type, gimple_assign_lhs (g));
1776 : }
1777 576 : if (TYPE_UNSIGNED (rhs_type))
1778 279 : rext = build_zero_cst (m_limb_type);
1779 : else
1780 : {
1781 297 : rext = add_cast (signed_type_for (m_limb_type), r2 ? r2 : r1);
1782 297 : g = gimple_build_assign (make_ssa_name (TREE_TYPE (rext)),
1783 : RSHIFT_EXPR, rext,
1784 : build_int_cst (unsigned_type_node,
1785 297 : limb_prec - 1));
1786 297 : insert_before (g);
1787 297 : rext = add_cast (m_limb_type, gimple_assign_lhs (g));
1788 : }
1789 576 : m_init_gsi = m_gsi;
1790 576 : if (gsi_end_p (m_init_gsi))
1791 562 : m_init_gsi = gsi_last_bb (gsi_bb (m_init_gsi));
1792 : else
1793 295 : gsi_prev (&m_init_gsi);
1794 576 : m_gsi = save_gsi;
1795 : }
1796 1578 : tree t;
1797 1578 : if (m_upwards_2limb)
1798 : {
1799 722 : if (m_first)
1800 : {
1801 279 : tree out1, out2;
1802 279 : prepare_data_in_out (r1, idx, &out1, rext);
1803 279 : if (TYPE_PRECISION (rhs_type) > limb_prec)
1804 : {
1805 70 : prepare_data_in_out (r2, idx, &out2, rext);
1806 70 : m_data.pop ();
1807 70 : t = m_data.pop ();
1808 70 : m_data[m_data_cnt + 1] = t;
1809 : }
1810 : else
1811 209 : m_data[m_data_cnt + 1] = rext;
1812 279 : m_data.safe_push (rext);
1813 279 : t = m_data[m_data_cnt];
1814 : }
1815 443 : else if (!tree_fits_uhwi_p (idx))
1816 279 : t = m_data[m_data_cnt + 1];
1817 : else
1818 : {
1819 164 : tree type = limb_access_type (lhs_type, idx);
1820 164 : t = m_data[m_data_cnt + 2];
1821 164 : if (!useless_type_conversion_p (type, m_limb_type))
1822 136 : t = add_cast (type, t);
1823 : }
1824 722 : m_data_cnt += 3;
1825 722 : return t;
1826 : }
1827 856 : else if (m_first)
1828 : {
1829 297 : m_data.safe_push (r1);
1830 297 : m_data.safe_push (r2);
1831 297 : m_data.safe_push (rext);
1832 : }
1833 856 : unsigned lcnt = CEIL ((unsigned) TYPE_PRECISION (lhs_type), limb_prec);
1834 856 : if (tree_fits_uhwi_p (idx))
1835 : {
1836 812 : tree type = limb_access_type (lhs_type, idx);
1837 812 : if (bitint_big_endian
1838 812 : ? tree_to_uhwi (idx) == lcnt - 1 : integer_zerop (idx))
1839 269 : t = m_data[m_data_cnt];
1840 543 : else if (TYPE_PRECISION (rhs_type) > limb_prec
1841 543 : && (bitint_big_endian
1842 72 : ? tree_to_uhwi (idx) == lcnt - 2
1843 72 : : integer_onep (idx)))
1844 33 : t = m_data[m_data_cnt + 1];
1845 : else
1846 510 : t = m_data[m_data_cnt + 2];
1847 812 : if (!useless_type_conversion_p (type, m_limb_type))
1848 250 : t = add_cast (type, t);
1849 812 : m_data_cnt += 3;
1850 812 : return t;
1851 : }
1852 44 : g = gimple_build_cond (NE_EXPR, idx,
1853 : bitint_big_endian
1854 0 : ? size_int (lcnt - 1) : size_zero_node,
1855 : NULL_TREE, NULL_TREE);
1856 44 : edge e2, e3, e4 = NULL;
1857 44 : if_then (g, profile_probability::likely (), e2, e3);
1858 44 : if (m_data[m_data_cnt + 1])
1859 : {
1860 14 : g = gimple_build_cond (EQ_EXPR, idx,
1861 : bitint_big_endian
1862 0 : ? size_int (lcnt - 2) : size_one_node,
1863 : NULL_TREE, NULL_TREE);
1864 14 : insert_before (g);
1865 14 : edge e5 = split_block (gsi_bb (m_gsi), g);
1866 14 : e4 = make_edge (e5->src, e2->dest, EDGE_TRUE_VALUE);
1867 14 : e2 = find_edge (e5->dest, e2->dest);
1868 14 : e4->probability = profile_probability::unlikely ();
1869 14 : e5->flags = EDGE_FALSE_VALUE;
1870 14 : e5->probability = e4->probability.invert ();
1871 : }
1872 44 : m_gsi = gsi_after_labels (e2->dest);
1873 44 : t = make_ssa_name (m_limb_type);
1874 44 : gphi *phi = create_phi_node (t, e2->dest);
1875 44 : add_phi_arg (phi, m_data[m_data_cnt + 2], e2, UNKNOWN_LOCATION);
1876 44 : add_phi_arg (phi, m_data[m_data_cnt], e3, UNKNOWN_LOCATION);
1877 44 : if (e4)
1878 14 : add_phi_arg (phi, m_data[m_data_cnt + 1], e4, UNKNOWN_LOCATION);
1879 44 : m_data_cnt += 3;
1880 44 : return t;
1881 : }
1882 : return NULL_TREE;
1883 : }
1884 :
1885 : /* Helper function for handle_stmt method, handle a BIT_FIELD_REF. */
1886 :
1887 : tree
1888 33 : bitint_large_huge::handle_bit_field_ref (tree op, tree idx)
1889 : {
            : /* Constant limb index: emit a single BIT_FIELD_REF load of the
            :    limb at the corresponding bit offset.  */
1890 33 : if (tree_fits_uhwi_p (idx))
1891 : {
            : /* Keep the per-statement m_data slot layout consistent with the
            :    variable-index path below (one slot, unused here).  */
1892 21 : if (m_first)
1893 6 : m_data.safe_push (NULL);
1894 21 : ++m_data_cnt;
1895 21 : unsigned HOST_WIDE_INT sz = tree_to_uhwi (TYPE_SIZE (m_limb_type));
1896 21 : unsigned i = tree_to_uhwi (idx);
            : /* For big-endian limb ordering mirror the limb index.  */
1897 21 : if (bitint_big_endian)
1898 0 : i = CEIL (TYPE_PRECISION (TREE_TYPE (op)), limb_prec) - 1 - i;
1899 42 : tree bfr = build3 (BIT_FIELD_REF, m_limb_type,
1900 21 : TREE_OPERAND (op, 0),
1901 21 : TYPE_SIZE (m_limb_type),
1902 21 : size_binop (PLUS_EXPR, TREE_OPERAND (op, 2),
1903 : bitsize_int (i * sz)));
1904 21 : tree r = make_ssa_name (m_limb_type);
1905 21 : gimple *g = gimple_build_assign (r, bfr);
1906 21 : insert_before (g);
1907 21 : tree type = limb_access_type (TREE_TYPE (op), idx);
1908 21 : if (!useless_type_conversion_p (type, m_limb_type))
1909 0 : r = add_cast (type, r);
1910 21 : return r;
1911 : }
            : /* Variable limb index: on the first call spill the referenced bits
            :    into an addressable temporary VAR so limbs can be indexed at
            :    runtime; subsequent calls reuse VAR cached in m_data.  */
1912 12 : tree var;
1913 12 : if (m_first)
1914 : {
1915 6 : unsigned HOST_WIDE_INT sz = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (op)));
1916 6 : machine_mode mode;
1917 6 : tree type, bfr;
            : /* Prefer a single integral-mode copy when an integer mode spans
            :    exactly the referenced size; otherwise copy the whole
            :    containing object and index into it below.  */
1918 6 : if (bitwise_mode_for_size (sz).exists (&mode)
1919 2 : && known_eq (GET_MODE_BITSIZE (mode), sz))
1920 1 : type = bitwise_type_for_mode (mode);
1921 : else
1922 : {
1923 5 : mode = VOIDmode;
1924 5 : type = TYPE_MAIN_VARIANT (TREE_TYPE (TREE_OPERAND (op, 0)));
1925 : }
1926 6 : if (TYPE_ALIGN (type) < TYPE_ALIGN (TREE_TYPE (op)))
1927 0 : type = build_aligned_type (type, TYPE_ALIGN (TREE_TYPE (op)));
1928 6 : var = create_tmp_var (type);
1929 6 : TREE_ADDRESSABLE (var) = 1;
1930 6 : gimple *g;
1931 6 : if (mode != VOIDmode)
1932 : {
1933 1 : bfr = build3 (BIT_FIELD_REF, type, TREE_OPERAND (op, 0),
1934 1 : TYPE_SIZE (type), TREE_OPERAND (op, 2));
1935 1 : g = gimple_build_assign (make_ssa_name (type),
1936 : BIT_FIELD_REF, bfr);
1937 1 : gimple_set_location (g, m_loc);
1938 1 : gsi_insert_after (&m_init_gsi, g, GSI_NEW_STMT);
1939 1 : bfr = gimple_assign_lhs (g);
1940 : }
1941 : else
1942 5 : bfr = TREE_OPERAND (op, 0);
            : /* The copy into VAR is emitted in the initialization block
            :    (m_init_gsi), not at the current insertion point.  */
1943 6 : g = gimple_build_assign (var, bfr);
1944 6 : gimple_set_location (g, m_loc);
1945 6 : gsi_insert_after (&m_init_gsi, g, GSI_NEW_STMT);
            : /* Whole-object copy: re-view VAR as an array of limbs starting
            :    at the bit-field's byte offset.  */
1946 6 : if (mode == VOIDmode)
1947 : {
1948 5 : unsigned HOST_WIDE_INT nelts
1949 5 : = CEIL (tree_to_uhwi (TYPE_SIZE (TREE_TYPE (op))), limb_prec);
1950 5 : tree atype = build_array_type_nelts (m_limb_type, nelts);
1951 5 : var = build2 (MEM_REF, atype, build_fold_addr_expr (var),
1952 : build_int_cst (build_pointer_type (type),
1953 5 : tree_to_uhwi (TREE_OPERAND (op, 2))
1954 5 : / BITS_PER_UNIT));
1955 : }
1956 6 : m_data.safe_push (var);
1957 : }
1958 : else
1959 6 : var = unshare_expr (m_data[m_data_cnt]);
1960 12 : ++m_data_cnt;
            : /* Read limb IDX of VAR.  */
1961 12 : var = limb_access (TREE_TYPE (op), var, idx, false);
1962 12 : tree r = make_ssa_name (m_limb_type);
1963 12 : gimple *g = gimple_build_assign (r, var);
1964 12 : insert_before (g);
1965 12 : return r;
1966 : }
1967 :
1968 : /* Add a new EH edge from SRC to EH_EDGE->dest, where EH_EDGE
1969 : is an older EH edge, and except for virtual PHIs duplicate the
1970 : PHI argument from the EH_EDGE to the new EH edge. */
1971 :
1972 : static void
1973 20 : add_eh_edge (basic_block src, edge eh_edge)
1974 : {
1975 20 : edge e = make_edge (src, eh_edge->dest, EDGE_EH);
1976 20 : e->probability = profile_probability::very_unlikely ();
1977 20 : for (gphi_iterator gsi = gsi_start_phis (eh_edge->dest);
1978 27 : !gsi_end_p (gsi); gsi_next (&gsi))
1979 : {
1980 7 : gphi *phi = gsi.phi ();
1981 7 : tree lhs = gimple_phi_result (phi);
1982 14 : if (virtual_operand_p (lhs))
1983 4 : continue;
1984 3 : const phi_arg_d *arg = gimple_phi_arg (phi, eh_edge->dest_idx);
1985 3 : add_phi_arg (phi, arg->def, e, arg->locus);
1986 : }
1987 20 : }
1988 :
1989 : /* Helper function for handle_stmt method, handle a load from memory. */
1990 :
1991 : tree
1992 21263 : bitint_large_huge::handle_load (gimple *stmt, tree idx)
1993 : {
1994 21263 : tree rhs1 = gimple_assign_rhs1 (stmt);
1995 21263 : tree rhs_type = TREE_TYPE (rhs1);
1996 21263 : bool eh = stmt_ends_bb_p (stmt);
1997 21263 : bool load_bitfield_p = false;
1998 21263 : edge eh_edge = NULL;
1999 21263 : gimple *g;
2000 :
            : /* A BIT_FIELD_REF at bit offset 0 reads the underlying object
            :    directly.  */
2001 21263 : if (TREE_CODE (rhs1) == BIT_FIELD_REF
2002 21263 : && integer_zerop (TREE_OPERAND (rhs1, 2)))
2003 2 : rhs1 = TREE_OPERAND (rhs1, 0);
2004 :
            : /* Remember the statement's EH edge so the per-limb loads emitted
            :    below can duplicate it (see add_eh_edge calls).  */
2005 21263 : if (eh)
2006 : {
2007 10 : edge_iterator ei;
2008 10 : basic_block bb = gimple_bb (stmt);
2009 :
2010 10 : FOR_EACH_EDGE (eh_edge, ei, bb->succs)
2011 10 : if (eh_edge->flags & EDGE_EH)
2012 : break;
2013 : }
2014 :
2015 21263 : if (TREE_CODE (rhs1) == COMPONENT_REF
2016 21263 : && DECL_BIT_FIELD_TYPE (TREE_OPERAND (rhs1, 1)))
2017 : {
2018 1213 : tree fld = TREE_OPERAND (rhs1, 1);
2019 : /* For little-endian, we can allow as inputs bit-fields
2020 : which start at a limb boundary. */
2021 1213 : gcc_assert (tree_fits_uhwi_p (DECL_FIELD_BIT_OFFSET (fld)));
2022 1213 : if (!bitint_big_endian
2023 1213 : && DECL_OFFSET_ALIGN (fld) >= TYPE_ALIGN (TREE_TYPE (rhs1))
2024 2426 : && (tree_to_uhwi (DECL_FIELD_BIT_OFFSET (fld)) % limb_prec) == 0)
2025 : {
2026 739 : load_bitfield_p = true;
2027 751 : goto normal_load;
2028 : }
2029 : /* Even if DECL_FIELD_BIT_OFFSET (fld) is a multiple of BITS_PER_UNIT,
2030 : handle it normally for now. */
2031 474 : if (!bitint_big_endian
2032 474 : && (tree_to_uhwi (DECL_FIELD_BIT_OFFSET (fld)) % BITS_PER_UNIT) == 0)
2033 : {
2034 12 : load_bitfield_p = true;
2035 12 : goto normal_load;
2036 : }
            : /* Bit-field not starting at a byte boundary: access it through
            :    DECL_BIT_FIELD_REPRESENTATIVE and assemble each limb by
            :    shifting and merging two adjacent representative limbs.  */
2037 462 : tree repr = DECL_BIT_FIELD_REPRESENTATIVE (fld);
2038 462 : poly_int64 bitoffset;
2039 462 : poly_uint64 field_offset, repr_offset;
2040 462 : bool var_field_off = false;
2041 462 : if (poly_int_tree_p (DECL_FIELD_OFFSET (fld), &field_offset)
2042 924 : && poly_int_tree_p (DECL_FIELD_OFFSET (repr), &repr_offset))
2043 462 : bitoffset = (field_offset - repr_offset) * BITS_PER_UNIT;
2044 : else
2045 : {
2046 : bitoffset = 0;
2047 : var_field_off = true;
2048 : }
2049 462 : bitoffset += (tree_to_uhwi (DECL_FIELD_BIT_OFFSET (fld))
2050 462 : - tree_to_uhwi (DECL_FIELD_BIT_OFFSET (repr)));
2051 924 : tree nrhs1 = build3 (COMPONENT_REF, TREE_TYPE (repr),
2052 462 : TREE_OPERAND (rhs1, 0), repr,
2053 0 : var_field_off ? TREE_OPERAND (rhs1, 2) : NULL_TREE);
            : /* bo_idx/bo_bit: limb index and bit position of the bit-field's
            :    first bit within the representative.  For big-endian limb
            :    order, bo_last is the representative limb holding the field's
            :    last bits and bo_shift the adjusted right-shift amount.  */
2054 462 : HOST_WIDE_INT bo = bitoffset.to_constant ();
2055 462 : unsigned bo_idx = (unsigned HOST_WIDE_INT) bo / limb_prec;
2056 462 : unsigned bo_bit = (unsigned HOST_WIDE_INT) bo % limb_prec;
2057 462 : unsigned bo_last = 0;
2058 462 : unsigned bo_shift = bo_bit;
2059 462 : unsigned nelts = CEIL (TYPE_PRECISION (rhs_type), limb_prec);
2060 462 : if (bitint_big_endian)
2061 : {
2062 0 : bo_last = CEIL (TYPE_PRECISION (rhs_type) + bo_bit, limb_prec) - 1;
2063 0 : bo_shift = (TYPE_PRECISION (rhs_type) + bo_bit) % limb_prec;
2064 0 : if (bo_shift)
2065 0 : bo_shift = limb_prec - bo_shift;
2066 : }
            : /* On the first limb, preload the starting representative limb in
            :    the initialization block and route it into the loop through
            :    prepare_data_in_out so later iterations can reuse it.  */
2067 462 : if (m_first)
2068 : {
2069 137 : if (m_upwards && bo_shift)
2070 : {
2071 134 : gimple_stmt_iterator save_gsi = m_gsi;
2072 134 : m_gsi = m_init_gsi;
2073 134 : if (gsi_end_p (m_gsi))
2074 53 : m_gsi = gsi_after_labels (gsi_bb (m_gsi));
2075 : else
2076 81 : gsi_next (&m_gsi);
2077 134 : tree t = limb_access (NULL_TREE, nrhs1,
2078 134 : size_int (bo_idx + bo_last), true);
2079 134 : tree iv = make_ssa_name (m_limb_type);
2080 134 : g = gimple_build_assign (iv, t);
2081 134 : insert_before (g);
            : /* The preload may itself throw; split the block and keep the
            :    saved iterators valid across the split.  */
2082 134 : if (eh)
2083 : {
2084 2 : maybe_duplicate_eh_stmt (g, stmt);
2085 2 : if (eh_edge)
2086 : {
2087 2 : edge e = split_block (gsi_bb (m_gsi), g);
2088 2 : add_eh_edge (e->src, eh_edge);
2089 2 : m_gsi = gsi_after_labels (e->dest);
2090 2 : if (gsi_bb (save_gsi) == e->src)
2091 : {
2092 1 : if (gsi_end_p (save_gsi))
2093 0 : save_gsi = gsi_end_bb (e->dest);
2094 : else
2095 1 : save_gsi = gsi_for_stmt (gsi_stmt (save_gsi));
2096 : }
2097 2 : if (m_preheader_bb == e->src)
2098 1 : m_preheader_bb = e->dest;
2099 : }
2100 : }
2101 134 : m_init_gsi = m_gsi;
2102 134 : if (gsi_end_p (m_init_gsi))
2103 218 : m_init_gsi = gsi_last_bb (gsi_bb (m_init_gsi));
2104 : else
2105 25 : gsi_prev (&m_init_gsi);
2106 134 : m_gsi = save_gsi;
2107 134 : tree out;
2108 134 : prepare_data_in_out (iv, idx, &out);
2109 134 : out = m_data[m_data_cnt];
2110 134 : m_data.safe_push (out);
2111 134 : }
2112 : else
2113 : {
2114 3 : m_data.safe_push (NULL_TREE);
2115 3 : m_data.safe_push (NULL_TREE);
2116 3 : m_data.safe_push (NULL_TREE);
2117 : }
2118 : }
2119 :
            : /* nidx0: representative limb to load IV from when it is not cached
            :    in m_data; nidx1: the following limb supplying the remaining
            :    bits when the field is shifted by bo_shift.  */
2120 462 : tree nidx0 = NULL_TREE, nidx1 = NULL_TREE;
2121 462 : tree iv = m_data[m_data_cnt];
2122 462 : if (m_cast_conditional && iv)
2123 : {
2124 12 : gcc_assert (!m_bitfld_load);
2125 12 : m_bitfld_load = m_data_cnt;
2126 : }
2127 462 : if (tree_fits_uhwi_p (idx))
2128 : {
2129 264 : unsigned prec = TYPE_PRECISION (rhs_type);
2130 264 : unsigned HOST_WIDE_INT i = tree_to_uhwi (idx);
2131 264 : if (bitint_big_endian)
2132 0 : i = nelts - 1 - i;
2133 264 : gcc_assert (i * limb_prec < prec);
2134 264 : if (bo_shift)
2135 264 : nidx1 = size_int (bo_idx + (bitint_big_endian
2136 : ? bo_last - i - 1 : i + 1));
            : /* Last partial limb: the second load is unnecessary when the
            :    remaining bits fit in a single representative limb.  */
2137 264 : if ((i + 1) * limb_prec > prec)
2138 : {
2139 96 : prec %= limb_prec;
2140 96 : if (prec + bo_bit <= (unsigned) limb_prec)
2141 264 : nidx1 = NULL_TREE;
2142 : }
2143 264 : if (!iv)
2144 4 : nidx0 = size_int (bo_idx + (bitint_big_endian ? bo_last - i : i));
2145 : }
2146 : else
2147 : {
2148 198 : HOST_WIDE_INT adj = bo_idx;
2149 198 : if (bitint_big_endian)
2150 0 : adj += (HOST_WIDE_INT) bo_last + 1 - nelts;
2151 198 : if (!iv)
2152 : {
2153 4 : if (adj == 0)
2154 : nidx0 = idx;
2155 : else
2156 : {
2157 0 : nidx0 = make_ssa_name (sizetype);
2158 0 : g = gimple_build_assign (nidx0, PLUS_EXPR, idx,
2159 0 : size_int (adj));
2160 0 : insert_before (g);
2161 : }
2162 : }
2163 198 : if (bo_shift)
2164 : {
2165 198 : if (bitint_big_endian && adj == 1)
2166 : nidx1 = idx;
2167 : else
2168 : {
2169 198 : nidx1 = make_ssa_name (sizetype);
2170 198 : g = gimple_build_assign (nidx1, PLUS_EXPR, idx,
2171 396 : size_int (adj + (bitint_big_endian
2172 : ? -1 : 1)));
2173 198 : insert_before (g);
2174 : }
2175 : }
2176 : }
2177 :
2178 664 : tree iv2 = NULL_TREE;
2179 202 : if (nidx0)
2180 : {
2181 8 : tree t = limb_access (NULL_TREE, nrhs1, nidx0, true);
2182 8 : iv = make_ssa_name (m_limb_type);
2183 8 : g = gimple_build_assign (iv, t);
2184 8 : insert_before (g);
2185 8 : if (eh)
2186 : {
2187 0 : maybe_duplicate_eh_stmt (g, stmt);
2188 0 : if (eh_edge)
2189 : {
2190 0 : edge e = split_block (gsi_bb (m_gsi), g);
2191 0 : m_gsi = gsi_after_labels (e->dest);
2192 0 : add_eh_edge (e->src, eh_edge);
2193 : }
2194 : }
2195 : }
2196 462 : if (nidx1)
2197 : {
            : /* With a variable most-significant-limb index (m_var_msb), guard
            :    the load of the extra limb: only read it when IDX is not the
            :    last limb, otherwise substitute zero via a PHI below.  */
2198 377 : bool conditional = m_var_msb && !tree_fits_uhwi_p (idx);
2199 377 : unsigned prec = TYPE_PRECISION (rhs_type);
2200 377 : if (conditional)
2201 : {
2202 3 : if ((prec % limb_prec) == 0
2203 3 : || ((prec % limb_prec) + bo_bit > (unsigned) limb_prec))
2204 374 : conditional = false;
2205 : }
2206 377 : edge edge_true = NULL, edge_false = NULL;
2207 377 : if (conditional)
2208 : {
2209 6 : g = gimple_build_cond (NE_EXPR, idx,
2210 : bitint_big_endian
2211 : ? size_zero_node
2212 3 : : size_int (prec / limb_prec),
2213 : NULL_TREE, NULL_TREE);
2214 3 : if_then (g, profile_probability::likely (),
2215 : edge_true, edge_false);
2216 : }
2217 377 : tree t = limb_access (NULL_TREE, nrhs1, nidx1, true);
2218 377 : if (m_upwards_2limb
2219 279 : && !m_first
2220 182 : && !m_bitfld_load
2221 176 : && !tree_fits_uhwi_p (idx))
2222 93 : iv2 = m_data[m_data_cnt + 1];
2223 : else
2224 284 : iv2 = make_ssa_name (m_limb_type);
2225 377 : g = gimple_build_assign (iv2, t);
2226 377 : insert_before (g);
2227 377 : if (eh)
2228 : {
2229 5 : maybe_duplicate_eh_stmt (g, stmt);
2230 5 : if (eh_edge)
2231 : {
2232 5 : edge e = split_block (gsi_bb (m_gsi), g);
2233 5 : m_gsi = gsi_after_labels (e->dest);
2234 5 : add_eh_edge (e->src, eh_edge);
2235 : }
2236 : }
2237 377 : if (conditional)
2238 : {
2239 3 : tree iv3 = make_ssa_name (m_limb_type);
2240 3 : if (eh)
2241 0 : edge_true = find_edge (gsi_bb (m_gsi), edge_false->dest);
2242 3 : gphi *phi = create_phi_node (iv3, edge_true->dest);
2243 3 : add_phi_arg (phi, iv2, edge_true, UNKNOWN_LOCATION);
2244 3 : add_phi_arg (phi, build_zero_cst (m_limb_type),
2245 : edge_false, UNKNOWN_LOCATION);
2246 3 : m_gsi = gsi_after_labels (edge_true->dest);
2247 3 : iv2 = iv3;
2248 : }
2249 : }
            : /* Assemble the limb: iv = (iv >> bo_shift)
            :    | (iv2 << (limb_prec - bo_shift)).  */
2250 462 : if (bo_shift)
2251 : {
2252 462 : g = gimple_build_assign (make_ssa_name (m_limb_type), RSHIFT_EXPR,
2253 : iv, build_int_cst (unsigned_type_node,
2254 462 : bo_shift));
2255 462 : insert_before (g);
2256 462 : iv = gimple_assign_lhs (g);
2257 : }
2258 462 : if (iv2)
2259 : {
2260 377 : g = gimple_build_assign (make_ssa_name (m_limb_type), LSHIFT_EXPR,
2261 : iv2, build_int_cst (unsigned_type_node,
2262 377 : limb_prec - bo_shift));
2263 377 : insert_before (g);
2264 377 : g = gimple_build_assign (make_ssa_name (m_limb_type), BIT_IOR_EXPR,
2265 : gimple_assign_lhs (g), iv);
2266 377 : insert_before (g);
2267 377 : iv = gimple_assign_lhs (g);
            : /* Cache the just-read high limb; the next call picks it up as
            :    its IV from the same m_data slot.  */
2268 377 : if (m_data[m_data_cnt])
2269 371 : m_data[m_data_cnt] = iv2;
2270 : }
2271 462 : if (tree_fits_uhwi_p (idx))
2272 : {
2273 264 : tree atype = limb_access_type (rhs_type, idx);
2274 264 : if (!useless_type_conversion_p (atype, TREE_TYPE (iv)))
2275 96 : iv = add_cast (atype, iv);
2276 : }
2277 462 : m_data_cnt += 3;
2278 462 : return iv;
2279 : }
2280 :
2281 20801 : normal_load:
2282 : /* Use write_p = true for loads with EH edges to make
2283 : sure limb_access doesn't add a cast as separate
2284 : statement after it. */
2285 20801 : rhs1 = limb_access (rhs_type, rhs1, idx, eh, !load_bitfield_p)
2286 20801 : tree ret = make_ssa_name (TREE_TYPE (rhs1));
2287 20801 : g = gimple_build_assign (ret, rhs1);
2288 20801 : insert_before (g);
2289 20801 : if (eh)
2290 : {
2291 3 : maybe_duplicate_eh_stmt (g, stmt);
2292 3 : if (eh_edge)
2293 : {
2294 3 : edge e = split_block (gsi_bb (m_gsi), g);
2295 3 : m_gsi = gsi_after_labels (e->dest);
2296 3 : add_eh_edge (e->src, eh_edge);
2297 : }
            : /* After the split the cast (if any) must stay in the same block
            :    as the load; add it manually here.  */
2298 3 : if (tree_fits_uhwi_p (idx))
2299 : {
2300 1 : tree atype = limb_access_type (rhs_type, idx);
2301 1 : if (!useless_type_conversion_p (atype, TREE_TYPE (rhs1)))
2302 1 : ret = add_cast (atype, ret);
2303 : }
2304 : }
2305 : return ret;
2306 : }
2307 :
2308 : /* Return a limb IDX from a mergeable statement STMT. */
2309 :
2310 : tree
2311 38731 : bitint_large_huge::handle_stmt (gimple *stmt, tree idx)
2312 : {
2313 38731 : tree lhs, rhs1, rhs2 = NULL_TREE;
2314 38731 : gimple *g;
2315 38731 : switch (gimple_code (stmt))
2316 : {
2317 38731 : case GIMPLE_ASSIGN:
2318 38731 : if (gimple_assign_load_p (stmt))
2319 21263 : return handle_load (stmt, idx);
2320 17468 : switch (gimple_assign_rhs_code (stmt))
2321 : {
2322 916 : case BIT_AND_EXPR:
2323 916 : case BIT_IOR_EXPR:
2324 916 : case BIT_XOR_EXPR:
2325 916 : rhs2 = handle_operand (gimple_assign_rhs2 (stmt), idx);
2326 : /* FALLTHRU */
2327 1312 : case BIT_NOT_EXPR:
2328 1312 : rhs1 = handle_operand (gimple_assign_rhs1 (stmt), idx);
2329 1312 : lhs = make_ssa_name (TREE_TYPE (rhs1));
2330 1312 : g = gimple_build_assign (lhs, gimple_assign_rhs_code (stmt),
2331 : rhs1, rhs2);
2332 1312 : insert_before (g);
2333 1312 : return lhs;
2334 3944 : case PLUS_EXPR:
2335 3944 : case MINUS_EXPR:
2336 3944 : rhs1 = handle_operand (gimple_assign_rhs1 (stmt), idx);
2337 3944 : rhs2 = handle_operand (gimple_assign_rhs2 (stmt), idx);
2338 3944 : return handle_plus_minus (gimple_assign_rhs_code (stmt),
2339 3944 : rhs1, rhs2, idx);
2340 123 : case NEGATE_EXPR:
2341 123 : rhs2 = handle_operand (gimple_assign_rhs1 (stmt), idx);
2342 123 : rhs1 = build_zero_cst (TREE_TYPE (rhs2));
2343 123 : return handle_plus_minus (MINUS_EXPR, rhs1, rhs2, idx);
2344 152 : case LSHIFT_EXPR:
2345 152 : return handle_lshift (handle_operand (gimple_assign_rhs1 (stmt),
2346 : idx),
2347 152 : gimple_assign_rhs2 (stmt), idx);
2348 5033 : case SSA_NAME:
2349 5033 : case PAREN_EXPR:
2350 5033 : case INTEGER_CST:
2351 5033 : return handle_operand (gimple_assign_rhs1 (stmt), idx);
2352 6863 : CASE_CONVERT:
2353 6863 : return handle_cast (TREE_TYPE (gimple_assign_lhs (stmt)),
2354 6863 : gimple_assign_rhs1 (stmt), idx);
2355 8 : case VIEW_CONVERT_EXPR:
2356 8 : return handle_cast (TREE_TYPE (gimple_assign_lhs (stmt)),
2357 8 : TREE_OPERAND (gimple_assign_rhs1 (stmt), 0),
2358 8 : idx);
2359 33 : case BIT_FIELD_REF:
2360 33 : return handle_bit_field_ref (gimple_assign_rhs1 (stmt), idx);
2361 : default:
2362 : break;
2363 : }
2364 : break;
2365 : default:
2366 : break;
2367 : }
2368 0 : gcc_unreachable ();
2369 : }
2370 :
2371 : /* Return minimum precision of OP at STMT.
2372 : Positive value is minimum precision above which all bits
2373 : are zero, negative means all bits above negation of the
2374 : value are copies of the sign bit. */
2375 :
2376 : static int
2377 8061 : range_to_prec (tree op, gimple *stmt)
2378 : {
2379 8061 : int_range_max r;
2380 8061 : wide_int w;
2381 8061 : tree type = TREE_TYPE (op);
2382 8061 : unsigned int prec = TYPE_PRECISION (type);
2383 :
2384 8061 : if (!optimize
2385 7064 : || !get_range_query (cfun)->range_of_expr (r, op, stmt)
2386 11593 : || r.undefined_p ())
2387 : {
2388 4530 : if (TYPE_UNSIGNED (type))
2389 1832 : return prec;
2390 : else
2391 2698 : return MIN ((int) -prec, -2);
2392 : }
2393 :
2394 3531 : if (!TYPE_UNSIGNED (TREE_TYPE (op)))
2395 : {
2396 2203 : w = r.lower_bound ();
2397 2203 : if (wi::neg_p (w))
2398 : {
2399 1803 : int min_prec1 = wi::min_precision (w, SIGNED);
2400 1803 : w = r.upper_bound ();
2401 1803 : int min_prec2 = wi::min_precision (w, SIGNED);
2402 1803 : int min_prec = MAX (min_prec1, min_prec2);
2403 1803 : return MIN (-min_prec, -2);
2404 : }
2405 : }
2406 :
2407 1728 : w = r.upper_bound ();
2408 1728 : int min_prec = wi::min_precision (w, UNSIGNED);
2409 1728 : return MAX (min_prec, 1);
2410 8061 : }
2411 :
2412 : /* Return address of the first limb of OP and write into *PREC
2413 : its precision. If positive, the operand is zero extended
2414 : from that precision, if it is negative, the operand is sign-extended
2415 : from -*PREC. If PREC_STORED is NULL, it is the toplevel call,
2416 : otherwise *PREC_STORED is prec from the innermost call without
2417 : range optimizations (0 for uninitialized SSA_NAME). */
2418 :
2419 : tree
2420 3598 : bitint_large_huge::handle_operand_addr (tree op, gimple *stmt,
2421 : int *prec_stored, int *prec)
2422 : {
2423 3598 : wide_int w;
2424 3598 : location_t loc_save = m_loc;
2425 3598 : tree ret = NULL_TREE;
2426 3598 : int precs = 0;
2427 3598 : if ((TREE_CODE (TREE_TYPE (op)) != BITINT_TYPE
2428 3591 : || bitint_precision_kind (TREE_TYPE (op)) < bitint_prec_large)
2429 3693 : && TREE_CODE (op) != INTEGER_CST)
2430 : {
2431 109 : do_int:
2432 109 : *prec = range_to_prec (op, stmt);
2433 109 : bitint_prec_kind kind = bitint_prec_small;
2434 109 : gcc_assert (INTEGRAL_TYPE_P (TREE_TYPE (op)));
2435 109 : if (TREE_CODE (TREE_TYPE (op)) == BITINT_TYPE)
2436 98 : kind = bitint_precision_kind (TREE_TYPE (op));
2437 98 : if (kind == bitint_prec_middle)
2438 : {
2439 12 : tree type = NULL_TREE;
2440 12 : op = maybe_cast_middle_bitint (&m_gsi, op, type);
2441 : }
2442 109 : tree op_type = TREE_TYPE (op);
2443 109 : unsigned HOST_WIDE_INT nelts
2444 109 : = CEIL (TYPE_PRECISION (op_type), limb_prec);
2445 : /* Add support for 3 or more limbs filled in from normal
2446 : integral type if this assert fails. If no target chooses
2447 : limb mode smaller than half of largest supported normal
2448 : integral type, this will not be needed. */
2449 109 : gcc_assert (nelts <= 2);
2450 109 : precs = (TYPE_UNSIGNED (op_type)
2451 109 : ? TYPE_PRECISION (op_type) : -TYPE_PRECISION (op_type));
2452 109 : if (*prec <= limb_prec && *prec >= -limb_prec)
2453 : {
2454 96 : nelts = 1;
2455 96 : if (TYPE_UNSIGNED (op_type))
2456 : {
2457 26 : if (precs > limb_prec)
2458 109 : precs = limb_prec;
2459 : }
2460 70 : else if (precs < -limb_prec)
2461 109 : precs = -limb_prec;
2462 : }
2463 109 : if (prec_stored)
2464 0 : *prec_stored = precs;
2465 109 : tree atype = build_array_type_nelts (m_limb_type, nelts);
2466 109 : tree var = create_tmp_var (atype);
2467 109 : tree t1 = op;
2468 109 : if (!useless_type_conversion_p (m_limb_type, op_type))
2469 109 : t1 = add_cast (m_limb_type, t1);
2470 109 : tree v = build4 (ARRAY_REF, m_limb_type, var,
2471 0 : bitint_big_endian && nelts > 1
2472 : ? size_one_node : size_zero_node,
2473 : NULL_TREE, NULL_TREE);
2474 109 : gimple *g = gimple_build_assign (v, t1);
2475 109 : insert_before (g);
2476 109 : if (nelts > 1)
2477 : {
2478 13 : tree lp = build_int_cst (unsigned_type_node, limb_prec);
2479 13 : g = gimple_build_assign (make_ssa_name (op_type),
2480 : RSHIFT_EXPR, op, lp);
2481 13 : insert_before (g);
2482 13 : tree t2 = gimple_assign_lhs (g);
2483 13 : t2 = add_cast (m_limb_type, t2);
2484 13 : v = build4 (ARRAY_REF, m_limb_type, var,
2485 : bitint_big_endian ? size_zero_node : size_one_node,
2486 : NULL_TREE, NULL_TREE);
2487 13 : g = gimple_build_assign (v, t2);
2488 13 : insert_before (g);
2489 : }
2490 109 : ret = build_fold_addr_expr (var);
2491 109 : if (!stmt_ends_bb_p (gsi_stmt (m_gsi)))
2492 : {
2493 108 : tree clobber = build_clobber (atype, CLOBBER_STORAGE_END);
2494 108 : g = gimple_build_assign (var, clobber);
2495 108 : gsi_insert_after (&m_gsi, g, GSI_SAME_STMT);
2496 : }
2497 109 : m_loc = loc_save;
2498 109 : goto do_ret;
2499 : }
2500 3525 : switch (TREE_CODE (op))
2501 : {
2502 2700 : case SSA_NAME:
2503 2700 : if (m_names == NULL
2504 2700 : || !bitmap_bit_p (m_names, SSA_NAME_VERSION (op)))
2505 : {
2506 90 : gimple *g = SSA_NAME_DEF_STMT (op);
2507 90 : m_loc = gimple_location (g);
2508 90 : if (gimple_assign_load_p (g))
2509 : {
2510 36 : *prec = range_to_prec (op, NULL);
2511 36 : precs = (TYPE_UNSIGNED (TREE_TYPE (op))
2512 36 : ? TYPE_PRECISION (TREE_TYPE (op))
2513 25 : : -TYPE_PRECISION (TREE_TYPE (op)));
2514 36 : if (prec_stored)
2515 6 : *prec_stored = precs;
2516 36 : ret = build_fold_addr_expr (gimple_assign_rhs1 (g));
2517 36 : ret = force_gimple_operand_gsi (&m_gsi, ret, true,
2518 : NULL_TREE, true, GSI_SAME_STMT);
2519 : }
2520 54 : else if (gimple_code (g) == GIMPLE_NOP)
2521 : {
2522 2 : *prec = TYPE_UNSIGNED (TREE_TYPE (op)) ? limb_prec : -limb_prec;
2523 2 : precs = *prec;
2524 2 : if (prec_stored)
2525 1 : *prec_stored = 0;
2526 2 : tree var = create_tmp_var (m_limb_type);
2527 2 : TREE_ADDRESSABLE (var) = 1;
2528 2 : ret = build_fold_addr_expr (var);
2529 2 : if (!stmt_ends_bb_p (gsi_stmt (m_gsi)))
2530 : {
2531 2 : tree clobber = build_clobber (m_limb_type,
2532 : CLOBBER_STORAGE_END);
2533 2 : g = gimple_build_assign (var, clobber);
2534 2 : gsi_insert_after (&m_gsi, g, GSI_SAME_STMT);
2535 : }
2536 : }
2537 : else
2538 : {
2539 52 : gcc_assert (gimple_assign_cast_p (g));
2540 52 : tree rhs1 = gimple_assign_rhs1 (g);
2541 52 : bitint_prec_kind kind = bitint_prec_small;
2542 52 : if (TREE_CODE (rhs1) == VIEW_CONVERT_EXPR)
2543 1 : rhs1 = TREE_OPERAND (rhs1, 0);
2544 52 : gcc_assert (INTEGRAL_TYPE_P (TREE_TYPE (rhs1)));
2545 52 : if (TREE_CODE (TREE_TYPE (rhs1)) == BITINT_TYPE)
2546 43 : kind = bitint_precision_kind (TREE_TYPE (rhs1));
2547 43 : if (kind >= bitint_prec_large)
2548 : {
2549 16 : tree lhs_type = TREE_TYPE (op);
2550 16 : tree rhs_type = TREE_TYPE (rhs1);
2551 16 : int prec_stored_val = 0;
2552 16 : ret = handle_operand_addr (rhs1, g, &prec_stored_val, prec);
2553 16 : precs = prec_stored_val;
2554 16 : if (prec_stored)
2555 0 : *prec_stored = prec_stored_val;
2556 16 : if (precs == 0)
2557 : {
2558 1 : gcc_assert (*prec == limb_prec || *prec == -limb_prec);
2559 : precs = *prec;
2560 : }
2561 16 : if (TYPE_PRECISION (lhs_type) > TYPE_PRECISION (rhs_type))
2562 : {
2563 4 : if (TYPE_UNSIGNED (lhs_type)
2564 4 : && !TYPE_UNSIGNED (rhs_type))
2565 1 : gcc_assert (*prec >= 0 || prec_stored == NULL);
2566 : }
2567 : else
2568 : {
2569 12 : if (prec_stored_val == 0)
2570 : /* Non-widening cast of uninitialized value. */;
2571 11 : else if (*prec > 0 && *prec < TYPE_PRECISION (lhs_type))
2572 : ;
2573 11 : else if (TYPE_UNSIGNED (lhs_type))
2574 : {
2575 8 : gcc_assert (*prec > 0
2576 : || prec_stored_val > 0
2577 : || (-prec_stored_val
2578 : >= TYPE_PRECISION (lhs_type)));
2579 8 : *prec = TYPE_PRECISION (lhs_type);
2580 : }
2581 3 : else if (*prec < 0 && -*prec < TYPE_PRECISION (lhs_type))
2582 : ;
2583 : else
2584 3 : *prec = -TYPE_PRECISION (lhs_type);
2585 : }
2586 : }
2587 : else
2588 : {
2589 36 : op = rhs1;
2590 36 : stmt = g;
2591 36 : goto do_int;
2592 : }
2593 : }
2594 54 : m_loc = loc_save;
2595 54 : goto do_ret;
2596 : }
2597 : else
2598 : {
2599 2610 : int p = var_to_partition (m_map, op);
2600 2610 : gcc_assert (m_vars[p] != NULL_TREE);
2601 2610 : *prec = range_to_prec (op, stmt);
2602 2610 : precs = (TYPE_UNSIGNED (TREE_TYPE (op))
2603 2610 : ? TYPE_PRECISION (TREE_TYPE (op))
2604 1552 : : -TYPE_PRECISION (TREE_TYPE (op)));
2605 2610 : if (prec_stored)
2606 9 : *prec_stored = precs;
2607 2610 : ret = build_fold_addr_expr (m_vars[p]);
2608 2610 : goto do_ret;
2609 : }
2610 825 : case INTEGER_CST:
2611 825 : unsigned int min_prec, mp;
2612 825 : tree type;
2613 825 : w = wi::to_wide (op);
2614 825 : if (tree_int_cst_sgn (op) >= 0)
2615 : {
2616 610 : min_prec = wi::min_precision (w, UNSIGNED);
2617 610 : *prec = MAX (min_prec, 1);
2618 : }
2619 : else
2620 : {
2621 215 : min_prec = wi::min_precision (w, SIGNED);
2622 215 : *prec = MIN ((int) -min_prec, -2);
2623 : }
2624 825 : mp = CEIL (min_prec, limb_prec) * limb_prec;
2625 825 : if (mp == 0)
2626 : mp = 1;
2627 825 : if (mp >= (unsigned) TYPE_PRECISION (TREE_TYPE (op))
2628 825 : && (TREE_CODE (TREE_TYPE (op)) == BITINT_TYPE
2629 5 : || TYPE_PRECISION (TREE_TYPE (op)) <= limb_prec))
2630 288 : type = TREE_TYPE (op);
2631 : else
2632 537 : type = build_bitint_type (mp, 1);
2633 825 : if (TREE_CODE (type) != BITINT_TYPE
2634 825 : || bitint_precision_kind (type) == bitint_prec_small)
2635 : {
2636 555 : if (TYPE_PRECISION (type) <= limb_prec)
2637 555 : type = m_limb_type;
2638 : else
2639 : {
2640 0 : while (bitint_precision_kind (mp) == bitint_prec_small)
2641 0 : mp += limb_prec;
2642 : /* This case is for targets which e.g. have 64-bit
2643 : limb but categorize up to 128-bits _BitInts as
2644 : small. We could use type of m_limb_type[2] and
2645 : similar instead to save space. */
2646 0 : type = build_bitint_type (mp, 1);
2647 : }
2648 : }
2649 825 : if (tree_int_cst_sgn (op) >= 0)
2650 610 : precs = MAX (TYPE_PRECISION (type), 1);
2651 : else
2652 215 : precs = MIN ((int) -TYPE_PRECISION (type), -2);
2653 825 : if (prec_stored)
2654 0 : *prec_stored = precs;
2655 825 : op = tree_output_constant_def (fold_convert (type, op));
2656 825 : ret = build_fold_addr_expr (op);
2657 825 : goto do_ret;
2658 0 : default:
2659 0 : gcc_unreachable ();
2660 : }
2661 3598 : do_ret:
2662 3598 : if (bitint_big_endian && prec_stored == NULL)
2663 : {
2664 0 : int p1 = *prec < 0 ? -*prec : *prec;
2665 0 : int p2 = precs < 0 ? -precs : precs;
2666 0 : int c1 = CEIL (p1, limb_prec);
2667 0 : int c2 = CEIL (p2, limb_prec);
2668 0 : gcc_assert (c1 <= c2);
2669 0 : if (c1 != c2)
2670 : {
2671 0 : gimple *g
2672 0 : = gimple_build_assign (make_ssa_name (TREE_TYPE (ret)),
2673 : POINTER_PLUS_EXPR, ret,
2674 0 : size_int ((c2 - c1) * m_limb_size));
2675 0 : insert_before (g);
2676 0 : ret = gimple_assign_lhs (g);
2677 : }
2678 : }
2679 3598 : return ret;
2680 3598 : }
2681 :
2682 : /* Helper function, create a loop before the current location,
2683 : start with sizetype INIT value from the preheader edge. Return
2684 : a PHI result and set *IDX_NEXT to SSA_NAME it creates and uses
2685 : from the latch edge. */
2686 :
2687 : tree
2688 14272 : bitint_large_huge::create_loop (tree init, tree *idx_next)
2689 : {
2690 14272 : if (!gsi_end_p (m_gsi))
2691 12194 : gsi_prev (&m_gsi);
2692 : else
2693 4156 : m_gsi = gsi_last_bb (gsi_bb (m_gsi));
2694 14272 : edge e1 = split_block (gsi_bb (m_gsi), gsi_stmt (m_gsi));
2695 14272 : edge e2 = split_block (e1->dest, (gimple *) NULL);
2696 14272 : edge e3 = make_edge (e1->dest, e1->dest, EDGE_TRUE_VALUE);
2697 14272 : e3->probability = profile_probability::very_unlikely ();
2698 14272 : e2->flags = EDGE_FALSE_VALUE;
2699 14272 : e2->probability = e3->probability.invert ();
2700 14272 : tree idx = make_ssa_name (sizetype);
2701 14272 : gphi *phi = create_phi_node (idx, e1->dest);
2702 14272 : add_phi_arg (phi, init, e1, UNKNOWN_LOCATION);
2703 14272 : *idx_next = make_ssa_name (sizetype);
2704 14272 : add_phi_arg (phi, *idx_next, e3, UNKNOWN_LOCATION);
2705 14272 : m_gsi = gsi_after_labels (e1->dest);
2706 14272 : m_bb = e1->dest;
2707 14272 : m_preheader_bb = e1->src;
2708 14272 : class loop *loop = alloc_loop ();
2709 14272 : loop->header = e1->dest;
2710 14272 : add_loop (loop, e1->src->loop_father);
2711 14272 : return idx;
2712 : }
2713 :
2714 : /* Lower large/huge _BitInt statement mergeable or similar STMT which can be
2715 : lowered using iteration from the least significant limb up to the most
2716 : significant limb. For large _BitInt it is emitted as straight line code
2717 : before current location, for huge _BitInt as a loop handling two limbs
2718 : at once, followed by handling up to two limbs in straight line code (at most
2719 : one full and one partial limb). It can also handle EQ_EXPR/NE_EXPR
2720 : comparisons, in that case CMP_CODE should be the comparison code and
2721 : CMP_OP1/CMP_OP2 the comparison operands. */
2722 :
2723 : tree
2724 23128 : bitint_large_huge::lower_mergeable_stmt (gimple *stmt, tree_code &cmp_code,
2725 : tree cmp_op1, tree cmp_op2)
2726 : {
2727 23128 : bool eq_p = cmp_code != ERROR_MARK;
2728 23128 : tree type;
2729 23128 : if (eq_p)
2730 6497 : type = TREE_TYPE (cmp_op1);
2731 : else
2732 16631 : type = TREE_TYPE (gimple_assign_lhs (stmt));
2733 23128 : gcc_assert (TREE_CODE (type) == BITINT_TYPE);
2734 23128 : bitint_prec_kind kind = bitint_precision_kind (type);
2735 23128 : gcc_assert (kind >= bitint_prec_large);
2736 23128 : gimple *g;
2737 23128 : tree lhs = gimple_get_lhs (stmt);
2738 23128 : tree rhs1, lhs_type = lhs ? TREE_TYPE (lhs) : NULL_TREE;
2739 17063 : if (lhs
2740 17063 : && TREE_CODE (lhs) == SSA_NAME
2741 8684 : && TREE_CODE (TREE_TYPE (lhs)) == BITINT_TYPE
2742 8252 : && bitint_precision_kind (TREE_TYPE (lhs)) >= bitint_prec_large)
2743 : {
2744 8252 : int p = var_to_partition (m_map, lhs);
2745 8252 : gcc_assert (m_vars[p] != NULL_TREE);
2746 8252 : m_lhs = lhs = m_vars[p];
2747 : }
2748 23128 : unsigned cnt, rem = 0, end = 0, prec = TYPE_PRECISION (type);
2749 23128 : bool sext = false;
2750 23128 : tree ext = NULL_TREE, store_operand = NULL_TREE;
2751 23128 : bool eh = false;
2752 23128 : basic_block eh_pad = NULL;
2753 23128 : tree nlhs = NULL_TREE;
2754 23128 : unsigned HOST_WIDE_INT bo_idx = 0;
2755 23128 : unsigned HOST_WIDE_INT bo_bit = 0;
2756 23128 : unsigned bo_shift = 0;
2757 23128 : unsigned bo_last = 0;
2758 23128 : bool bo_be_p = false;
2759 23128 : tree bf_cur = NULL_TREE, bf_next = NULL_TREE;
2760 23128 : if (gimple_store_p (stmt))
2761 : {
2762 8379 : store_operand = gimple_assign_rhs1 (stmt);
2763 8379 : eh = stmt_ends_bb_p (stmt);
2764 8379 : if (eh)
2765 : {
2766 2 : edge e;
2767 2 : edge_iterator ei;
2768 2 : basic_block bb = gimple_bb (stmt);
2769 :
2770 2 : FOR_EACH_EDGE (e, ei, bb->succs)
2771 2 : if (e->flags & EDGE_EH)
2772 : {
2773 2 : eh_pad = e->dest;
2774 2 : break;
2775 : }
2776 : }
2777 8379 : if (TREE_CODE (lhs) == COMPONENT_REF
2778 8379 : && DECL_BIT_FIELD_TYPE (TREE_OPERAND (lhs, 1)))
2779 : {
2780 158 : tree fld = TREE_OPERAND (lhs, 1);
2781 158 : gcc_assert (tree_fits_uhwi_p (DECL_FIELD_BIT_OFFSET (fld)));
2782 158 : tree repr = DECL_BIT_FIELD_REPRESENTATIVE (fld);
2783 158 : poly_int64 bitoffset;
2784 158 : poly_uint64 field_offset, repr_offset;
2785 158 : if (!bitint_big_endian
2786 158 : && (tree_to_uhwi (DECL_FIELD_BIT_OFFSET (fld))
2787 158 : % BITS_PER_UNIT) == 0)
2788 : nlhs = lhs;
2789 : else
2790 : {
2791 139 : bool var_field_off = false;
2792 139 : if (poly_int_tree_p (DECL_FIELD_OFFSET (fld), &field_offset)
2793 278 : && poly_int_tree_p (DECL_FIELD_OFFSET (repr), &repr_offset))
2794 139 : bitoffset = (field_offset - repr_offset) * BITS_PER_UNIT;
2795 : else
2796 : {
2797 : bitoffset = 0;
2798 : var_field_off = true;
2799 : }
2800 139 : bitoffset += (tree_to_uhwi (DECL_FIELD_BIT_OFFSET (fld))
2801 139 : - tree_to_uhwi (DECL_FIELD_BIT_OFFSET (repr)));
2802 278 : nlhs = build3 (COMPONENT_REF, TREE_TYPE (repr),
2803 139 : TREE_OPERAND (lhs, 0), repr,
2804 : var_field_off
2805 0 : ? TREE_OPERAND (lhs, 2) : NULL_TREE);
2806 139 : HOST_WIDE_INT bo = bitoffset.to_constant ();
2807 139 : bo_idx = (unsigned HOST_WIDE_INT) bo / limb_prec;
2808 139 : bo_bit = (unsigned HOST_WIDE_INT) bo % limb_prec;
2809 139 : bo_shift = bo_bit;
2810 139 : if (bitint_big_endian)
2811 : {
2812 0 : bo_last = CEIL (prec + bo_bit, limb_prec) - 1;
2813 0 : bo_shift = (prec + bo_bit) % limb_prec;
2814 0 : bo_be_p = true;
2815 0 : if (bo_shift)
2816 0 : bo_shift = limb_prec - bo_shift;
2817 : }
2818 : }
2819 : }
2820 : }
2821 8379 : if ((store_operand
2822 8379 : && TREE_CODE (store_operand) == SSA_NAME
2823 7413 : && (m_names == NULL
2824 7395 : || !bitmap_bit_p (m_names, SSA_NAME_VERSION (store_operand)))
2825 1125 : && gimple_assign_cast_p (SSA_NAME_DEF_STMT (store_operand)))
2826 30961 : || gimple_assign_cast_p (stmt))
2827 : {
2828 2133 : rhs1 = gimple_assign_rhs1 (store_operand
2829 546 : ? SSA_NAME_DEF_STMT (store_operand)
2830 : : stmt);
2831 1587 : if (TREE_CODE (rhs1) == VIEW_CONVERT_EXPR)
2832 2 : rhs1 = TREE_OPERAND (rhs1, 0);
2833 : /* Optimize mergeable ops ending with widening cast to _BitInt
2834 : (or followed by store). We can lower just the limbs of the
2835 : cast operand and widen afterwards. */
2836 1587 : if (TREE_CODE (rhs1) == SSA_NAME
2837 1587 : && (m_names == NULL
2838 1580 : || !bitmap_bit_p (m_names, SSA_NAME_VERSION (rhs1)))
2839 608 : && TREE_CODE (TREE_TYPE (rhs1)) == BITINT_TYPE
2840 470 : && bitint_precision_kind (TREE_TYPE (rhs1)) >= bitint_prec_large
2841 2011 : && (CEIL ((unsigned) TYPE_PRECISION (TREE_TYPE (rhs1)),
2842 424 : limb_prec) < CEIL (prec, limb_prec)
2843 308 : || (kind == bitint_prec_huge
2844 262 : && TYPE_PRECISION (TREE_TYPE (rhs1)) < prec)))
2845 : {
2846 122 : store_operand = rhs1;
2847 122 : prec = TYPE_PRECISION (TREE_TYPE (rhs1));
2848 122 : kind = bitint_precision_kind (TREE_TYPE (rhs1));
2849 122 : if (!TYPE_UNSIGNED (TREE_TYPE (rhs1)))
2850 46 : sext = true;
2851 : }
2852 : }
2853 23128 : tree idx = NULL_TREE, idx_first = NULL_TREE, idx_next = NULL_TREE;
2854 23128 : if (kind == bitint_prec_large)
2855 12158 : cnt = CEIL (prec, limb_prec);
2856 : else
2857 : {
2858 10970 : rem = (prec % (2 * limb_prec));
2859 10970 : end = (prec - rem) / limb_prec;
2860 10970 : cnt = 2 + CEIL (rem, limb_prec);
2861 10970 : idx = idx_first = create_loop (bitint_big_endian
2862 10970 : ? size_int (cnt - 2 + end - 1)
2863 : : size_zero_node, &idx_next);
2864 : }
2865 :
2866 23128 : basic_block edge_bb = NULL;
2867 23128 : if (eq_p)
2868 : {
2869 6497 : gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
2870 6497 : gsi_prev (&gsi);
2871 6497 : edge e = split_block (gsi_bb (gsi), gsi_stmt (gsi));
2872 6497 : edge_bb = e->src;
2873 6497 : if (kind == bitint_prec_large)
2874 7156 : m_gsi = gsi_end_bb (edge_bb);
2875 : }
2876 : else
2877 16631 : m_after_stmt = stmt;
2878 23128 : if (kind != bitint_prec_large)
2879 10970 : m_upwards_2limb = end;
2880 23128 : m_upwards = true;
2881 :
2882 23128 : bool separate_ext
2883 23128 : = (prec != (unsigned) TYPE_PRECISION (type)
2884 23128 : && (CEIL ((unsigned) TYPE_PRECISION (type), limb_prec)
2885 122 : > CEIL (prec, limb_prec)));
2886 23128 : bool zero_ms_limb = false;
2887 23128 : if (bitint_extended == bitint_ext_full
2888 0 : && !eq_p
2889 0 : && !nlhs
2890 0 : && abi_limb_prec > limb_prec
2891 23128 : && ((CEIL ((unsigned) TYPE_PRECISION (type), abi_limb_prec)
2892 0 : * abi_limb_prec / limb_prec)
2893 0 : > CEIL ((unsigned) TYPE_PRECISION (type), limb_prec)))
2894 : {
2895 0 : if (prec == (unsigned) TYPE_PRECISION (type))
2896 : {
2897 0 : sext = !TYPE_UNSIGNED (type);
2898 0 : separate_ext = true;
2899 : }
2900 0 : else if (TYPE_UNSIGNED (type) && sext)
2901 : zero_ms_limb = true;
2902 : else
2903 : separate_ext = true;
2904 : }
2905 23244 : unsigned dst_idx_off = 0;
2906 23128 : if (separate_ext && bitint_big_endian)
2907 0 : dst_idx_off = (CEIL ((unsigned) TYPE_PRECISION (type), limb_prec)
2908 0 : - CEIL (prec, limb_prec));
2909 :
2910 92724 : for (unsigned i = 0; i < cnt; i++)
2911 : {
2912 69596 : m_data_cnt = 0;
2913 69596 : if (kind == bitint_prec_large)
2914 37085 : idx = size_int (bitint_big_endian ? cnt - 1 - i : i);
2915 32511 : else if (i >= 2)
2916 10571 : idx = size_int (bitint_big_endian ? cnt - 1 - i : end + (i > 2));
2917 69596 : if (eq_p)
2918 : {
2919 19682 : rhs1 = handle_operand (cmp_op1, idx);
2920 19682 : tree rhs2 = handle_operand (cmp_op2, idx);
2921 19682 : g = gimple_build_cond (NE_EXPR, rhs1, rhs2, NULL_TREE, NULL_TREE);
2922 19682 : insert_before (g);
2923 19682 : edge e1 = split_block (gsi_bb (m_gsi), g);
2924 19682 : e1->flags = EDGE_FALSE_VALUE;
2925 19682 : edge e2 = make_edge (e1->src, gimple_bb (stmt), EDGE_TRUE_VALUE);
2926 19682 : e1->probability = profile_probability::unlikely ();
2927 19682 : e2->probability = e1->probability.invert ();
2928 19682 : if (i == 0)
2929 6497 : set_immediate_dominator (CDI_DOMINATORS, e2->dest, e2->src);
2930 19682 : m_gsi = gsi_after_labels (e1->dest);
2931 : }
2932 : else
2933 : {
2934 49914 : if (store_operand)
2935 25339 : rhs1 = handle_operand (store_operand, idx);
2936 : else
2937 24575 : rhs1 = handle_stmt (stmt, idx);
2938 49914 : if (!useless_type_conversion_p (m_limb_type, TREE_TYPE (rhs1)))
2939 11883 : rhs1 = add_cast (m_limb_type, rhs1);
2940 49914 : if (sext && i == cnt - 1)
2941 49914 : ext = rhs1;
2942 49914 : tree nidx = idx;
2943 49914 : HOST_WIDE_INT adj = bo_idx;
2944 49914 : if (bo_be_p)
2945 0 : adj += bo_last - (CEIL (prec, limb_prec) - 1);
2946 : else
2947 49914 : adj += dst_idx_off;
2948 49914 : if (adj)
2949 : {
2950 208 : if (tree_fits_uhwi_p (idx))
2951 108 : nidx = size_int (tree_to_uhwi (idx) + adj);
2952 : else
2953 : {
2954 100 : nidx = make_ssa_name (sizetype);
2955 100 : g = gimple_build_assign (nidx, PLUS_EXPR, idx,
2956 100 : size_int (adj));
2957 100 : insert_before (g);
2958 : }
2959 : }
2960 49914 : bool done = false;
2961 49914 : basic_block new_bb = NULL;
2962 : /* Handle stores into bit-fields. */
2963 49914 : if (bo_shift)
2964 : {
2965 443 : if (i == 0)
2966 : {
2967 139 : edge e2 = NULL;
2968 139 : if (kind != bitint_prec_large)
2969 : {
2970 110 : prepare_data_in_out (build_zero_cst (m_limb_type),
2971 : idx, &bf_next);
2972 110 : bf_next = m_data.pop ();
2973 110 : bf_cur = m_data.pop ();
2974 110 : g = gimple_build_cond (EQ_EXPR, idx,
2975 : bitint_big_endian
2976 0 : ? size_int (CEIL (prec,
2977 : limb_prec) - 1)
2978 : : size_zero_node,
2979 : NULL_TREE, NULL_TREE);
2980 110 : edge edge_true;
2981 110 : if_then_else (g, profile_probability::unlikely (),
2982 : edge_true, e2);
2983 110 : new_bb = e2->dest;
2984 : }
2985 139 : tree ftype
2986 139 : = build_nonstandard_integer_type (limb_prec - bo_shift, 1);
2987 278 : tree bfr = build_bit_field_ref (ftype, unshare_expr (nlhs),
2988 139 : limb_prec - bo_shift,
2989 : bitint_big_endian
2990 0 : ? (bo_idx + bo_last)
2991 0 : * limb_prec
2992 139 : : bo_idx * limb_prec
2993 : + bo_bit);
2994 139 : tree t = add_cast (ftype, rhs1);
2995 139 : g = gimple_build_assign (bfr, t);
2996 139 : insert_before (g);
2997 139 : if (eh)
2998 : {
2999 0 : maybe_duplicate_eh_stmt (g, stmt);
3000 0 : if (eh_pad)
3001 : {
3002 0 : edge e = split_block (gsi_bb (m_gsi), g);
3003 0 : m_gsi = gsi_after_labels (e->dest);
3004 0 : add_eh_edge (e->src,
3005 : find_edge (gimple_bb (stmt), eh_pad));
3006 : }
3007 : }
3008 139 : if (kind == bitint_prec_large)
3009 : {
3010 : bf_cur = rhs1;
3011 : done = true;
3012 : }
3013 110 : else if (e2)
3014 110 : m_gsi = gsi_after_labels (e2->src);
3015 : }
3016 110 : if (!done)
3017 : {
3018 414 : tree t1 = make_ssa_name (m_limb_type);
3019 414 : tree t2 = make_ssa_name (m_limb_type);
3020 414 : tree t3 = make_ssa_name (m_limb_type);
3021 414 : g = gimple_build_assign (t1, RSHIFT_EXPR, bf_cur,
3022 : build_int_cst (unsigned_type_node,
3023 414 : limb_prec
3024 414 : - bo_shift));
3025 414 : insert_before (g);
3026 414 : g = gimple_build_assign (t2, LSHIFT_EXPR, rhs1,
3027 : build_int_cst (unsigned_type_node,
3028 414 : bo_shift));
3029 414 : insert_before (g);
3030 414 : bf_cur = rhs1;
3031 414 : g = gimple_build_assign (t3, BIT_IOR_EXPR, t1, t2);
3032 414 : insert_before (g);
3033 414 : rhs1 = t3;
3034 414 : if (bf_next && i == 1)
3035 : {
3036 110 : g = gimple_build_assign (bf_next, bf_cur);
3037 110 : insert_before (g);
3038 : }
3039 : }
3040 : }
3041 29 : if (!done)
3042 : {
3043 : /* Handle bit-field access to partial last limb if needed. */
3044 49885 : if (nlhs
3045 470 : && i == cnt - 1
3046 158 : && !separate_ext
3047 134 : && tree_fits_uhwi_p (idx))
3048 : {
3049 119 : unsigned int tprec = TYPE_PRECISION (type);
3050 119 : unsigned int rprec = (tprec - 1) % limb_prec + 1;
3051 119 : if (rprec + bo_shift < (unsigned) limb_prec)
3052 : {
3053 42 : tree ftype
3054 42 : = build_nonstandard_integer_type (rprec + bo_shift, 1);
3055 42 : tree bfr
3056 84 : = build_bit_field_ref (ftype, unshare_expr (nlhs),
3057 : rprec + bo_shift,
3058 : bitint_big_endian
3059 0 : ? bo_idx * limb_prec + bo_bit
3060 42 : : (bo_idx + tprec / limb_prec)
3061 42 : * limb_prec);
3062 42 : tree t = add_cast (ftype, rhs1);
3063 42 : g = gimple_build_assign (bfr, t);
3064 42 : done = true;
3065 42 : bf_cur = NULL_TREE;
3066 : }
3067 77 : else if (rprec + bo_shift == (unsigned) limb_prec)
3068 49843 : bf_cur = NULL_TREE;
3069 : }
3070 : /* Otherwise, stores to any other lhs. */
3071 42 : if (!done)
3072 : {
3073 99258 : tree l = limb_access (nlhs ? NULL_TREE : lhs_type,
3074 : nlhs ? nlhs : lhs, nidx, true);
3075 49843 : g = gimple_build_assign (l, rhs1);
3076 : }
3077 49885 : insert_before (g);
3078 49885 : if (eh)
3079 : {
3080 6 : maybe_duplicate_eh_stmt (g, stmt);
3081 6 : if (eh_pad)
3082 : {
3083 6 : edge e = split_block (gsi_bb (m_gsi), g);
3084 6 : m_gsi = gsi_after_labels (e->dest);
3085 6 : add_eh_edge (e->src,
3086 : find_edge (gimple_bb (stmt), eh_pad));
3087 : }
3088 : }
3089 49885 : if (new_bb)
3090 110 : m_gsi = gsi_after_labels (new_bb);
3091 : }
3092 : }
3093 69596 : m_first = false;
3094 69596 : if (kind == bitint_prec_huge && i <= 1)
3095 : {
3096 21940 : if (i == 0)
3097 : {
3098 10970 : idx = make_ssa_name (sizetype);
3099 10970 : g = gimple_build_assign (idx, PLUS_EXPR, idx_first,
3100 : bitint_big_endian
3101 0 : ? size_int (-1) : size_one_node);
3102 10970 : insert_before (g);
3103 : }
3104 : else
3105 : {
3106 10970 : g = gimple_build_assign (idx_next, PLUS_EXPR, idx_first,
3107 21940 : size_int (bitint_big_endian ? -2 : 2));
3108 10970 : insert_before (g);
3109 10970 : if (bitint_big_endian)
3110 0 : g = gimple_build_cond (NE_EXPR, idx_first, size_int (cnt - 1),
3111 : NULL_TREE, NULL_TREE);
3112 : else
3113 10970 : g = gimple_build_cond (NE_EXPR, idx_next, size_int (end),
3114 : NULL_TREE, NULL_TREE);
3115 10970 : insert_before (g);
3116 10970 : if (eq_p)
3117 2919 : m_gsi = gsi_after_labels (edge_bb);
3118 : else
3119 8051 : m_gsi = gsi_for_stmt (stmt);
3120 10970 : m_bb = NULL;
3121 : }
3122 : }
3123 : }
3124 :
3125 23128 : if (separate_ext)
3126 : {
3127 116 : if (sext)
3128 : {
3129 43 : ext = add_cast (signed_type_for (m_limb_type), ext);
3130 86 : tree lpm1 = build_int_cst (unsigned_type_node,
3131 43 : limb_prec - 1);
3132 43 : tree n = make_ssa_name (TREE_TYPE (ext));
3133 43 : g = gimple_build_assign (n, RSHIFT_EXPR, ext, lpm1);
3134 43 : insert_before (g);
3135 43 : ext = add_cast (m_limb_type, n);
3136 : }
3137 : else
3138 73 : ext = build_zero_cst (m_limb_type);
3139 116 : kind = bitint_precision_kind (type);
3140 116 : unsigned start = CEIL (prec, limb_prec);
3141 116 : prec = TYPE_PRECISION (type);
3142 116 : if (bitint_extended == bitint_ext_full
3143 0 : && !nlhs
3144 0 : && !zero_ms_limb
3145 0 : && abi_limb_prec > limb_prec)
3146 : {
3147 0 : prec = CEIL (prec, abi_limb_prec) * abi_limb_prec;
3148 0 : kind = bitint_precision_kind (prec);
3149 : }
3150 116 : unsigned total = CEIL (prec, limb_prec);
3151 116 : idx = idx_first = idx_next = NULL_TREE;
3152 116 : if (prec <= (start + 2 + (bo_shift != 0)) * limb_prec)
3153 : kind = bitint_prec_large;
3154 78 : if (kind == bitint_prec_large)
3155 38 : cnt = total - start;
3156 : else
3157 : {
3158 78 : rem = prec % limb_prec;
3159 78 : end = (prec - rem) / limb_prec;
3160 148 : cnt = (bo_shift != 0) + 1 + (rem != 0);
3161 : }
3162 116 : if (bitint_big_endian && bo_shift != 0 && (prec % limb_prec) == 0)
3163 0 : ++total;
3164 296 : for (unsigned i = 0; i < cnt; i++)
3165 : {
3166 180 : if (kind == bitint_prec_large || (i == 0 && bo_shift != 0))
3167 48 : idx = size_int (bo_idx
3168 : + (bitint_big_endian
3169 : ? total - 1 - start - i : start + i));
3170 132 : else if (i == cnt - 1 && rem != 0)
3171 108 : idx = size_int (bo_idx + (bitint_big_endian ? 0 : end));
3172 78 : else if (i == (bo_shift != 0))
3173 78 : idx = create_loop (size_int (bo_idx
3174 : + (bitint_big_endian
3175 : ? total - 1 - start - i
3176 : : start + i)), &idx_next);
3177 180 : rhs1 = ext;
3178 180 : if (bf_cur != NULL_TREE && bf_cur != ext)
3179 : {
3180 20 : tree t1 = make_ssa_name (m_limb_type);
3181 20 : g = gimple_build_assign (t1, RSHIFT_EXPR, bf_cur,
3182 : build_int_cst (unsigned_type_node,
3183 20 : limb_prec - bo_shift));
3184 20 : insert_before (g);
3185 20 : if (integer_zerop (ext))
3186 : rhs1 = t1;
3187 : else
3188 : {
3189 10 : tree t2 = make_ssa_name (m_limb_type);
3190 10 : rhs1 = make_ssa_name (m_limb_type);
3191 10 : g = gimple_build_assign (t2, LSHIFT_EXPR, ext,
3192 : build_int_cst (unsigned_type_node,
3193 10 : bo_shift));
3194 10 : insert_before (g);
3195 10 : g = gimple_build_assign (rhs1, BIT_IOR_EXPR, t1, t2);
3196 10 : insert_before (g);
3197 : }
3198 : bf_cur = ext;
3199 : }
3200 180 : bool done = false;
3201 : /* Handle bit-field access to partial last limb if needed. */
3202 180 : if (nlhs && i == cnt - 1)
3203 : {
3204 24 : unsigned int tprec = TYPE_PRECISION (type);
3205 24 : unsigned int rprec = (tprec - 1) % limb_prec + 1;
3206 24 : if (rprec + bo_shift < (unsigned) limb_prec)
3207 : {
3208 12 : tree ftype
3209 12 : = build_nonstandard_integer_type (rprec + bo_shift, 1);
3210 12 : tree bfr
3211 24 : = build_bit_field_ref (ftype, unshare_expr (nlhs),
3212 : rprec + bo_shift,
3213 : bitint_big_endian
3214 0 : ? bo_idx * limb_prec + bo_bit
3215 12 : : (bo_idx + tprec / limb_prec)
3216 12 : * limb_prec);
3217 12 : tree t = add_cast (ftype, rhs1);
3218 12 : g = gimple_build_assign (bfr, t);
3219 12 : done = true;
3220 12 : bf_cur = NULL_TREE;
3221 : }
3222 12 : else if (rprec + bo_shift == (unsigned) limb_prec)
3223 : bf_cur = NULL_TREE;
3224 : }
3225 : /* Otherwise, stores to any other lhs. */
3226 12 : if (!done)
3227 : {
3228 306 : tree l = limb_access (nlhs ? NULL_TREE : lhs_type,
3229 : nlhs ? nlhs : lhs, idx, true);
3230 :
3231 168 : if (bitint_extended
3232 0 : && sext
3233 0 : && TYPE_UNSIGNED (lhs_type)
3234 0 : && tree_fits_uhwi_p (idx)
3235 168 : && !nlhs)
3236 : {
3237 0 : rhs1 = add_cast (limb_access_type (lhs_type, idx), rhs1);
3238 0 : rhs1 = add_cast (TREE_TYPE (l), rhs1);
3239 : }
3240 :
3241 168 : g = gimple_build_assign (l, rhs1);
3242 : }
3243 180 : insert_before (g);
3244 180 : if (eh)
3245 : {
3246 0 : maybe_duplicate_eh_stmt (g, stmt);
3247 0 : if (eh_pad)
3248 : {
3249 0 : edge e = split_block (gsi_bb (m_gsi), g);
3250 0 : m_gsi = gsi_after_labels (e->dest);
3251 0 : add_eh_edge (e->src, find_edge (gimple_bb (stmt), eh_pad));
3252 : }
3253 : }
3254 180 : if (kind == bitint_prec_huge && i == (bo_shift != 0))
3255 : {
3256 78 : g = gimple_build_assign (idx_next, PLUS_EXPR, idx,
3257 : bitint_big_endian
3258 0 : ? size_int (-1) : size_one_node);
3259 78 : insert_before (g);
3260 78 : if (bitint_big_endian && rem != 0)
3261 0 : g = gimple_build_cond (NE_EXPR, idx,
3262 0 : size_int (bo_idx + 1),
3263 : NULL_TREE, NULL_TREE);
3264 : else
3265 78 : g = gimple_build_cond (NE_EXPR, idx_next,
3266 156 : size_int (bo_idx
3267 : + (bitint_big_endian
3268 : ? 0 : end)),
3269 : NULL_TREE, NULL_TREE);
3270 78 : insert_before (g);
3271 78 : m_gsi = gsi_for_stmt (stmt);
3272 78 : m_bb = NULL;
3273 : }
3274 : }
3275 : }
3276 23128 : if (bf_cur != NULL_TREE)
3277 : {
3278 72 : unsigned int tprec = TYPE_PRECISION (type);
3279 72 : unsigned int rprec = (tprec + bo_shift) % limb_prec;
3280 72 : tree ftype = build_nonstandard_integer_type (rprec, 1);
3281 144 : tree bfr = build_bit_field_ref (ftype, unshare_expr (nlhs),
3282 : rprec,
3283 : bitint_big_endian
3284 0 : ? bo_idx * limb_prec + bo_bit
3285 72 : : (bo_idx + (tprec + bo_bit) / limb_prec)
3286 : * limb_prec);
3287 72 : rhs1 = bf_cur;
3288 72 : if (bf_cur != ext)
3289 : {
3290 64 : rhs1 = make_ssa_name (TREE_TYPE (rhs1));
3291 64 : g = gimple_build_assign (rhs1, RSHIFT_EXPR, bf_cur,
3292 : build_int_cst (unsigned_type_node,
3293 64 : limb_prec - bo_shift));
3294 64 : insert_before (g);
3295 : }
3296 72 : rhs1 = add_cast (ftype, rhs1);
3297 72 : g = gimple_build_assign (bfr, rhs1);
3298 72 : insert_before (g);
3299 72 : if (eh)
3300 : {
3301 0 : maybe_duplicate_eh_stmt (g, stmt);
3302 0 : if (eh_pad)
3303 : {
3304 0 : edge e = split_block (gsi_bb (m_gsi), g);
3305 0 : m_gsi = gsi_after_labels (e->dest);
3306 0 : add_eh_edge (e->src, find_edge (gimple_bb (stmt), eh_pad));
3307 : }
3308 : }
3309 : }
3310 23128 : if (zero_ms_limb)
3311 : {
3312 0 : tree p2 = build_int_cst (sizetype,
3313 0 : CEIL ((unsigned) TYPE_PRECISION (type),
3314 : abi_limb_prec)
3315 0 : * abi_limb_prec / limb_prec - 1);
3316 0 : tree l = limb_access (lhs_type, lhs, p2, true);
3317 0 : g = gimple_build_assign (l, build_zero_cst (m_limb_type));
3318 0 : insert_before (g);
3319 0 : if (eh)
3320 : {
3321 0 : maybe_duplicate_eh_stmt (g, stmt);
3322 0 : if (eh_pad)
3323 : {
3324 0 : edge e = split_block (gsi_bb (m_gsi), g);
3325 0 : m_gsi = gsi_after_labels (e->dest);
3326 0 : add_eh_edge (e->src, find_edge (gimple_bb (stmt), eh_pad));
3327 : }
3328 : }
3329 : }
3330 :
3331 23128 : if (gimple_store_p (stmt))
3332 : {
3333 8379 : unlink_stmt_vdef (stmt);
3334 16758 : release_ssa_name (gimple_vdef (stmt));
3335 8379 : gsi_remove (&m_gsi, true);
3336 : }
3337 23128 : if (eq_p)
3338 : {
3339 6497 : lhs = make_ssa_name (boolean_type_node);
3340 6497 : basic_block bb = gimple_bb (stmt);
3341 6497 : gphi *phi = create_phi_node (lhs, bb);
3342 6497 : edge e = find_edge (gsi_bb (m_gsi), bb);
3343 6497 : unsigned int n = EDGE_COUNT (bb->preds);
3344 32676 : for (unsigned int i = 0; i < n; i++)
3345 : {
3346 26179 : edge e2 = EDGE_PRED (bb, i);
3347 26179 : add_phi_arg (phi, e == e2 ? boolean_true_node : boolean_false_node,
3348 : e2, UNKNOWN_LOCATION);
3349 : }
3350 6497 : cmp_code = cmp_code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
3351 6497 : return lhs;
3352 : }
3353 : else
3354 : return NULL_TREE;
3355 : }
3356 :
3357 : /* Handle a large/huge _BitInt comparison statement STMT other than
3358 : EQ_EXPR/NE_EXPR. CMP_CODE, CMP_OP1 and CMP_OP2 meaning is like in
3359 : lower_mergeable_stmt. The {GT,GE,LT,LE}_EXPR comparisons are
3360 : lowered by iteration from the most significant limb downwards to
3361 : the least significant one, for large _BitInt in straight line code,
3362 : otherwise with most significant limb handled in
3363 : straight line code followed by a loop handling one limb at a time.
3364 : Comparisons with unsigned huge _BitInt with precisions which are
3365 : multiples of limb precision can use just the loop and don't need to
3366 : handle most significant limb before the loop. The loop or straight
3367 : line code jumps to final basic block if a particular pair of limbs
3368 : is not equal. */
3369 :
3370 : tree
3371 722 : bitint_large_huge::lower_comparison_stmt (gimple *stmt, tree_code &cmp_code,
3372 : tree cmp_op1, tree cmp_op2)
3373 : {
3374 722 : tree type = TREE_TYPE (cmp_op1);
3375 722 : gcc_assert (TREE_CODE (type) == BITINT_TYPE);
3376 722 : bitint_prec_kind kind = bitint_precision_kind (type);
3377 722 : gcc_assert (kind >= bitint_prec_large);
3378 722 : gimple *g;
3379 722 : if (!TYPE_UNSIGNED (type)
3380 441 : && integer_zerop (cmp_op2)
3381 750 : && (cmp_code == GE_EXPR || cmp_code == LT_EXPR))
3382 : {
3383 28 : unsigned end = CEIL ((unsigned) TYPE_PRECISION (type), limb_prec) - 1;
3384 56 : tree idx = size_int (bitint_big_endian ? 0 : end);
3385 28 : m_data_cnt = 0;
3386 28 : tree rhs1 = handle_operand (cmp_op1, idx);
3387 28 : if (TYPE_UNSIGNED (TREE_TYPE (rhs1)))
3388 : {
3389 24 : tree stype = signed_type_for (TREE_TYPE (rhs1));
3390 24 : rhs1 = add_cast (stype, rhs1);
3391 : }
3392 28 : tree lhs = make_ssa_name (boolean_type_node);
3393 28 : g = gimple_build_assign (lhs, cmp_code, rhs1,
3394 28 : build_zero_cst (TREE_TYPE (rhs1)));
3395 28 : insert_before (g);
3396 28 : cmp_code = NE_EXPR;
3397 28 : return lhs;
3398 : }
3399 :
3400 694 : unsigned cnt, rem = 0, end = 0;
3401 694 : tree idx = NULL_TREE, idx_next = NULL_TREE;
3402 694 : if (kind == bitint_prec_large)
3403 377 : cnt = CEIL ((unsigned) TYPE_PRECISION (type), limb_prec);
3404 : else
3405 : {
3406 317 : rem = ((unsigned) TYPE_PRECISION (type) % limb_prec);
3407 317 : if (rem == 0 && !TYPE_UNSIGNED (type))
3408 : rem = limb_prec;
3409 317 : end = ((unsigned) TYPE_PRECISION (type) - rem) / limb_prec;
3410 317 : cnt = 1 + (rem != 0);
3411 : }
3412 :
3413 694 : basic_block edge_bb = NULL;
3414 694 : gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
3415 694 : gsi_prev (&gsi);
3416 694 : edge e = split_block (gsi_bb (gsi), gsi_stmt (gsi));
3417 694 : edge_bb = e->src;
3418 694 : m_gsi = gsi_end_bb (edge_bb);
3419 :
3420 694 : edge *edges = XALLOCAVEC (edge, cnt * 2);
3421 2519 : for (unsigned i = 0; i < cnt; i++)
3422 : {
3423 1825 : m_data_cnt = 0;
3424 1825 : if (kind == bitint_prec_large)
3425 1246 : idx = size_int (bitint_big_endian ? i : cnt - i - 1);
3426 579 : else if (i == cnt - 1)
3427 317 : idx = create_loop (size_int (bitint_big_endian ? cnt - 1 : end - 1),
3428 : &idx_next);
3429 : else
3430 524 : idx = size_int (bitint_big_endian ? 0 : end);
3431 1825 : tree rhs1 = handle_operand (cmp_op1, idx);
3432 1825 : tree rhs2 = handle_operand (cmp_op2, idx);
3433 1825 : if (i == 0
3434 694 : && !TYPE_UNSIGNED (type)
3435 2238 : && TYPE_UNSIGNED (TREE_TYPE (rhs1)))
3436 : {
3437 112 : tree stype = signed_type_for (TREE_TYPE (rhs1));
3438 112 : rhs1 = add_cast (stype, rhs1);
3439 112 : rhs2 = add_cast (stype, rhs2);
3440 : }
3441 1825 : g = gimple_build_cond (GT_EXPR, rhs1, rhs2, NULL_TREE, NULL_TREE);
3442 1825 : insert_before (g);
3443 1825 : edge e1 = split_block (gsi_bb (m_gsi), g);
3444 1825 : e1->flags = EDGE_FALSE_VALUE;
3445 1825 : edge e2 = make_edge (e1->src, gimple_bb (stmt), EDGE_TRUE_VALUE);
3446 1825 : e1->probability = profile_probability::likely ();
3447 1825 : e2->probability = e1->probability.invert ();
3448 1825 : if (i == 0)
3449 694 : set_immediate_dominator (CDI_DOMINATORS, e2->dest, e2->src);
3450 1825 : m_gsi = gsi_after_labels (e1->dest);
3451 1825 : edges[2 * i] = e2;
3452 1825 : g = gimple_build_cond (LT_EXPR, rhs1, rhs2, NULL_TREE, NULL_TREE);
3453 1825 : insert_before (g);
3454 1825 : e1 = split_block (gsi_bb (m_gsi), g);
3455 1825 : e1->flags = EDGE_FALSE_VALUE;
3456 1825 : e2 = make_edge (e1->src, gimple_bb (stmt), EDGE_TRUE_VALUE);
3457 1825 : e1->probability = profile_probability::unlikely ();
3458 1825 : e2->probability = e1->probability.invert ();
3459 1825 : m_gsi = gsi_after_labels (e1->dest);
3460 1825 : edges[2 * i + 1] = e2;
3461 1825 : m_first = false;
3462 1825 : if (kind == bitint_prec_huge && i == cnt - 1)
3463 : {
3464 634 : g = gimple_build_assign (idx_next, PLUS_EXPR, idx,
3465 : bitint_big_endian ? size_one_node
3466 317 : : size_int (-1));
3467 317 : insert_before (g);
3468 317 : g = gimple_build_cond (NE_EXPR, idx,
3469 : bitint_big_endian
3470 0 : ? size_int (end - 1 + (cnt != 1))
3471 : : size_zero_node,
3472 : NULL_TREE, NULL_TREE);
3473 317 : insert_before (g);
3474 317 : edge true_edge, false_edge;
3475 317 : extract_true_false_edges_from_block (gsi_bb (m_gsi),
3476 : &true_edge, &false_edge);
3477 317 : m_gsi = gsi_after_labels (false_edge->dest);
3478 317 : m_bb = NULL;
3479 : }
3480 : }
3481 :
3482 694 : tree lhs = make_ssa_name (boolean_type_node);
3483 694 : basic_block bb = gimple_bb (stmt);
3484 694 : gphi *phi = create_phi_node (lhs, bb);
3485 4344 : for (unsigned int i = 0; i < cnt * 2; i++)
3486 : {
3487 3650 : tree val = ((cmp_code == GT_EXPR || cmp_code == GE_EXPR)
3488 3650 : ^ (i & 1)) ? boolean_true_node : boolean_false_node;
3489 3650 : add_phi_arg (phi, val, edges[i], UNKNOWN_LOCATION);
3490 : }
3491 694 : add_phi_arg (phi, (cmp_code == GE_EXPR || cmp_code == LE_EXPR)
3492 : ? boolean_true_node : boolean_false_node,
3493 : find_edge (gsi_bb (m_gsi), bb), UNKNOWN_LOCATION);
3494 694 : cmp_code = NE_EXPR;
3495 694 : return lhs;
3496 : }
3497 :
3498 : /* Lower large/huge _BitInt left and right shift except for left
3499 : shift by < limb_prec constant. */
3500 :
void
bitint_large_huge::lower_shift_stmt (tree obj, gimple *stmt)
{
  tree rhs1 = gimple_assign_rhs1 (stmt);
  tree lhs = gimple_assign_lhs (stmt);
  tree_code rhs_code = gimple_assign_rhs_code (stmt);
  tree type = TREE_TYPE (rhs1);
  /* Statement in front of which everything is emitted; m_gsi is later
     reset to it so that PHI nodes can be created in its block.  */
  gimple *final_stmt = gsi_stmt (m_gsi);
  gcc_assert (TREE_CODE (type) == BITINT_TYPE
	      && bitint_precision_kind (type) >= bitint_prec_large);
  int prec = TYPE_PRECISION (type);
  tree n = gimple_assign_rhs2 (stmt), n1, n2, n3, n4;
  gimple *g;
  if (obj == NULL_TREE)
    {
      /* No preallocated result object; use the variable backing the
	 coalesced partition of LHS.  */
      int part = var_to_partition (m_map, lhs);
      gcc_assert (m_vars[part] != NULL_TREE);
      obj = m_vars[part];
    }
  /* Preparation code common for both left and right shifts.
     unsigned n1 = n % limb_prec;
     size_t n2 = n / limb_prec;
     size_t n3 = n1 != 0;
     unsigned n4 = (limb_prec - n1) % limb_prec;
     (for power of 2 limb_prec n4 can be -n1 & limb_prec).  */
  if (TREE_CODE (n) == INTEGER_CST)
    {
      /* Constant shift count: fold n1..n4 at compile time.  */
      tree lp = build_int_cst (TREE_TYPE (n), limb_prec);
      n1 = int_const_binop (TRUNC_MOD_EXPR, n, lp);
      n2 = fold_convert (sizetype, int_const_binop (TRUNC_DIV_EXPR, n, lp));
      n3 = size_int (!integer_zerop (n1));
      n4 = int_const_binop (TRUNC_MOD_EXPR,
			    int_const_binop (MINUS_EXPR, lp, n1), lp);
    }
  else
    {
      /* Variable shift count: emit statements computing n1..n4.  */
      n1 = make_ssa_name (TREE_TYPE (n));
      n2 = make_ssa_name (sizetype);
      n3 = make_ssa_name (sizetype);
      n4 = make_ssa_name (TREE_TYPE (n));
      if (pow2p_hwi (limb_prec))
	{
	  /* Power of two limb_prec: use masking and shifts instead of
	     division/modulo.  */
	  tree lpm1 = build_int_cst (TREE_TYPE (n), limb_prec - 1);
	  g = gimple_build_assign (n1, BIT_AND_EXPR, n, lpm1);
	  insert_before (g);
	  g = gimple_build_assign (useless_type_conversion_p (sizetype,
							      TREE_TYPE (n))
				   ? n2 : make_ssa_name (TREE_TYPE (n)),
				   RSHIFT_EXPR, n,
				   build_int_cst (TREE_TYPE (n),
						  exact_log2 (limb_prec)));
	  insert_before (g);
	  if (gimple_assign_lhs (g) != n2)
	    {
	      /* Separate conversion to sizetype was needed.  */
	      g = gimple_build_assign (n2, NOP_EXPR, gimple_assign_lhs (g));
	      insert_before (g);
	    }
	  g = gimple_build_assign (make_ssa_name (TREE_TYPE (n)),
				   NEGATE_EXPR, n1);
	  insert_before (g);
	  /* n4 = -n1 & (limb_prec - 1), equivalent to
	     (limb_prec - n1) % limb_prec for power of two limb_prec.  */
	  g = gimple_build_assign (n4, BIT_AND_EXPR, gimple_assign_lhs (g),
				   lpm1);
	  insert_before (g);
	}
      else
	{
	  tree lp = build_int_cst (TREE_TYPE (n), limb_prec);
	  g = gimple_build_assign (n1, TRUNC_MOD_EXPR, n, lp);
	  insert_before (g);
	  g = gimple_build_assign (useless_type_conversion_p (sizetype,
							      TREE_TYPE (n))
				   ? n2 : make_ssa_name (TREE_TYPE (n)),
				   TRUNC_DIV_EXPR, n, lp);
	  insert_before (g);
	  if (gimple_assign_lhs (g) != n2)
	    {
	      g = gimple_build_assign (n2, NOP_EXPR, gimple_assign_lhs (g));
	      insert_before (g);
	    }
	  g = gimple_build_assign (make_ssa_name (TREE_TYPE (n)),
				   MINUS_EXPR, lp, n1);
	  insert_before (g);
	  g = gimple_build_assign (n4, TRUNC_MOD_EXPR, gimple_assign_lhs (g),
				   lp);
	  insert_before (g);
	}
      /* n3 = (n1 != 0), widened to sizetype.  */
      g = gimple_build_assign (make_ssa_name (boolean_type_node), NE_EXPR, n1,
			       build_zero_cst (TREE_TYPE (n)));
      insert_before (g);
      g = gimple_build_assign (n3, NOP_EXPR, gimple_assign_lhs (g));
      insert_before (g);
    }
  /* Index of the most significant limb (see pseudocode below).  */
  tree p = build_int_cst (sizetype,
			  prec / limb_prec - (prec % limb_prec == 0));
  if (rhs_code == RSHIFT_EXPR)
    {
      /* Lower
	   dst = src >> n;
	 as
	   unsigned n1 = n % limb_prec;
	   size_t n2 = n / limb_prec;
	   size_t n3 = n1 != 0;
	   unsigned n4 = (limb_prec - n1) % limb_prec;
	   size_t idx;
	   size_t p = prec / limb_prec - (prec % limb_prec == 0);
	   int signed_p = (typeof (src) -1) < 0;
	   for (idx = n2; idx < ((!signed_p && (prec % limb_prec == 0))
				 ? p : p - n3); ++idx)
	     dst[idx - n2] = (src[idx] >> n1) | (src[idx + n3] << n4);
	   limb_type ext;
	   if (prec % limb_prec == 0)
	     ext = src[p];
	   else if (signed_p)
	     ext = ((signed limb_type) (src[p] << (limb_prec
						   - (prec % limb_prec))))
		   >> (limb_prec - (prec % limb_prec));
	   else
	     ext = src[p] & (((limb_type) 1 << (prec % limb_prec)) - 1);
	   if (!signed_p && (prec % limb_prec == 0))
	     ;
	   else if (idx < prec / 64)
	     {
	       dst[idx - n2] = (src[idx] >> n1) | (ext << n4);
	       ++idx;
	     }
	   idx -= n2;
	   if (signed_p)
	     {
	       dst[idx] = ((signed limb_type) ext) >> n1;
	       ext = ((signed limb_type) ext) >> (limb_prec - 1);
	     }
	   else
	     {
	       dst[idx] = ext >> n1;
	       ext = 0;
	     }
	   for (++idx; idx <= p; ++idx)
	     dst[idx] = ext;  */
      /* Bound for the main loop: for little-endian p - n3 (or p when the
	 type is unsigned with precision a multiple of limb_prec); for
	 big-endian indexes run downwards, so the bound is n3 (or 0).  */
      tree pmn3;
      if (TYPE_UNSIGNED (type) && prec % limb_prec == 0)
	pmn3 = bitint_big_endian ? size_zero_node : p;
      else if (bitint_big_endian)
	pmn3 = n3;
      else if (TREE_CODE (n3) == INTEGER_CST)
	pmn3 = int_const_binop (MINUS_EXPR, p, n3);
      else
	{
	  pmn3 = make_ssa_name (sizetype);
	  g = gimple_build_assign (pmn3, MINUS_EXPR, p, n3);
	  insert_before (g);
	}
      /* For big-endian the loop starts at p - n2 instead of n2.  */
      tree pmn2 = NULL_TREE;
      if (bitint_big_endian)
	{
	  if (TREE_CODE (n2) == INTEGER_CST)
	    pmn2 = int_const_binop (MINUS_EXPR, p, n2);
	  else
	    {
	      pmn2 = make_ssa_name (sizetype);
	      g = gimple_build_assign (pmn2, MINUS_EXPR, p, n2);
	      insert_before (g);
	    }
	  g = gimple_build_cond (GT_EXPR, pmn2, pmn3, NULL_TREE, NULL_TREE);
	}
      else
	g = gimple_build_cond (LT_EXPR, n2, pmn3, NULL_TREE, NULL_TREE);
      /* Guard the main loop: skip it entirely when it would iterate
	 zero times.  */
      edge edge_true, edge_false;
      if_then (g, profile_probability::likely (), edge_true, edge_false);
      tree idx_next;
      tree idx = create_loop (bitint_big_endian ? pmn2 : n2, &idx_next);
      tree idxmn2 = make_ssa_name (sizetype);
      tree idxpn3 = make_ssa_name (sizetype);
      g = gimple_build_assign (idxmn2,
			       bitint_big_endian ? PLUS_EXPR : MINUS_EXPR,
			       idx, n2);
      insert_before (g);
      g = gimple_build_assign (idxpn3,
			       bitint_big_endian ? MINUS_EXPR : PLUS_EXPR,
			       idx, n3);
      insert_before (g);
      m_data_cnt = 0;
      tree t1 = handle_operand (rhs1, idx);
      m_first = false;
      /* dst[idx - n2] = (src[idx] >> n1) | (src[idx + n3] << n4);  */
      g = gimple_build_assign (make_ssa_name (m_limb_type),
			       RSHIFT_EXPR, t1, n1);
      insert_before (g);
      t1 = gimple_assign_lhs (g);
      if (!integer_zerop (n3))
	{
	  m_data_cnt = 0;
	  tree t2 = handle_operand (rhs1, idxpn3);
	  g = gimple_build_assign (make_ssa_name (m_limb_type),
				   LSHIFT_EXPR, t2, n4);
	  insert_before (g);
	  t2 = gimple_assign_lhs (g);
	  g = gimple_build_assign (make_ssa_name (m_limb_type),
				   BIT_IOR_EXPR, t1, t2);
	  insert_before (g);
	  t1 = gimple_assign_lhs (g);
	}
      tree l = limb_access (TREE_TYPE (lhs), obj, idxmn2, true);
      g = gimple_build_assign (l, t1);
      insert_before (g);
      g = gimple_build_assign (idx_next, PLUS_EXPR, idx,
			       bitint_big_endian ? size_int (-1)
			       : size_one_node);
      insert_before (g);
      g = gimple_build_cond (bitint_big_endian ? GT_EXPR : LT_EXPR,
			     idx_next, pmn3, NULL_TREE, NULL_TREE);
      insert_before (g);
      /* Merge the loop exit with the skip edge; the PHI carries the
	 final idx value into the block of FINAL_STMT.  */
      idx = make_ssa_name (sizetype);
      m_gsi = gsi_for_stmt (final_stmt);
      gphi *phi = create_phi_node (idx, gsi_bb (m_gsi));
      edge_false = find_edge (edge_false->src, gsi_bb (m_gsi));
      edge_true = EDGE_PRED (gsi_bb (m_gsi),
			     EDGE_PRED (gsi_bb (m_gsi), 0) == edge_false);
      add_phi_arg (phi, bitint_big_endian ? pmn2 : n2, edge_false,
		   UNKNOWN_LOCATION);
      add_phi_arg (phi, idx_next, edge_true, UNKNOWN_LOCATION);
      m_data_cnt = 0;
      /* EXT is the (possibly sign- or zero-extended) most significant
	 limb of the source; see the pseudocode above.  */
      tree ms = handle_operand (rhs1, bitint_big_endian ? size_zero_node : p);
      tree ext = ms;
      if (!types_compatible_p (TREE_TYPE (ms), m_limb_type))
	ext = add_cast (m_limb_type, ms);
      if (!(TYPE_UNSIGNED (type) && prec % limb_prec == 0)
	  && !integer_zerop (n3))
	{
	  /* Conditional "idx < prec / 64" step of the pseudocode:
	     combine the limb below the most significant one with EXT.  */
	  if (bitint_big_endian)
	    g = gimple_build_cond (GT_EXPR, idx, size_zero_node,
				   NULL_TREE, NULL_TREE);
	  else
	    g = gimple_build_cond (LT_EXPR, idx, p, NULL_TREE, NULL_TREE);
	  if_then (g, profile_probability::likely (), edge_true, edge_false);
	  m_data_cnt = 0;
	  t1 = handle_operand (rhs1, idx);
	  g = gimple_build_assign (make_ssa_name (m_limb_type),
				   RSHIFT_EXPR, t1, n1);
	  insert_before (g);
	  t1 = gimple_assign_lhs (g);
	  g = gimple_build_assign (make_ssa_name (m_limb_type),
				   LSHIFT_EXPR, ext, n4);
	  insert_before (g);
	  tree t2 = gimple_assign_lhs (g);
	  g = gimple_build_assign (make_ssa_name (m_limb_type),
				   BIT_IOR_EXPR, t1, t2);
	  insert_before (g);
	  t1 = gimple_assign_lhs (g);
	  idxmn2 = make_ssa_name (sizetype);
	  g = gimple_build_assign (idxmn2, bitint_big_endian
				   ? PLUS_EXPR : MINUS_EXPR, idx, n2);
	  insert_before (g);
	  l = limb_access (TREE_TYPE (lhs), obj, idxmn2, true);
	  g = gimple_build_assign (l, t1);
	  insert_before (g);
	  idx_next = make_ssa_name (sizetype);
	  g = gimple_build_assign (idx_next, PLUS_EXPR, idx,
				   bitint_big_endian
				   ? size_int (-1) : size_one_node);
	  insert_before (g);
	  /* Again merge both paths with a PHI for idx.  */
	  m_gsi = gsi_for_stmt (final_stmt);
	  tree nidx = make_ssa_name (sizetype);
	  phi = create_phi_node (nidx, gsi_bb (m_gsi));
	  edge_false = find_edge (edge_false->src, gsi_bb (m_gsi));
	  edge_true = EDGE_PRED (gsi_bb (m_gsi),
				 EDGE_PRED (gsi_bb (m_gsi), 0) == edge_false);
	  add_phi_arg (phi, idx, edge_false, UNKNOWN_LOCATION);
	  add_phi_arg (phi, idx_next, edge_true, UNKNOWN_LOCATION);
	  idx = nidx;
	}
      /* idx -= n2; (or += n2 for big-endian): switch to destination
	 limb indexes.  */
      g = gimple_build_assign (make_ssa_name (sizetype),
			       bitint_big_endian ? PLUS_EXPR : MINUS_EXPR,
			       idx, n2);
      insert_before (g);
      idx = gimple_assign_lhs (g);
      /* dst[idx] = ext >> n1 (arithmetic shift for signed types), then
	 reduce EXT to all-zeros or the sign-replicated limb.  */
      tree sext = ext;
      if (!TYPE_UNSIGNED (type))
	sext = add_cast (signed_type_for (m_limb_type), ext);
      g = gimple_build_assign (make_ssa_name (TREE_TYPE (sext)),
			       RSHIFT_EXPR, sext, n1);
      insert_before (g);
      t1 = gimple_assign_lhs (g);
      if (!TYPE_UNSIGNED (type))
	{
	  t1 = add_cast (m_limb_type, t1);
	  g = gimple_build_assign (make_ssa_name (TREE_TYPE (sext)),
				   RSHIFT_EXPR, sext,
				   build_int_cst (TREE_TYPE (n),
						  limb_prec - 1));
	  insert_before (g);
	  ext = add_cast (m_limb_type, gimple_assign_lhs (g));
	}
      else
	ext = build_zero_cst (m_limb_type);
      l = limb_access (TREE_TYPE (lhs), obj, idx, true);
      g = gimple_build_assign (l, t1);
      insert_before (g);
      g = gimple_build_assign (make_ssa_name (sizetype), PLUS_EXPR, idx,
			       bitint_big_endian
			       ? size_int (-1) : size_one_node);
      insert_before (g);
      tree p2 = p;
      if (bitint_big_endian)
	{
	  tree new_idx = gimple_assign_lhs (g);
	  g = gimple_build_cond (NE_EXPR, idx, size_zero_node,
				 NULL_TREE, NULL_TREE);
	  idx = new_idx;
	}
      else
	{
	  /* With full extension ABIs whose limbs are wider than the
	     lowered limbs, also fill the padding limbs with EXT.  */
	  if (bitint_extended == bitint_ext_full
	      && abi_limb_prec > limb_prec)
	    p2 = build_int_cst (sizetype,
				CEIL (prec, abi_limb_prec)
				* abi_limb_prec / limb_prec - 1);
	  idx = gimple_assign_lhs (g);
	  g = gimple_build_cond (LE_EXPR, idx, p2, NULL_TREE, NULL_TREE);
	}
      /* Final loop: store EXT into all remaining destination limbs.  */
      if_then (g, profile_probability::likely (), edge_true, edge_false);
      idx = create_loop (idx, &idx_next);
      l = limb_access (TREE_TYPE (lhs), obj, idx, true);
      g = gimple_build_assign (l, ext);
      insert_before (g);
      g = gimple_build_assign (idx_next, PLUS_EXPR, idx,
			       bitint_big_endian
			       ? size_int (-1) : size_one_node);
      insert_before (g);
      if (bitint_big_endian)
	g = gimple_build_cond (NE_EXPR, idx, size_zero_node,
			       NULL_TREE, NULL_TREE);
      else
	g = gimple_build_cond (LE_EXPR, idx_next, p2, NULL_TREE, NULL_TREE);
      insert_before (g);
    }
  else
    {
      /* Lower
	   dst = src << n;
	 as
	   unsigned n1 = n % limb_prec;
	   size_t n2 = n / limb_prec;
	   size_t n3 = n1 != 0;
	   unsigned n4 = (limb_prec - n1) % limb_prec;
	   size_t idx;
	   size_t p = prec / limb_prec - (prec % limb_prec == 0);
	   for (idx = p; (ssize_t) idx >= (ssize_t) (n2 + n3); --idx)
	     dst[idx] = (src[idx - n2] << n1) | (src[idx - n2 - n3] >> n4);
	   if (n1)
	     {
	       dst[idx] = src[idx - n2] << n1;
	       --idx;
	     }
	   for (; (ssize_t) idx >= 0; --idx)
	     dst[idx] = 0;  */
      tree n2pn3;
      if (TREE_CODE (n2) == INTEGER_CST && TREE_CODE (n3) == INTEGER_CST)
	n2pn3 = int_const_binop (PLUS_EXPR, n2, n3);
      else
	{
	  n2pn3 = make_ssa_name (sizetype);
	  g = gimple_build_assign (n2pn3, PLUS_EXPR, n2, n3);
	  insert_before (g);
	}
      if (bitint_big_endian)
	{
	  /* Big-endian iterates upward; mirror the bound to p - (n2 + n3).  */
	  if (TREE_CODE (n2pn3) == INTEGER_CST)
	    n2pn3 = int_const_binop (MINUS_EXPR, p, n2pn3);
	  else
	    {
	      g = gimple_build_assign (make_ssa_name (sizetype),
				       MINUS_EXPR, p, n2pn3);
	      insert_before (g);
	      n2pn3 = gimple_assign_lhs (g);
	    }
	}
      /* For LSHIFT_EXPR, we can use handle_operand with non-INTEGER_CST
	 idx even to access the most significant partial limb.  */
      m_var_msb = true;
      if (integer_zerop (n3))
	/* For n3 == 0 p >= n2 + n3 is always true for all valid shift
	   counts.  Emit if (true) condition that can be optimized later.  */
	g = gimple_build_cond (NE_EXPR, boolean_true_node, boolean_false_node,
			       NULL_TREE, NULL_TREE);
      else if (bitint_big_endian)
	g = gimple_build_cond (NE_EXPR, n2pn3, size_int (-1), NULL_TREE,
			       NULL_TREE);
      else
	g = gimple_build_cond (LE_EXPR, n2pn3, p, NULL_TREE, NULL_TREE);
      edge edge_true, edge_false;
      if_then (g, profile_probability::likely (), edge_true, edge_false);
      tree idx_next;
      /* Main loop: walk from the most significant limb downward (or
	 upward for big-endian limb order).  */
      tree idx = create_loop (bitint_big_endian ? size_zero_node : p,
			      &idx_next);
      tree idxmn2 = make_ssa_name (sizetype);
      tree idxmn2mn3 = make_ssa_name (sizetype);
      g = gimple_build_assign (idxmn2,
			       bitint_big_endian ? PLUS_EXPR : MINUS_EXPR,
			       idx, n2);
      insert_before (g);
      g = gimple_build_assign (idxmn2mn3,
			       bitint_big_endian ? PLUS_EXPR : MINUS_EXPR,
			       idxmn2, n3);
      insert_before (g);
      m_data_cnt = 0;
      tree t1 = handle_operand (rhs1, idxmn2);
      m_first = false;
      /* dst[idx] = (src[idx - n2] << n1) | (src[idx - n2 - n3] >> n4);  */
      g = gimple_build_assign (make_ssa_name (m_limb_type),
			       LSHIFT_EXPR, t1, n1);
      insert_before (g);
      t1 = gimple_assign_lhs (g);
      if (!integer_zerop (n3))
	{
	  m_data_cnt = 0;
	  tree t2 = handle_operand (rhs1, idxmn2mn3);
	  g = gimple_build_assign (make_ssa_name (m_limb_type),
				   RSHIFT_EXPR, t2, n4);
	  insert_before (g);
	  t2 = gimple_assign_lhs (g);
	  g = gimple_build_assign (make_ssa_name (m_limb_type),
				   BIT_IOR_EXPR, t1, t2);
	  insert_before (g);
	  t1 = gimple_assign_lhs (g);
	}
      tree l = limb_access (TREE_TYPE (lhs), obj, idx, true);
      g = gimple_build_assign (l, t1);
      insert_before (g);
      g = gimple_build_assign (idx_next, PLUS_EXPR, idx,
			       bitint_big_endian
			       ? size_one_node : size_int (-1));
      insert_before (g);
      /* Compare in ssizetype, as idx may wrap below zero.  */
      tree sn2pn3 = add_cast (ssizetype, n2pn3);
      g = gimple_build_cond (bitint_big_endian ? LE_EXPR : GE_EXPR,
			     add_cast (ssizetype, idx_next), sn2pn3,
			     NULL_TREE, NULL_TREE);
      insert_before (g);
      /* PHI merging loop exit with the skip path.  */
      idx = make_ssa_name (sizetype);
      m_gsi = gsi_for_stmt (final_stmt);
      gphi *phi = create_phi_node (idx, gsi_bb (m_gsi));
      edge_false = find_edge (edge_false->src, gsi_bb (m_gsi));
      edge_true = EDGE_PRED (gsi_bb (m_gsi),
			     EDGE_PRED (gsi_bb (m_gsi), 0) == edge_false);
      add_phi_arg (phi, bitint_big_endian ? size_zero_node : p,
		   edge_false, UNKNOWN_LOCATION);
      add_phi_arg (phi, idx_next, edge_true, UNKNOWN_LOCATION);
      m_data_cnt = 0;
      if (!integer_zerop (n3))
	{
	  /* The "if (n1)" step of the pseudocode: one more limb gets
	     src[idx - n2] << n1 when the sub-limb shift is nonzero.  */
	  g = gimple_build_cond (NE_EXPR, n3, size_zero_node,
				 NULL_TREE, NULL_TREE);
	  if_then (g, profile_probability::likely (), edge_true, edge_false);
	  idxmn2 = make_ssa_name (sizetype);
	  g = gimple_build_assign (idxmn2,
				   bitint_big_endian ? PLUS_EXPR : MINUS_EXPR,
				   idx, n2);
	  insert_before (g);
	  m_data_cnt = 0;
	  t1 = handle_operand (rhs1, idxmn2);
	  g = gimple_build_assign (make_ssa_name (m_limb_type),
				   LSHIFT_EXPR, t1, n1);
	  insert_before (g);
	  t1 = gimple_assign_lhs (g);
	  l = limb_access (TREE_TYPE (lhs), obj, idx, true);
	  g = gimple_build_assign (l, t1);
	  insert_before (g);
	  idx_next = make_ssa_name (sizetype);
	  g = gimple_build_assign (idx_next, PLUS_EXPR, idx,
				   bitint_big_endian
				   ? size_one_node : size_int (-1));
	  insert_before (g);
	  m_gsi = gsi_for_stmt (final_stmt);
	  tree nidx = make_ssa_name (sizetype);
	  phi = create_phi_node (nidx, gsi_bb (m_gsi));
	  edge_false = find_edge (edge_false->src, gsi_bb (m_gsi));
	  edge_true = EDGE_PRED (gsi_bb (m_gsi),
				 EDGE_PRED (gsi_bb (m_gsi), 0) == edge_false);
	  add_phi_arg (phi, idx, edge_false, UNKNOWN_LOCATION);
	  add_phi_arg (phi, idx_next, edge_true, UNKNOWN_LOCATION);
	  idx = nidx;
	}
      /* Final loop: zero all remaining (least significant) limbs.  */
      if (bitint_big_endian)
	g = gimple_build_cond (LE_EXPR, idx, p, NULL_TREE, NULL_TREE);
      else
	g = gimple_build_cond (GE_EXPR, add_cast (ssizetype, idx),
			       ssize_int (0), NULL_TREE, NULL_TREE);
      if_then (g, profile_probability::likely (), edge_true, edge_false);
      idx = create_loop (idx, &idx_next);
      l = limb_access (TREE_TYPE (lhs), obj, idx, true);
      g = gimple_build_assign (l, build_zero_cst (m_limb_type));
      insert_before (g);
      g = gimple_build_assign (idx_next, PLUS_EXPR, idx,
			       bitint_big_endian
			       ? size_one_node : size_int (-1));
      insert_before (g);
      if (bitint_big_endian)
	g = gimple_build_cond (LE_EXPR, idx_next, p, NULL_TREE, NULL_TREE);
      else
	g = gimple_build_cond (GE_EXPR, add_cast (ssizetype, idx_next),
			       ssize_int (0), NULL_TREE, NULL_TREE);
      insert_before (g);
      if (bitint_extended && prec % limb_prec != 0)
	{
	  /* The most significant limb has been updated either in the
	     loop or in the if after it.  To simplify the code, just
	     read it back from memory and extend.  */
	  m_gsi = gsi_after_labels (edge_false->dest);
	  idx = bitint_big_endian ? size_zero_node : p;
	  tree l = limb_access (TREE_TYPE (lhs), obj, idx, true);
	  tree type = limb_access_type (TREE_TYPE (lhs), idx);
	  tree v = make_ssa_name (m_limb_type);
	  g = gimple_build_assign (v, l);
	  insert_before (g);
	  /* Round-trip through the partial-precision access type to
	     sign- or zero-extend the partial limb.  */
	  v = add_cast (type, v);
	  l = limb_access (TREE_TYPE (lhs), obj, idx, true);
	  v = add_cast (m_limb_type, v);
	  g = gimple_build_assign (l, v);
	  insert_before (g);
	  if (bitint_extended == bitint_ext_full
	      && abi_limb_prec > limb_prec
	      && (CEIL (prec, abi_limb_prec) * abi_limb_prec
		  > CEIL (prec, limb_prec) * limb_prec))
	    {
	      /* ABI requires extension into padding limbs beyond the
		 lowered limbs as well.  */
	      tree p2 = build_int_cst (sizetype,
				       CEIL (prec, abi_limb_prec)
				       * abi_limb_prec / limb_prec - 1);
	      if (TYPE_UNSIGNED (TREE_TYPE (lhs)))
		v = build_zero_cst (m_limb_type);
	      else
		{
		  v = add_cast (signed_type_for (m_limb_type), v);
		  g = gimple_build_assign (make_ssa_name (TREE_TYPE (v)),
					   RSHIFT_EXPR, v,
					   build_int_cst (unsigned_type_node,
							  limb_prec - 1));
		  insert_before (g);
		  v = add_cast (m_limb_type, gimple_assign_lhs (g));
		}
	      l = limb_access (TREE_TYPE (lhs), obj, p2, true);
	      g = gimple_build_assign (l, v);
	      insert_before (g);
	    }
	}
      else if (bitint_extended == bitint_ext_full
	       && abi_limb_prec > limb_prec
	       && (CEIL (prec, abi_limb_prec) * abi_limb_prec
		   > CEIL (prec, limb_prec) * limb_prec))
	{
	  /* Precision is a multiple of limb_prec, but the ABI still has
	     extra padding limbs to fill (zero or sign-replicated).  */
	  m_gsi = gsi_after_labels (edge_false->dest);
	  tree p2 = build_int_cst (sizetype,
				   CEIL (prec, abi_limb_prec)
				   * abi_limb_prec / limb_prec - 1);
	  tree v;
	  if (TYPE_UNSIGNED (TREE_TYPE (lhs)))
	    v = build_zero_cst (m_limb_type);
	  else
	    {
	      tree l = limb_access (TREE_TYPE (lhs), obj, p, true);
	      v = make_ssa_name (m_limb_type);
	      g = gimple_build_assign (v, l);
	      insert_before (g);
	      v = add_cast (signed_type_for (m_limb_type), v);
	      g = gimple_build_assign (make_ssa_name (TREE_TYPE (v)),
				       RSHIFT_EXPR, v,
				       build_int_cst (unsigned_type_node,
						      limb_prec - 1));
	      insert_before (g);
	      v = add_cast (m_limb_type, gimple_assign_lhs (g));
	    }
	  tree l = limb_access (TREE_TYPE (lhs), obj, p2, true);
	  g = gimple_build_assign (l, v);
	  insert_before (g);
	}
    }
}
4074 :
4075 : /* Lower large/huge _BitInt multiplication or division. */
4076 :
void
bitint_large_huge::lower_muldiv_stmt (tree obj, gimple *stmt)
{
  tree rhs1 = gimple_assign_rhs1 (stmt);
  tree rhs2 = gimple_assign_rhs2 (stmt);
  tree lhs = gimple_assign_lhs (stmt);
  tree_code rhs_code = gimple_assign_rhs_code (stmt);
  tree type = TREE_TYPE (rhs1);
  gcc_assert (TREE_CODE (type) == BITINT_TYPE
	      && bitint_precision_kind (type) >= bitint_prec_large);
  int prec = TYPE_PRECISION (type), prec1, prec2;
  bool zero_ms_limb = false;
  /* Both operands are passed by address to the libgcc helpers.  */
  rhs1 = handle_operand_addr (rhs1, stmt, NULL, &prec1);
  rhs2 = handle_operand_addr (rhs2, stmt, NULL, &prec2);
  if (obj == NULL_TREE)
    {
      /* No preallocated result object; use the variable backing the
	 coalesced partition of LHS.  */
      int part = var_to_partition (m_map, lhs);
      gcc_assert (m_vars[part] != NULL_TREE);
      obj = m_vars[part];
      lhs = build_fold_addr_expr (obj);
    }
  else
    {
      lhs = build_fold_addr_expr (obj);
      lhs = force_gimple_operand_gsi (&m_gsi, lhs, true,
				      NULL_TREE, true, GSI_SAME_STMT);
    }
  if (bitint_extended == bitint_ext_full
      && abi_limb_prec > limb_prec
      && (CEIL (prec, abi_limb_prec) * abi_limb_prec
	  > CEIL (prec, limb_prec) * limb_prec))
    {
      /* unsigned multiplication needs to wrap around, so we can't
	 increase prec.  */
      if (rhs_code == MULT_EXPR && TYPE_UNSIGNED (type))
	zero_ms_limb = true;
      else
	prec = CEIL (prec, abi_limb_prec) * abi_limb_prec;
    }
  tree sitype = lang_hooks.types.type_for_mode (SImode, 0);
  gimple *g;
  switch (rhs_code)
    {
    case MULT_EXPR:
      /* __mulbitint (result, result_prec, op1, op1_prec, op2, op2_prec).  */
      g = gimple_build_call_internal (IFN_MULBITINT, 6,
				      lhs, build_int_cst (sitype, prec),
				      rhs1, build_int_cst (sitype, prec1),
				      rhs2, build_int_cst (sitype, prec2));
      insert_before (g);
      break;
    case TRUNC_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* __divmodbitint with quotient pointer set, remainder NULL.  */
      g = gimple_build_call_internal (IFN_DIVMODBITINT, 8,
				      lhs, build_int_cst (sitype, prec),
				      null_pointer_node,
				      build_int_cst (sitype, 0),
				      rhs1, build_int_cst (sitype, prec1),
				      rhs2, build_int_cst (sitype, prec2));
      if (!stmt_ends_bb_p (stmt))
	gimple_call_set_nothrow (as_a <gcall *> (g), true);
      insert_before (g);
      break;
    case TRUNC_MOD_EXPR:
      /* __divmodbitint with quotient NULL, remainder pointer set.  */
      g = gimple_build_call_internal (IFN_DIVMODBITINT, 8, null_pointer_node,
				      build_int_cst (sitype, 0),
				      lhs, build_int_cst (sitype, prec),
				      rhs1, build_int_cst (sitype, prec1),
				      rhs2, build_int_cst (sitype, prec2));
      if (!stmt_ends_bb_p (stmt))
	gimple_call_set_nothrow (as_a <gcall *> (g), true);
      insert_before (g);
      break;
    default:
      gcc_unreachable ();
    }
  if (stmt_ends_bb_p (stmt))
    {
      /* The original statement could throw (e.g. division); replicate
	 its EH edge onto the newly emitted call.  */
      maybe_duplicate_eh_stmt (g, stmt);
      edge e1;
      edge_iterator ei;
      basic_block bb = gimple_bb (stmt);

      FOR_EACH_EDGE (e1, ei, bb->succs)
	if (e1->flags & EDGE_EH)
	  break;
      if (e1)
	{
	  edge e2 = split_block (gsi_bb (m_gsi), g);
	  m_gsi = gsi_after_labels (e2->dest);
	  add_eh_edge (e2->src, e1);
	}
    }
  if (bitint_extended
      && rhs_code == MULT_EXPR
      && TYPE_UNSIGNED (type)
      && (prec % limb_prec) != 0)
    {
      /* Unsigned multiplication wraps, but libgcc function will return the
	 bits beyond prec within the top limb as another limb of the full
	 multiplication.  So, clear the padding bits here.  */
      tree idx = size_int (bitint_big_endian ? 0 : prec / limb_prec);
      tree l = limb_access (type, obj, idx, true);
      tree ctype = limb_access_type (type, idx);
      tree v = make_ssa_name (m_limb_type);
      g = gimple_build_assign (v, l);
      insert_before (g);
      /* Round-trip through the partial-precision access type to mask
	 off (or extend over) the bits above prec.  */
      v = add_cast (ctype, v);
      l = limb_access (type, obj, idx, true);
      v = add_cast (m_limb_type, v);
      g = gimple_build_assign (l, v);
      insert_before (g);
    }
  if (zero_ms_limb)
    {
      /* Clear the ABI padding limb which the wrapped unsigned
	 multiplication could not fill itself.  */
      unsigned int i = CEIL (prec, abi_limb_prec) * abi_limb_prec / limb_prec;
      g = gimple_build_assign (limb_access (type, obj, size_int (i - 1), true),
			       build_zero_cst (m_limb_type));
      insert_before (g);
    }
}
4197 :
4198 : /* Lower large/huge _BitInt conversion to/from floating point. */
4199 :
void
bitint_large_huge::lower_float_conv_stmt (tree obj, gimple *stmt)
{
  tree rhs1 = gimple_assign_rhs1 (stmt);
  tree lhs = gimple_assign_lhs (stmt);
  tree_code rhs_code = gimple_assign_rhs_code (stmt);
  tree sitype = lang_hooks.types.type_for_mode (SImode, 0);
  gimple *g;
  if (rhs_code == FIX_TRUNC_EXPR)
    {
      /* Floating point to _BitInt conversion.  */
      tree type = TREE_TYPE (lhs);
      int prec = TYPE_PRECISION (type);
      bool extend_ms_limb = false;
      if (bitint_extended == bitint_ext_full
	  && abi_limb_prec > limb_prec
	  && (CEIL (prec, abi_limb_prec) * abi_limb_prec
	      > CEIL (prec, limb_prec) * limb_prec))
	extend_ms_limb = true;
      /* The libgcc interface encodes signedness as negated precision.  */
      if (!TYPE_UNSIGNED (type))
	prec = -prec;
      if (obj == NULL_TREE)
	{
	  /* No preallocated result object; use the variable backing the
	     coalesced partition of LHS.  */
	  int part = var_to_partition (m_map, lhs);
	  gcc_assert (m_vars[part] != NULL_TREE);
	  obj = m_vars[part];
	  lhs = build_fold_addr_expr (obj);
	}
      else
	{
	  lhs = build_fold_addr_expr (obj);
	  lhs = force_gimple_operand_gsi (&m_gsi, lhs, true,
					  NULL_TREE, true, GSI_SAME_STMT);
	}
      scalar_mode from_mode
	= as_a <scalar_mode> (TYPE_MODE (TREE_TYPE (rhs1)));
#ifdef HAVE_SFmode
      /* IEEE single is a full superset of both IEEE half and
	 bfloat formats, convert to float first and then to _BitInt
	 to avoid the need of another 2 library routines.  */
      if ((REAL_MODE_FORMAT (from_mode) == &arm_bfloat_half_format
	   || REAL_MODE_FORMAT (from_mode) == &ieee_half_format)
	  && REAL_MODE_FORMAT (SFmode) == &ieee_single_format)
	{
	  tree type = lang_hooks.types.type_for_mode (SFmode, 0);
	  if (type)
	    rhs1 = add_cast (type, rhs1);
	}
#endif
      g = gimple_build_call_internal (IFN_FLOATTOBITINT, 3,
				      lhs, build_int_cst (sitype, prec),
				      rhs1);
      insert_before (g);
      if (extend_ms_limb)
	{
	  /* Fill the ABI padding limb beyond the lowered limbs: zero for
	     unsigned results, sign-replicated top limb for signed ones
	     (prec < 0 here means the type is signed, see above).  */
	  unsigned int i
	    = (CEIL (prec < 0 ? -prec : prec, abi_limb_prec)
	       * abi_limb_prec / limb_prec);
	  tree val;
	  if (prec < 0)
	    {
	      g = gimple_build_assign (make_ssa_name (m_limb_type),
				       limb_access (type, obj,
						    size_int (i - 2),
						    true));
	      insert_before (g);
	      val = add_cast (signed_type_for (m_limb_type),
			      gimple_assign_lhs (g));
	      /* Arithmetic shift by limb_prec - 1 replicates the sign
		 bit across the whole limb.  */
	      g = gimple_build_assign (make_ssa_name (TREE_TYPE (val)),
				       RSHIFT_EXPR, val,
				       build_int_cst (unsigned_type_node,
						      limb_prec - 1));
	      insert_before (g);
	      val = add_cast (m_limb_type, gimple_assign_lhs (g));
	    }
	  else
	    val = build_zero_cst (m_limb_type);
	  g = gimple_build_assign (limb_access (type, obj, size_int (i - 1),
						true), val);
	  insert_before (g);
	}
    }
  else
    {
      /* _BitInt to floating point conversion: the call computes the
	 scalar LHS directly, so it replaces STMT instead of being
	 inserted before it.  */
      int prec;
      rhs1 = handle_operand_addr (rhs1, stmt, NULL, &prec);
      g = gimple_build_call_internal (IFN_BITINTTOFLOAT, 2,
				      rhs1, build_int_cst (sitype, prec));
      gimple_call_set_lhs (g, lhs);
      if (!stmt_ends_bb_p (stmt))
	gimple_call_set_nothrow (as_a <gcall *> (g), true);
      gsi_replace (&m_gsi, g, true);
    }
}
4293 :
4294 : /* Helper method for lower_addsub_overflow and lower_mul_overflow.
4295 : If check_zero is true, caller wants to check if all bits in [start, end)
4296 : are zero, otherwise if bits in [start, end) are either all zero or
4297 : all ones. L is the limb with index LIMB, START and END are measured
4298 : in bits. */
4299 :
4300 : tree
4301 6130 : bitint_large_huge::arith_overflow_extract_bits (unsigned int start,
4302 : unsigned int end, tree l,
4303 : unsigned int limb,
4304 : bool check_zero)
4305 : {
4306 6130 : unsigned startlimb = start / limb_prec;
4307 6130 : unsigned endlimb = (end - 1) / limb_prec;
4308 6130 : gimple *g;
4309 :
 : /* Both boundaries are limb aligned, so limb L participates with all of
 : its bits and can be compared against 0 (or all-ones) as is.  */
4310 6130 : if ((start % limb_prec) == 0 && (end % limb_prec) == 0)
4311 : return l;
 : /* [START, END) is fully contained within the single limb L.  */
4312 5864 : if (startlimb == endlimb && limb == startlimb)
4313 : {
4314 1981 : if (check_zero)
4315 : {
 : /* Mask out just the interesting bits; caller compares against
 : zero.  */
4316 1456 : wide_int w = wi::shifted_mask (start % limb_prec,
4317 1456 : end - start, false, limb_prec);
4318 2912 : g = gimple_build_assign (make_ssa_name (m_limb_type),
4319 : BIT_AND_EXPR, l,
4320 1456 : wide_int_to_tree (m_limb_type, w));
4321 1456 : insert_before (g);
4322 1456 : return gimple_assign_lhs (g);
4323 1456 : }
 : /* For the all-zeros-or-all-ones check, shift the bits left so that
 : the topmost interesting bit becomes the sign bit, then arithmetic
 : shift right so the result is 0 or -1 iff all bits agreed.  */
4324 525 : unsigned int shift = start % limb_prec;
4325 525 : if ((end % limb_prec) != 0)
4326 : {
4327 328 : unsigned int lshift = (-end) % limb_prec;
4328 328 : shift += lshift;
4329 328 : g = gimple_build_assign (make_ssa_name (m_limb_type),
4330 : LSHIFT_EXPR, l,
4331 : build_int_cst (unsigned_type_node,
4332 328 : lshift));
4333 328 : insert_before (g);
4334 328 : l = gimple_assign_lhs (g);
4335 : }
 : /* Cast to signed so the right shift is an arithmetic one.  */
4336 525 : l = add_cast (signed_type_for (m_limb_type), l);
4337 525 : g = gimple_build_assign (make_ssa_name (TREE_TYPE (l)),
4338 : RSHIFT_EXPR, l,
4339 525 : build_int_cst (unsigned_type_node, shift));
4340 525 : insert_before (g);
4341 525 : return add_cast (m_limb_type, gimple_assign_lhs (g));
4342 : }
 : /* L is the first limb of a multi-limb range: only bits from
 : START % limb_prec upwards matter.  */
4343 3883 : else if (limb == startlimb)
4344 : {
4345 1881 : if ((start % limb_prec) == 0)
4346 : return l;
 : /* !check_zero wants sign-extension of the discarded low bits,
 : so do the shift in the signed type.  */
4347 1795 : if (!check_zero)
4348 917 : l = add_cast (signed_type_for (m_limb_type), l);
4349 1795 : g = gimple_build_assign (make_ssa_name (TREE_TYPE (l)),
4350 : RSHIFT_EXPR, l,
4351 : build_int_cst (unsigned_type_node,
4352 1795 : start % limb_prec));
4353 1795 : insert_before (g);
4354 1795 : l = gimple_assign_lhs (g);
4355 1795 : if (!check_zero)
4356 917 : l = add_cast (m_limb_type, l);
4357 1795 : return l;
4358 : }
 : /* L is the last limb of a multi-limb range: only bits below
 : END % limb_prec matter.  */
4359 2002 : else if (limb == endlimb)
4360 : {
4361 1603 : if ((end % limb_prec) == 0)
4362 : return l;
4363 1602 : if (check_zero)
4364 : {
4365 840 : wide_int w = wi::mask (end % limb_prec, false, limb_prec);
4366 1680 : g = gimple_build_assign (make_ssa_name (m_limb_type),
4367 : BIT_AND_EXPR, l,
4368 840 : wide_int_to_tree (m_limb_type, w));
4369 840 : insert_before (g);
4370 840 : return gimple_assign_lhs (g);
4371 840 : }
 : /* Shift left then arithmetically right by the same amount, which
 : replicates bit END - 1 into the bits above it.  */
4372 762 : unsigned int shift = (-end) % limb_prec;
4373 762 : g = gimple_build_assign (make_ssa_name (m_limb_type),
4374 : LSHIFT_EXPR, l,
4375 762 : build_int_cst (unsigned_type_node, shift));
4376 762 : insert_before (g);
4377 762 : l = add_cast (signed_type_for (m_limb_type), gimple_assign_lhs (g));
4378 762 : g = gimple_build_assign (make_ssa_name (TREE_TYPE (l)),
4379 : RSHIFT_EXPR, l,
4380 762 : build_int_cst (unsigned_type_node, shift));
4381 762 : insert_before (g);
4382 762 : return add_cast (m_limb_type, gimple_assign_lhs (g));
4383 : }
 : /* Interior limb of a multi-limb range: all of its bits matter.  */
4384 : return l;
4385 : }
4386 :
4387 : /* Helper method for lower_addsub_overflow and lower_mul_overflow. Store
4388 : result including overflow flag into the right locations. */
4389 :
4390 : void
4391 4040 : bitint_large_huge::finish_arith_overflow (tree var, tree obj, tree type,
4392 : tree ovf, tree lhs, tree orig_obj,
4393 : gimple *stmt, unsigned nelts,
4394 : tree_code code)
4395 : {
4396 4040 : gimple *g;
4397 :
 : /* Case 1: result fits in a normal integral (or small/middle _BitInt)
 : type; assemble a scalar COMPLEX_EXPR from at most two limbs of VAR.  */
4398 4040 : if (obj == NULL_TREE
4399 4040 : && (TREE_CODE (type) != BITINT_TYPE
4400 225 : || bitint_precision_kind (type) < bitint_prec_large))
4401 : {
4402 : /* Add support for 3 or more limbs filled in from normal integral
4403 : type if this assert fails. If no target chooses limb mode smaller
4404 : than half of largest supported normal integral type, this will not
4405 : be needed. */
4406 241 : gcc_assert (TYPE_PRECISION (type) <= 2 * limb_prec);
4407 241 : tree lhs_type = type;
4408 241 : if (TREE_CODE (type) == BITINT_TYPE
4409 241 : && bitint_precision_kind (type) == bitint_prec_middle)
4410 46 : lhs_type = build_nonstandard_integer_type (TYPE_PRECISION (type),
4411 46 : TYPE_UNSIGNED (type));
 : /* Least significant limb (index depends on limb endianness).  */
4412 241 : tree r1 = limb_access (NULL_TREE, var,
4413 : bitint_big_endian
4414 0 : ? size_int (nelts - 1) : size_zero_node, true);
4415 241 : g = gimple_build_assign (make_ssa_name (m_limb_type), r1);
4416 241 : insert_before (g);
4417 241 : r1 = gimple_assign_lhs (g);
4418 241 : if (!useless_type_conversion_p (lhs_type, TREE_TYPE (r1)))
4419 241 : r1 = add_cast (lhs_type, r1);
 : /* If the scalar needs two limbs, OR in the second limb shifted up
 : by limb_prec.  */
4420 241 : if (TYPE_PRECISION (lhs_type) > limb_prec)
4421 : {
4422 90 : tree r2 = limb_access (NULL_TREE, var,
4423 : bitint_big_endian
4424 0 : ? size_int (nelts - 2) : size_one_node, true);
4425 90 : g = gimple_build_assign (make_ssa_name (m_limb_type), r2);
4426 90 : insert_before (g);
4427 90 : r2 = gimple_assign_lhs (g);
4428 90 : r2 = add_cast (lhs_type, r2);
4429 90 : g = gimple_build_assign (make_ssa_name (lhs_type), LSHIFT_EXPR, r2,
4430 : build_int_cst (unsigned_type_node,
4431 90 : limb_prec));
4432 90 : insert_before (g);
4433 90 : g = gimple_build_assign (make_ssa_name (lhs_type), BIT_IOR_EXPR, r1,
4434 : gimple_assign_lhs (g));
4435 90 : insert_before (g);
4436 90 : r1 = gimple_assign_lhs (g);
4437 : }
4438 241 : if (lhs_type != type)
4439 46 : r1 = add_cast (type, r1);
4440 241 : ovf = add_cast (lhs_type, ovf);
4441 241 : if (lhs_type != type)
4442 46 : ovf = add_cast (type, ovf);
 : /* Replace the original call with lhs = COMPLEX_EXPR <result, ovf>.  */
4443 241 : g = gimple_build_assign (lhs, COMPLEX_EXPR, r1, ovf);
4444 241 : m_gsi = gsi_for_stmt (stmt);
4445 241 : gsi_replace (&m_gsi, g, true);
4446 : }
 : /* Case 2: large/huge result stored into OBJ (and/or temporary VAR).  */
4447 : else
4448 : {
4449 3799 : unsigned HOST_WIDE_INT obj_nelts = 0;
4450 3799 : tree atype = NULL_TREE;
4451 3799 : if (obj)
4452 : {
4453 3708 : obj_nelts = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (obj))) / limb_prec;
 : /* Without ORIG_OBJ, OBJ holds a _Complex: real half is the
 : result, so only half of the limbs belong to it.  */
4454 3708 : if (orig_obj == NULL_TREE)
4455 2140 : obj_nelts >>= 1;
4456 3708 : atype = build_array_type_nelts (m_limb_type, obj_nelts);
4457 : }
 : /* Copy the computed limbs from temporary VAR into OBJ.  */
4458 3799 : if (var && obj)
4459 : {
4460 480 : tree v1, v2;
4461 480 : tree off;
4462 480 : if (orig_obj == NULL_TREE)
4463 : {
4464 0 : off = build_zero_cst (build_pointer_type (TREE_TYPE (obj)));
4465 0 : v1 = build2 (MEM_REF, atype,
4466 : build_fold_addr_expr (unshare_expr (obj)), off);
4467 : }
4468 480 : else if (!useless_type_conversion_p (atype, TREE_TYPE (obj)))
4469 8 : v1 = build1 (VIEW_CONVERT_EXPR, atype, unshare_expr (obj));
4470 : else
4471 472 : v1 = unshare_expr (obj);
 : /* For big-endian limb order the interesting limbs sit at the
 : end of VAR.  */
4472 480 : off = build_int_cst (build_pointer_type (TREE_TYPE (var)),
4473 : bitint_big_endian
4474 480 : ? (nelts - obj_nelts) * m_limb_size : 0);
4475 480 : v2 = build2 (MEM_REF, atype, build_fold_addr_expr (var), off);
4476 480 : g = gimple_build_assign (v1, v2);
4477 480 : insert_before (g);
4478 : }
 : /* Big-endian OBJ written with more limbs than it has room for:
 : slide the tail down with memmove.  */
4479 3319 : else if (obj && bitint_big_endian && nelts != obj_nelts)
4480 : {
4481 0 : gcc_assert (nelts > obj_nelts);
4482 0 : tree fn = builtin_decl_implicit (BUILT_IN_MEMMOVE);
4483 0 : tree off = build_int_cst (build_pointer_type (TREE_TYPE (obj)),
4484 0 : (nelts - obj_nelts) * m_limb_size);
4485 0 : tree src = build2 (MEM_REF, atype,
4486 : build_fold_addr_expr (unshare_expr (obj)), off);
4487 0 : g = gimple_build_call (fn, 3,
4488 : build_fold_addr_expr (unshare_expr (obj)),
4489 : src, build_int_cst (size_type_node,
4490 0 : obj_nelts * m_limb_size));
4491 0 : insert_before (g);
4492 : }
 : /* OBJ is a _Complex: store OVF into the first limb of the imag
 : half and zero the remaining imag limbs.  */
4493 3799 : if (orig_obj == NULL_TREE && obj)
4494 : {
4495 2140 : ovf = add_cast (m_limb_type, ovf);
4496 2140 : tree l = limb_access (NULL_TREE, obj,
4497 2140 : size_int (bitint_big_endian
4498 : ? obj_nelts * 2 - 1 : obj_nelts),
4499 : true);
4500 2140 : g = gimple_build_assign (l, ovf);
4501 2140 : insert_before (g);
4502 2140 : if (obj_nelts > 1)
4503 : {
4504 2140 : atype = build_array_type_nelts (m_limb_type, obj_nelts - 1);
4505 2140 : tree off = build_int_cst (build_pointer_type (TREE_TYPE (obj)),
4506 2140 : (obj_nelts + !bitint_big_endian)
4507 2140 : * m_limb_size);
4508 2140 : tree v1 = build2 (MEM_REF, atype,
4509 : build_fold_addr_expr (unshare_expr (obj)),
4510 : off);
4511 2140 : g = gimple_build_assign (v1, build_zero_cst (atype));
4512 2140 : insert_before (g);
4513 : }
4514 : }
 : /* LHS is _Complex: rewrite the IMAGPART_EXPR consumer so it reads
 : OVF directly.  */
4515 1659 : else if (TREE_CODE (TREE_TYPE (lhs)) == COMPLEX_TYPE)
4516 : {
4517 1632 : imm_use_iterator ui;
4518 1632 : use_operand_p use_p;
4519 1632 : FOR_EACH_IMM_USE_FAST (use_p, ui, lhs)
4520 : {
4521 1632 : g = USE_STMT (use_p);
4522 1632 : if (!is_gimple_assign (g)
4523 1632 : || gimple_assign_rhs_code (g) != IMAGPART_EXPR)
4524 0 : continue;
4525 1632 : tree lhs2 = gimple_assign_lhs (g);
4526 1632 : gimple *use_stmt;
4527 1632 : single_imm_use (lhs2, &use_p, &use_stmt);
4528 1632 : lhs2 = gimple_assign_lhs (use_stmt);
4529 1632 : gimple_stmt_iterator gsi = gsi_for_stmt (use_stmt);
4530 1632 : if (useless_type_conversion_p (TREE_TYPE (lhs2), TREE_TYPE (ovf)))
4531 1611 : g = gimple_build_assign (lhs2, ovf);
4532 : else
4533 21 : g = gimple_build_assign (lhs2, NOP_EXPR, ovf);
4534 1632 : gsi_replace (&gsi, g, true);
4535 1632 : if (gsi_stmt (m_gsi) == use_stmt)
4536 91 : m_gsi = gsi_for_stmt (g);
4537 1632 : break;
4538 1632 : }
4539 : }
 : /* UBSan-style check: branch on OVF and emit the overflow builtin
 : on the unlikely path.  */
4540 27 : else if (ovf != boolean_false_node)
4541 : {
4542 27 : g = gimple_build_cond (NE_EXPR, ovf, boolean_false_node,
4543 : NULL_TREE, NULL_TREE);
4544 27 : edge edge_true, edge_false;
4545 27 : if_then (g, profile_probability::very_unlikely (),
4546 : edge_true, edge_false);
4547 27 : tree zero = build_zero_cst (TREE_TYPE (lhs));
4548 27 : tree fn = ubsan_build_overflow_builtin (code, m_loc,
4549 27 : TREE_TYPE (lhs),
4550 : zero, zero, NULL);
4551 27 : force_gimple_operand_gsi (&m_gsi, fn, true, NULL_TREE,
4552 : true, GSI_SAME_STMT);
4553 27 : m_gsi = gsi_after_labels (edge_true->dest);
4554 : }
4555 : }
 : /* The temporary's lifetime ends here; emit a clobber for it.  */
4556 4040 : if (var)
4557 : {
4558 733 : tree clobber = build_clobber (TREE_TYPE (var), CLOBBER_STORAGE_END);
4559 733 : g = gimple_build_assign (var, clobber);
4560 733 : gsi_insert_after (&m_gsi, g, GSI_SAME_STMT);
4561 : }
4562 4040 : }
4563 :
4564 : /* Helper function for lower_addsub_overflow and lower_mul_overflow.
4565 : Given precisions of result TYPE (PREC), argument 0 precision PREC0,
4566 : argument 1 precision PREC1 and minimum precision for the result
4567 : PREC2, compute *START, *END, *CHECK_ZERO and return OVF. */
4568 :
4569 : static tree
4570 4040 : arith_overflow (tree_code code, tree type, int prec, int prec0, int prec1,
4571 : int prec2, unsigned *start, unsigned *end, bool *check_zero)
4572 : {
4573 4040 : *start = 0;
4574 4040 : *end = 0;
4575 4040 : *check_zero = true;
4576 : /* Ignore this special rule for subtraction, even if both
4577 : prec0 >= 0 and prec1 >= 0, their subtraction can be negative
4578 : in infinite precision. */
 : /* Both operands known non-negative (and not a subtraction): the
 : infinite-precision result is non-negative, fitting in PREC2 bits.  */
4579 4040 : if (code != MINUS_EXPR && prec0 >= 0 && prec1 >= 0)
4580 : {
4581 : /* Result in [0, prec2) is unsigned, if prec > prec2,
4582 : all bits above it will be zero. */
4583 626 : if ((prec - !TYPE_UNSIGNED (type)) >= prec2)
4584 0 : return boolean_false_node;
4585 : else
4586 : {
4587 : /* ovf if any of bits in [start, end) is non-zero. */
4588 626 : *start = prec - !TYPE_UNSIGNED (type);
4589 626 : *end = prec2;
4590 : }
4591 : }
 : /* Result may be negative but TYPE is unsigned.  */
4592 3414 : else if (TYPE_UNSIGNED (type))
4593 : {
4594 : /* If result in [0, prec2) is signed and if prec > prec2,
4595 : all bits above it will be sign bit copies. */
4596 1926 : if (prec >= prec2)
4597 : {
4598 : /* ovf if bit prec - 1 is non-zero. */
4599 184 : *start = prec - 1;
4600 184 : *end = prec;
4601 : }
4602 : else
4603 : {
4604 : /* ovf if any of bits in [start, end) is non-zero. */
4605 1742 : *start = prec;
4606 1742 : *end = prec2;
4607 : }
4608 : }
 : /* Signed TYPE wide enough for any possible result: never overflows.  */
4609 1488 : else if (prec >= prec2)
4610 0 : return boolean_false_node;
4611 : else
4612 : {
4613 : /* ovf if [start, end) bits aren't all zeros or all ones. */
4614 1488 : *start = prec - 1;
4615 1488 : *end = prec2;
4616 1488 : *check_zero = false;
4617 : }
 : /* NULL means the caller must test bits [*START, *END) at runtime.  */
4618 : return NULL_TREE;
4619 : }
4620 :
4621 : /* Lower a .{ADD,SUB}_OVERFLOW call with at least one large/huge _BitInt
4622 : argument or return type _Complex large/huge _BitInt. */
4623 :
4624 : void
4625 2653 : bitint_large_huge::lower_addsub_overflow (tree obj, gimple *stmt)
4626 : {
4627 2653 : tree arg0 = gimple_call_arg (stmt, 0);
4628 2653 : tree arg1 = gimple_call_arg (stmt, 1);
4629 2653 : tree lhs = gimple_call_lhs (stmt);
4630 2653 : gimple *g;
4631 :
 : /* A result-less overflow call is dead; just remove it.  */
4632 2653 : if (!lhs)
4633 : {
4634 0 : gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
4635 0 : gsi_remove (&gsi, true);
4636 0 : return;
4637 : }
4638 2653 : gimple *final_stmt = gsi_stmt (m_gsi);
4639 2653 : tree type = TREE_TYPE (lhs);
4640 2653 : if (TREE_CODE (type) == COMPLEX_TYPE)
4641 2635 : type = TREE_TYPE (type);
 : /* Minimum precisions of the operands from range info; negative
 : values denote signed ranges (see range_to_prec).  */
4642 2653 : int prec = TYPE_PRECISION (type);
4643 2653 : int prec0 = range_to_prec (arg0, stmt);
4644 2653 : int prec1 = range_to_prec (arg1, stmt);
4645 : /* If PREC0 >= 0 && PREC1 >= 0 and CODE is not MINUS_EXPR, PREC2 is
4646 : the minimum unsigned precision of any possible operation's
4647 : result, otherwise it is minimum signed precision.
4648 : Some examples:
4649 : If PREC0 or PREC1 is 8, it means that argument is [0, 0xff],
4650 : if PREC0 or PREC1 is 10, it means that argument is [0, 0x3ff],
4651 : if PREC0 or PREC1 is -8, it means that argument is [-0x80, 0x7f],
4652 : if PREC0 or PREC1 is -10, it means that argument is [-0x200, 0x1ff].
4653 : PREC0 CODE PREC1 RESULT PREC2 SIGNED vs. UNSIGNED
4654 : 8 + 8 [0, 0x1fe] 9 UNSIGNED
4655 : 8 + 10 [0, 0x4fe] 11 UNSIGNED
4656 : -8 + -8 [-0x100, 0xfe] 9 SIGNED
4657 : -8 + -10 [-0x280, 0x27e] 11 SIGNED
4658 : 8 + -8 [-0x80, 0x17e] 10 SIGNED
4659 : 8 + -10 [-0x200, 0x2fe] 11 SIGNED
4660 : 10 + -8 [-0x80, 0x47e] 12 SIGNED
4661 : 8 - 8 [-0xff, 0xff] 9 SIGNED
4662 : 8 - 10 [-0x3ff, 0xff] 11 SIGNED
4663 : 10 - 8 [-0xff, 0x3ff] 11 SIGNED
4664 : -8 - -8 [-0xff, 0xff] 9 SIGNED
4665 : -8 - -10 [-0x27f, 0x27f] 11 SIGNED
4666 : -10 - -8 [-0x27f, 0x27f] 11 SIGNED
4667 : 8 - -8 [-0x7f, 0x17f] 10 SIGNED
4668 : 8 - -10 [-0x1ff, 0x2ff] 11 SIGNED
4669 : 10 - -8 [-0x7f, 0x47f] 12 SIGNED
4670 : -8 - 8 [-0x17f, 0x7f] 10 SIGNED
4671 : -8 - 10 [-0x47f, 0x7f] 12 SIGNED
4672 : -10 - 8 [-0x2ff, 0x1ff] 11 SIGNED */
4673 2653 : int prec2 = MAX (prec0 < 0 ? -prec0 : prec0,
4674 : prec1 < 0 ? -prec1 : prec1);
4675 : /* If operands are either both signed or both unsigned,
4676 : we need just one additional bit. */
4677 3692 : prec2 = (((prec0 < 0) == (prec1 < 0)
4678 : /* If one operand is signed and one unsigned and
4679 : the signed one has larger precision, we need
4680 : just one extra bit, otherwise two. */
4681 702 : || (prec0 < 0 ? (prec2 == -prec0 && prec2 != prec1)
4682 337 : : (prec2 == -prec1 && prec2 != prec0)))
4683 2653 : ? prec2 + 1 : prec2 + 2);
 : /* PREC3: how many limbs we need to actually compute (result
 : precision or widest operand, whichever is larger).  */
4684 2653 : int prec3 = MAX (prec0 < 0 ? -prec0 : prec0,
4685 : prec1 < 0 ? -prec1 : prec1);
4686 2653 : prec3 = MAX (prec3, prec);
4687 2653 : tree var = NULL_TREE;
4688 2653 : tree orig_obj = obj;
 : /* If the lhs has an assigned partition variable, write into it.  */
4689 2653 : if (obj == NULL_TREE
4690 1673 : && TREE_CODE (type) == BITINT_TYPE
4691 1572 : && bitint_precision_kind (type) >= bitint_prec_large
4692 1460 : && m_names
4693 4089 : && bitmap_bit_p (m_names, SSA_NAME_VERSION (lhs)))
4694 : {
4695 1381 : int part = var_to_partition (m_map, lhs);
4696 1381 : gcc_assert (m_vars[part] != NULL_TREE);
4697 1381 : obj = m_vars[part];
4698 1381 : if (TREE_TYPE (lhs) == type)
4699 2 : orig_obj = obj;
4700 : }
 : /* Small/middle result: compute into a temporary limb array.  */
4701 2653 : if (TREE_CODE (type) != BITINT_TYPE
4702 2653 : || bitint_precision_kind (type) < bitint_prec_large)
4703 : {
4704 213 : unsigned HOST_WIDE_INT nelts = CEIL (prec, limb_prec);
4705 213 : tree atype = build_array_type_nelts (m_limb_type, nelts);
4706 213 : var = create_tmp_var (atype);
4707 : }
4708 :
4709 2653 : enum tree_code code;
4710 2653 : switch (gimple_call_internal_fn (stmt))
4711 : {
4712 : case IFN_ADD_OVERFLOW:
4713 : case IFN_UBSAN_CHECK_ADD:
4714 : code = PLUS_EXPR;
4715 : break;
4716 1359 : case IFN_SUB_OVERFLOW:
4717 1359 : case IFN_UBSAN_CHECK_SUB:
4718 1359 : code = MINUS_EXPR;
4719 1359 : break;
4720 0 : default:
4721 0 : gcc_unreachable ();
4722 : }
 : /* Determine which bit range [START, END) must be inspected to detect
 : overflow; OVF non-NULL means the answer is known at compile time.  */
4723 2653 : unsigned start, end;
4724 2653 : bool check_zero;
4725 2653 : tree ovf = arith_overflow (code, type, prec, prec0, prec1, prec2,
4726 : &start, &end, &check_zero);
4727 :
4728 2653 : unsigned startlimb, endlimb;
4729 2653 : if (ovf)
4730 : {
4731 : startlimb = ~0U;
4732 : endlimb = ~0U;
4733 : }
4734 : else
4735 : {
4736 2653 : startlimb = start / limb_prec;
4737 2653 : endlimb = (end - 1) / limb_prec;
4738 : }
4739 :
 : /* Iteration shape: straight-line for large kinds, a 2-limbs-per-pass
 : loop plus straight-line remainder for huge kinds.  LAST_OVF adds one
 : extra sign/zero-extension iteration when PREC2 needs more limbs than
 : PREC3.  */
4740 2653 : int prec4 = ovf != NULL_TREE ? prec : prec3;
4741 2653 : bitint_prec_kind kind = bitint_precision_kind (prec4);
4742 2653 : unsigned cnt, rem = 0, fin = 0, nelts;
4743 2653 : tree idx = NULL_TREE, idx_first = NULL_TREE, idx_next = NULL_TREE;
4744 5306 : bool last_ovf = (ovf == NULL_TREE
4745 2653 : && CEIL (prec2, limb_prec) > CEIL (prec3, limb_prec));
4746 2653 : if (kind != bitint_prec_huge)
4747 1539 : nelts = cnt = CEIL (prec4, limb_prec) + last_ovf;
4748 : else
4749 : {
4750 1114 : rem = prec4 % (2 * limb_prec);
4751 1114 : fin = (prec4 - rem) / limb_prec;
4752 1114 : cnt = 2 + CEIL (rem, limb_prec) + last_ovf;
4753 1114 : nelts = fin + cnt - 2;
4754 1114 : idx = idx_first = create_loop (bitint_big_endian
4755 1114 : ? size_int (nelts - 1) : size_zero_node,
4756 : &idx_next);
4757 : }
4758 :
4759 2653 : if (kind == bitint_prec_huge)
4760 1114 : m_upwards_2limb = fin;
4761 2653 : m_upwards = true;
4762 :
 : /* Possibly widen the operand types to PREC5 so handle_cast can
 : supply properly extended limbs.  */
4763 2653 : tree type0 = TREE_TYPE (arg0);
4764 2653 : tree type1 = TREE_TYPE (arg1);
4765 2653 : int prec5 = prec3;
4766 2653 : if (bitint_precision_kind (prec5) < bitint_prec_large)
4767 10 : prec5 = MAX (TYPE_PRECISION (type0), TYPE_PRECISION (type1));
4768 2653 : if (TYPE_PRECISION (type0) < prec5)
4769 : {
4770 146 : type0 = build_bitint_type (prec5, TYPE_UNSIGNED (type0));
4771 146 : if (TREE_CODE (arg0) == INTEGER_CST)
4772 27 : arg0 = fold_convert (type0, arg0);
4773 : }
4774 2653 : if (TYPE_PRECISION (type1) < prec5)
4775 : {
4776 156 : type1 = build_bitint_type (prec5, TYPE_UNSIGNED (type1));
4777 156 : if (TREE_CODE (arg1) == INTEGER_CST)
4778 76 : arg1 = fold_convert (type1, arg1);
4779 : }
4780 2653 : unsigned int data_cnt = 0;
4781 2653 : tree last_rhs1 = NULL_TREE, last_rhs2 = NULL_TREE;
 : /* CMP is what extracted bits are compared against; starts as zero and
 : may become the previous extraction for the all-zeros-or-ones check.  */
4782 2653 : tree cmp = build_zero_cst (m_limb_type);
4783 2653 : unsigned prec_limbs = CEIL ((unsigned) prec, limb_prec);
4784 2653 : tree ovf_out = NULL_TREE, cmp_out = NULL_TREE;
 : /* Main per-limb loop: fetch operand limbs, add/subtract with carry,
 : fold overflow detection in, and store result limbs.  */
4785 11552 : for (unsigned i = 0; i < cnt; i++)
4786 : {
4787 8899 : m_data_cnt = 0;
4788 8899 : tree rhs1, rhs2;
4789 8899 : if (kind != bitint_prec_huge)
4790 5303 : idx = size_int (bitint_big_endian ? nelts - 1 - i : i);
4791 3596 : else if (i >= 2)
4792 1368 : idx = size_int (bitint_big_endian ? nelts + 1 - fin - i : fin + i - 2);
 : /* Regular iteration: load a limb from each operand.  */
4793 8899 : if (!last_ovf || i < cnt - 1)
4794 : {
4795 7957 : tree idx0 = idx, idx1 = idx;
4796 7957 : if (bitint_big_endian
4797 7957 : && CEIL ((unsigned) TYPE_PRECISION (type0), limb_prec) != nelts)
4798 : {
4799 0 : HOST_WIDE_INT diff
4800 0 : = ((HOST_WIDE_INT) CEIL (TYPE_PRECISION (type0), limb_prec)
4801 0 : - (HOST_WIDE_INT) nelts);
4802 0 : if (tree_fits_uhwi_p (idx))
4803 0 : idx0 = size_int (tree_to_uhwi (idx) + diff);
4804 : else
4805 : {
4806 0 : idx0 = make_ssa_name (sizetype);
4807 0 : g = gimple_build_assign (idx0, PLUS_EXPR, idx,
4808 0 : size_int (diff));
4809 0 : insert_before (g);
4810 : }
4811 : }
4812 7957 : if (type0 != TREE_TYPE (arg0))
4813 334 : rhs1 = handle_cast (type0, arg0, idx0);
4814 : else
4815 7623 : rhs1 = handle_operand (arg0, idx0);
4816 7957 : if (bitint_big_endian
4817 7957 : && CEIL ((unsigned) TYPE_PRECISION (type1), limb_prec) != nelts)
4818 : {
4819 0 : HOST_WIDE_INT diff
4820 0 : = ((HOST_WIDE_INT) CEIL (TYPE_PRECISION (type1), limb_prec)
4821 0 : - (HOST_WIDE_INT) nelts);
4822 0 : if (tree_fits_uhwi_p (idx))
4823 0 : idx1 = size_int (tree_to_uhwi (idx) + diff);
4824 : else
4825 : {
4826 0 : idx1 = make_ssa_name (sizetype);
4827 0 : g = gimple_build_assign (idx1, PLUS_EXPR, idx,
4828 0 : size_int (diff));
4829 0 : insert_before (g);
4830 : }
4831 : }
4832 7957 : if (type1 != TREE_TYPE (arg1))
4833 250 : rhs2 = handle_cast (type1, arg1, idx1);
4834 : else
4835 7707 : rhs2 = handle_operand (arg1, idx1);
4836 7957 : if (i == 0)
4837 2653 : data_cnt = m_data_cnt;
4838 7957 : if (!useless_type_conversion_p (m_limb_type, TREE_TYPE (rhs1)))
4839 1907 : rhs1 = add_cast (m_limb_type, rhs1);
4840 7957 : if (!useless_type_conversion_p (m_limb_type, TREE_TYPE (rhs2)))
4841 1907 : rhs2 = add_cast (m_limb_type, rhs2);
4842 : last_rhs1 = rhs1;
4843 : last_rhs2 = rhs2;
4844 : }
 : /* Final LAST_OVF iteration: operands are exhausted, feed in their
 : sign/zero extension limbs instead.  */
4845 : else
4846 : {
4847 942 : m_data_cnt = data_cnt;
4848 942 : if (TYPE_UNSIGNED (type0) || prec0 >= 0)
4849 421 : rhs1 = build_zero_cst (m_limb_type);
4850 : else
4851 : {
4852 521 : rhs1 = add_cast (signed_type_for (m_limb_type), last_rhs1);
4853 521 : if (TREE_CODE (rhs1) == INTEGER_CST)
4854 52 : rhs1 = build_int_cst (m_limb_type,
4855 74 : tree_int_cst_sgn (rhs1) < 0 ? -1 : 0);
4856 : else
4857 : {
4858 938 : tree lpm1 = build_int_cst (unsigned_type_node,
4859 469 : limb_prec - 1);
4860 469 : g = gimple_build_assign (make_ssa_name (TREE_TYPE (rhs1)),
4861 : RSHIFT_EXPR, rhs1, lpm1);
4862 469 : insert_before (g);
4863 469 : rhs1 = add_cast (m_limb_type, gimple_assign_lhs (g));
4864 : }
4865 : }
4866 942 : if (TYPE_UNSIGNED (type1) || prec1 >= 0)
4867 543 : rhs2 = build_zero_cst (m_limb_type);
4868 : else
4869 : {
4870 399 : rhs2 = add_cast (signed_type_for (m_limb_type), last_rhs2);
4871 399 : if (TREE_CODE (rhs2) == INTEGER_CST)
4872 114 : rhs2 = build_int_cst (m_limb_type,
4873 153 : tree_int_cst_sgn (rhs2) < 0 ? -1 : 0);
4874 : else
4875 : {
4876 570 : tree lpm1 = build_int_cst (unsigned_type_node,
4877 285 : limb_prec - 1);
4878 285 : g = gimple_build_assign (make_ssa_name (TREE_TYPE (rhs2)),
4879 : RSHIFT_EXPR, rhs2, lpm1);
4880 285 : insert_before (g);
4881 285 : rhs2 = add_cast (m_limb_type, gimple_assign_lhs (g));
4882 : }
4883 : }
4884 : }
 : /* Carry-propagating limb addition/subtraction.  */
4885 8899 : tree rhs = handle_plus_minus (code, rhs1, rhs2, idx);
 : /* Overflow detection for this limb, unless statically known.  */
4886 8899 : if (ovf != boolean_false_node)
4887 : {
 : /* Constant IDX: test only limbs inside [startlimb, endlimb].  */
4888 8899 : if (tree_fits_uhwi_p (idx))
4889 : {
4890 6671 : unsigned limb = tree_to_uhwi (idx);
4891 6671 : if (bitint_big_endian)
4892 0 : limb = nelts - 1 - limb;
4893 6671 : if (limb >= startlimb && limb <= endlimb)
4894 : {
4895 3274 : tree l = arith_overflow_extract_bits (start, end, rhs,
4896 : limb, check_zero);
4897 3274 : tree this_ovf = make_ssa_name (boolean_type_node);
4898 3274 : if (ovf == NULL_TREE && !check_zero)
4899 : {
 : /* First limb of an all-zeros-or-all-ones check: the
 : extraction overflows iff it is neither 0 nor -1,
 : i.e. extraction + 1 (unsigned) > 1.  */
4900 879 : cmp = l;
4901 879 : g = gimple_build_assign (make_ssa_name (m_limb_type),
4902 : PLUS_EXPR, l,
4903 : build_int_cst (m_limb_type, 1));
4904 879 : insert_before (g);
4905 879 : g = gimple_build_assign (this_ovf, GT_EXPR,
4906 : gimple_assign_lhs (g),
4907 : build_int_cst (m_limb_type, 1));
4908 : }
4909 : else
4910 2395 : g = gimple_build_assign (this_ovf, NE_EXPR, l, cmp);
4911 3274 : insert_before (g);
4912 3274 : if (ovf == NULL_TREE)
4913 : ovf = this_ovf;
4914 : else
4915 : {
4916 1012 : tree b = make_ssa_name (boolean_type_node);
4917 1012 : g = gimple_build_assign (b, BIT_IOR_EXPR, ovf, this_ovf);
4918 1012 : insert_before (g);
4919 1012 : ovf = b;
4920 : }
4921 : }
4922 : }
 : /* IDX is the loop induction variable: emit runtime comparisons
 : against STARTLIMB and accumulate OVF/CMP through PHIs.  */
4923 2228 : else if (startlimb < fin)
4924 : {
4925 782 : if (m_first && startlimb + 2 < fin)
4926 : {
4927 288 : tree data_out;
4928 288 : ovf = prepare_data_in_out (boolean_false_node, idx, &data_out);
4929 288 : ovf_out = m_data.pop ();
4930 288 : m_data.pop ();
4931 288 : if (!check_zero)
4932 : {
4933 149 : cmp = prepare_data_in_out (cmp, idx, &data_out);
4934 149 : cmp_out = m_data.pop ();
4935 149 : m_data.pop ();
4936 : }
4937 : }
4938 782 : if (i != 0 || startlimb != fin - 1)
4939 : {
4940 767 : tree_code cmp_code;
4941 767 : bool single_comparison
4942 767 : = (startlimb + 2 >= fin || (startlimb & 1) != (i & 1));
4943 : if (!single_comparison)
4944 : cmp_code = GE_EXPR;
4945 479 : else if ((startlimb & 1) == (i & 1))
4946 : cmp_code = EQ_EXPR;
4947 : else
4948 376 : cmp_code = GT_EXPR;
4949 767 : if (bitint_big_endian)
4950 0 : g = gimple_build_cond (swap_tree_comparison (cmp_code),
4951 0 : idx, size_int (nelts - 1
4952 : - startlimb),
4953 : NULL_TREE, NULL_TREE);
4954 : else
4955 767 : g = gimple_build_cond (cmp_code, idx, size_int (startlimb),
4956 : NULL_TREE, NULL_TREE);
4957 767 : edge edge_true_true, edge_true_false, edge_false;
4958 767 : gimple *g2 = NULL;
4959 767 : if (!single_comparison)
4960 288 : g2 = gimple_build_cond (NE_EXPR, idx,
4961 288 : size_int (bitint_big_endian
4962 : ? nelts - 1 - startlimb
4963 : : startlimb),
4964 : NULL_TREE, NULL_TREE);
4965 767 : if_then_if_then_else (g, g2, profile_probability::likely (),
4966 : profile_probability::likely (),
4967 : edge_true_true, edge_true_false,
4968 : edge_false);
4969 767 : unsigned tidx = startlimb + (cmp_code == GT_EXPR);
4970 767 : tree l = arith_overflow_extract_bits (start, end, rhs, tidx,
4971 : check_zero);
4972 767 : tree this_ovf = make_ssa_name (boolean_type_node);
4973 767 : if (cmp_code != GT_EXPR && !check_zero)
4974 : {
4975 153 : g = gimple_build_assign (make_ssa_name (m_limb_type),
4976 : PLUS_EXPR, l,
4977 : build_int_cst (m_limb_type, 1));
4978 153 : insert_before (g);
4979 153 : g = gimple_build_assign (this_ovf, GT_EXPR,
4980 : gimple_assign_lhs (g),
4981 : build_int_cst (m_limb_type, 1));
4982 : }
4983 : else
4984 614 : g = gimple_build_assign (this_ovf, NE_EXPR, l, cmp);
4985 767 : insert_before (g);
4986 767 : if (cmp_code == GT_EXPR)
4987 : {
4988 376 : tree t = make_ssa_name (boolean_type_node);
4989 376 : g = gimple_build_assign (t, BIT_IOR_EXPR, ovf, this_ovf);
4990 376 : insert_before (g);
4991 376 : this_ovf = t;
4992 : }
4993 767 : tree this_ovf2 = NULL_TREE;
4994 767 : if (!single_comparison)
4995 : {
4996 288 : m_gsi = gsi_after_labels (edge_true_true->src);
4997 288 : tree t = make_ssa_name (boolean_type_node);
4998 288 : g = gimple_build_assign (t, NE_EXPR, rhs, cmp);
4999 288 : insert_before (g);
5000 288 : this_ovf2 = make_ssa_name (boolean_type_node);
5001 288 : g = gimple_build_assign (this_ovf2, BIT_IOR_EXPR,
5002 : ovf, t);
5003 288 : insert_before (g);
5004 : }
 : /* Merge the OVF (and for !check_zero the CMP) values from
 : the three incoming paths with PHI nodes.  */
5005 767 : m_gsi = gsi_after_labels (edge_true_false->dest);
5006 767 : tree t;
5007 767 : if (i == 1 && ovf_out)
5008 : t = ovf_out;
5009 : else
5010 479 : t = make_ssa_name (boolean_type_node);
5011 767 : gphi *phi = create_phi_node (t, edge_true_false->dest);
5012 767 : add_phi_arg (phi, this_ovf, edge_true_false,
5013 : UNKNOWN_LOCATION);
5014 767 : add_phi_arg (phi, ovf ? ovf
5015 : : boolean_false_node, edge_false,
5016 : UNKNOWN_LOCATION);
5017 767 : if (edge_true_true)
5018 288 : add_phi_arg (phi, this_ovf2, edge_true_true,
5019 : UNKNOWN_LOCATION);
5020 767 : ovf = t;
5021 767 : if (!check_zero && cmp_code != GT_EXPR)
5022 : {
5023 153 : t = cmp_out ? cmp_out : make_ssa_name (m_limb_type);
5024 153 : phi = create_phi_node (t, edge_true_false->dest);
5025 153 : add_phi_arg (phi, l, edge_true_false, UNKNOWN_LOCATION);
5026 153 : add_phi_arg (phi, cmp, edge_false, UNKNOWN_LOCATION);
5027 153 : if (edge_true_true)
5028 149 : add_phi_arg (phi, cmp, edge_true_true,
5029 : UNKNOWN_LOCATION);
5030 : cmp = t;
5031 : }
5032 : }
5033 : }
5034 : }
5035 :
 : /* Store the computed limb into VAR or OBJ, but only for limbs that
 : actually belong to the PREC-bit result.  */
5036 8899 : if (var || obj)
5037 : {
5038 8671 : if (tree_fits_uhwi_p (idx)
5039 6561 : && (bitint_big_endian
5040 6561 : ? nelts - 1 - tree_to_uhwi (idx)
5041 6561 : : tree_to_uhwi (idx)) >= prec_limbs
5042 : ;
5043 7335 : else if (!tree_fits_uhwi_p (idx)
5044 2110 : && (unsigned) prec < (fin - (i == 0)) * limb_prec)
5045 : {
 : /* Variable IDX which may run past the result: guard the
 : store with IDX < PREC_LIMBS comparisons.  */
5046 1314 : bool single_comparison
5047 657 : = (((unsigned) prec % limb_prec) == 0
5048 499 : || prec_limbs + 1 >= fin
5049 1087 : || (prec_limbs & 1) == (i & 1));
5050 657 : if (bitint_big_endian)
5051 0 : g = gimple_build_cond (GE_EXPR, idx,
5052 0 : size_int (nelts - prec_limbs),
5053 : NULL_TREE, NULL_TREE);
5054 : else
5055 657 : g = gimple_build_cond (LE_EXPR, idx, size_int (prec_limbs - 1),
5056 : NULL_TREE, NULL_TREE);
5057 657 : gimple *g2 = NULL;
5058 657 : if (!single_comparison)
5059 215 : g2 = gimple_build_cond (EQ_EXPR, idx,
5060 215 : size_int (bitint_big_endian
5061 : ? nelts - prec_limbs
5062 : : prec_limbs - 1),
5063 : NULL_TREE, NULL_TREE);
5064 657 : edge edge_true_true, edge_true_false, edge_false;
5065 657 : if_then_if_then_else (g, g2, profile_probability::likely (),
5066 : profile_probability::unlikely (),
5067 : edge_true_true, edge_true_false,
5068 : edge_false);
5069 657 : tree idxl = idx;
5070 657 : if (bitint_big_endian && prec_limbs != nelts)
5071 : {
5072 0 : HOST_WIDE_INT diff = ((HOST_WIDE_INT) prec_limbs
5073 0 : - (HOST_WIDE_INT) nelts);
5074 0 : if (tree_fits_uhwi_p (idx))
5075 0 : idxl = size_int (tree_to_uhwi (idx) + diff);
5076 : else
5077 : {
5078 0 : idxl = make_ssa_name (sizetype);
5079 0 : g = gimple_build_assign (idxl, PLUS_EXPR, idx,
5080 0 : size_int (diff));
5081 0 : insert_before (g);
5082 : }
5083 : }
5084 946 : tree l = limb_access (type, var ? var : obj, idxl, true);
5085 657 : g = gimple_build_assign (l, rhs);
5086 657 : insert_before (g);
5087 657 : if (!single_comparison)
5088 : {
 : /* Partial most significant limb needs a store through
 : the narrower limb_access_type.  */
5089 215 : m_gsi = gsi_after_labels (edge_true_true->src);
5090 215 : tree plm1idx = size_int (bitint_big_endian
5091 : ? 0 : prec_limbs - 1);
5092 215 : tree plm1type = limb_access_type (type, plm1idx);
5093 215 : l = limb_access (type, var ? var : obj, plm1idx, true);
5094 215 : if (!useless_type_conversion_p (plm1type, TREE_TYPE (rhs)))
5095 215 : rhs = add_cast (plm1type, rhs);
5096 215 : if (!useless_type_conversion_p (TREE_TYPE (l),
5097 215 : TREE_TYPE (rhs)))
5098 215 : rhs = add_cast (TREE_TYPE (l), rhs);
5099 215 : g = gimple_build_assign (l, rhs);
5100 215 : insert_before (g);
5101 : }
5102 657 : m_gsi = gsi_after_labels (edge_true_false->dest);
5103 657 : }
5104 : else
5105 : {
 : /* Unconditional store of this limb.  */
5106 6678 : tree idxl = idx;
5107 6678 : if (bitint_big_endian && prec_limbs != nelts)
5108 : {
5109 0 : HOST_WIDE_INT diff = ((HOST_WIDE_INT) prec_limbs
5110 0 : - (HOST_WIDE_INT) nelts);
5111 0 : if (tree_fits_uhwi_p (idx))
5112 0 : idxl = size_int (tree_to_uhwi (idx) + diff);
5113 : else
5114 : {
5115 0 : idxl = make_ssa_name (sizetype);
5116 0 : g = gimple_build_assign (idxl, PLUS_EXPR, idx,
5117 0 : size_int (diff));
5118 0 : insert_before (g);
5119 : }
5120 : }
5121 13327 : tree l = limb_access (type, var ? var : obj, idxl, true);
5122 6678 : if (!useless_type_conversion_p (TREE_TYPE (l), TREE_TYPE (rhs)))
5123 0 : rhs = add_cast (TREE_TYPE (l), rhs);
5124 6678 : g = gimple_build_assign (l, rhs);
5125 6678 : insert_before (g);
5126 : }
5127 : }
5128 8899 : m_first = false;
 : /* Huge kind: the first two iterations build the loop body; bump
 : the induction variable and close the loop after the second.  */
5129 8899 : if (kind == bitint_prec_huge && i <= 1)
5130 : {
5131 2228 : if (i == 0)
5132 : {
5133 1114 : idx = make_ssa_name (sizetype);
5134 1114 : g = gimple_build_assign (idx, PLUS_EXPR, idx_first,
5135 : bitint_big_endian
5136 0 : ? size_int (-1) : size_one_node);
5137 1114 : insert_before (g);
5138 : }
5139 : else
5140 : {
5141 1114 : g = gimple_build_assign (idx_next, PLUS_EXPR, idx_first,
5142 2228 : size_int (bitint_big_endian ? -2 : 2));
5143 1114 : insert_before (g);
5144 1114 : if (bitint_big_endian)
5145 0 : g = gimple_build_cond (NE_EXPR, idx_first,
5146 0 : size_int (nelts + 1 - fin),
5147 : NULL_TREE, NULL_TREE);
5148 : else
5149 1114 : g = gimple_build_cond (NE_EXPR, idx_next, size_int (fin),
5150 : NULL_TREE, NULL_TREE);
5151 1114 : insert_before (g);
5152 1114 : m_gsi = gsi_for_stmt (final_stmt);
5153 1114 : m_bb = NULL;
5154 : }
5155 : }
5156 : }
5157 :
 : /* Emit the final result/overflow stores and replace STMT.  */
5158 2653 : finish_arith_overflow (var, obj, type, ovf, lhs, orig_obj, stmt,
5159 : prec_limbs, code);
5160 : }
5161 :
/* Lower a .MUL_OVERFLOW call with at least one large/huge _BitInt
   argument or return type _Complex large/huge _BitInt.

   OBJ is the destination limb array to store the result into (or
   NULL_TREE if one needs to be picked or created here), STMT is the
   .MUL_OVERFLOW/.UBSAN_CHECK_MUL internal call being lowered.  The
   multiplication itself is delegated to the IFN_MULBITINT internal
   function; this routine then emits the code which tests whether the
   (prec0+prec1 bit) product fits into the result precision.  */

void
bitint_large_huge::lower_mul_overflow (tree obj, gimple *stmt)
{
  tree arg0 = gimple_call_arg (stmt, 0);
  tree arg1 = gimple_call_arg (stmt, 1);
  tree lhs = gimple_call_lhs (stmt);
  /* Result unused: the whole call is dead, just remove it.  */
  if (!lhs)
    {
      gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
      gsi_remove (&gsi, true);
      return;
    }
  gimple *final_stmt = gsi_stmt (m_gsi);
  tree type = TREE_TYPE (lhs);
  /* For .MUL_OVERFLOW the lhs is _Complex; TYPE becomes the element
     (value) type.  */
  if (TREE_CODE (type) == COMPLEX_TYPE)
    type = TREE_TYPE (type);
  int prec = TYPE_PRECISION (type), prec0, prec1;
  /* Get addresses of the two operands plus their minimum precisions
     (negative prec{0,1} presumably encodes a signed operand).  */
  arg0 = handle_operand_addr (arg0, stmt, NULL, &prec0);
  arg1 = handle_operand_addr (arg1, stmt, NULL, &prec1);
  /* PREC2 is an upper bound on the number of bits the product can
     need: sum of the operand precisions, minus one if either operand
     fits in a single bit.  */
  int prec2 = ((prec0 < 0 ? -prec0 : prec0)
	       + (prec1 < 0 ? -prec1 : prec1));
  if (prec0 == 1 || prec1 == 1)
    --prec2;
  tree var = NULL_TREE;
  tree orig_obj = obj;
  bool force_var = false;
  if (obj == NULL_TREE
      && TREE_CODE (type) == BITINT_TYPE
      && bitint_precision_kind (type) >= bitint_prec_large
      && m_names
      && bitmap_bit_p (m_names, SSA_NAME_VERSION (lhs)))
    {
      /* No destination supplied, but the lhs has its own backing
	 variable in the coalesced partition map - use that.  */
      int part = var_to_partition (m_map, lhs);
      gcc_assert (m_vars[part] != NULL_TREE);
      obj = m_vars[part];
      if (TREE_TYPE (lhs) == type)
	orig_obj = obj;
    }
  else if (obj != NULL_TREE && DECL_P (obj))
    {
      /* If OBJ overlaps either source operand, the in-place
	 IFN_MULBITINT store would clobber an input; force use of a
	 separate temporary below.  */
      for (int i = 0; i < 2; ++i)
	{
	  tree arg = i ? arg1 : arg0;
	  if (TREE_CODE (arg) == ADDR_EXPR)
	    arg = TREE_OPERAND (arg, 0);
	  if (get_base_address (arg) == obj)
	    {
	      force_var = true;
	      break;
	    }
	}
    }
  /* Create a temporary limb array when there is no usable OBJ, when
     OBJ aliases an operand, or when the product needs more limbs than
     OBJ provides.  */
  if (obj == NULL_TREE
      || force_var
      || TREE_CODE (type) != BITINT_TYPE
      || bitint_precision_kind (type) < bitint_prec_large
      || prec2 > (CEIL (prec, limb_prec) * limb_prec * (orig_obj ? 1 : 2)))
    {
      unsigned HOST_WIDE_INT nelts = CEIL (MAX (prec, prec2), limb_prec);
      tree atype = build_array_type_nelts (m_limb_type, nelts);
      var = create_tmp_var (atype);
    }
  /* Emit the IFN_MULBITINT call computing the full product into
     VAR (or OBJ).  */
  tree addr = build_fold_addr_expr (var ? var : obj);
  addr = force_gimple_operand_gsi (&m_gsi, addr, true,
				   NULL_TREE, true, GSI_SAME_STMT);
  tree sitype = lang_hooks.types.type_for_mode (SImode, 0);
  gimple *g
    = gimple_build_call_internal (IFN_MULBITINT, 6,
				  addr, build_int_cst (sitype,
						       MAX (prec2, prec)),
				  arg0, build_int_cst (sitype, prec0),
				  arg1, build_int_cst (sitype, prec1));
  insert_before (g);

  unsigned start, end;
  bool check_zero;
  /* Ask the generic helper whether overflow can be decided without
     inspecting the result; when it returns NULL_TREE, the bits in
     [START, END) of the stored product have to be examined.  */
  tree ovf = arith_overflow (MULT_EXPR, type, prec, prec0, prec1, prec2,
			     &start, &end, &check_zero);
  if (ovf == NULL_TREE)
    {
      unsigned startlimb = start / limb_prec;
      unsigned endlimb = (end - 1) / limb_prec;
      unsigned nelts = CEIL (MAX (prec, prec2), limb_prec);
      unsigned cnt;
      /* CNT is how many distinct limb accesses are emitted: the
	 partial first limb, optionally a run of full middle limbs
	 (USE_LOOP decides whether that run becomes a runtime loop),
	 and the partial last limb.  */
      bool use_loop = false;
      if (startlimb == endlimb)
	cnt = 1;
      else if (startlimb + 1 == endlimb)
	cnt = 2;
      else if ((end % limb_prec) == 0)
	{
	  cnt = 2;
	  use_loop = true;
	}
      else
	{
	  cnt = 3;
	  use_loop = startlimb + 2 < endlimb;
	}
      if (cnt == 1)
	{
	  /* All interesting bits live in a single limb: extract them
	     and test either != 0 (CHECK_ZERO) or "not all-zeros and
	     not all-ones" via the x + 1 > 1 unsigned trick.  */
	  tree l = limb_access (NULL_TREE, var ? var : obj,
				size_int (bitint_big_endian
					  ? nelts - 1 - startlimb
					  : startlimb), true);
	  g = gimple_build_assign (make_ssa_name (m_limb_type), l);
	  insert_before (g);
	  l = arith_overflow_extract_bits (start, end, gimple_assign_lhs (g),
					   startlimb, check_zero);
	  ovf = make_ssa_name (boolean_type_node);
	  if (check_zero)
	    g = gimple_build_assign (ovf, NE_EXPR, l,
				     build_zero_cst (m_limb_type));
	  else
	    {
	      g = gimple_build_assign (make_ssa_name (m_limb_type),
				       PLUS_EXPR, l,
				       build_int_cst (m_limb_type, 1));
	      insert_before (g);
	      g = gimple_build_assign (ovf, GT_EXPR, gimple_assign_lhs (g),
				       build_int_cst (m_limb_type, 1));
	    }
	  insert_before (g);
	}
      else
	{
	  /* Multiple limbs: build a chain of blocks (with an optional
	     inner loop over the middle limbs), each jumping straight
	     to FINAL_STMT's block as soon as overflow is detected.  */
	  basic_block edge_bb = NULL;
	  gimple_stmt_iterator gsi = m_gsi;
	  gsi_prev (&gsi);
	  edge e = split_block (gsi_bb (gsi), gsi_stmt (gsi));
	  edge_bb = e->src;
	  m_gsi = gsi_end_bb (edge_bb);

	  tree cmp = build_zero_cst (m_limb_type);
	  for (unsigned i = 0; i < cnt; i++)
	    {
	      tree idx, idx_next = NULL_TREE;
	      /* i == 0: first (partial) limb; i == 2 (or last): final
		 partial limb; the middle iteration covers the full
		 limbs in between, looping if USE_LOOP.  */
	      if (i == 0)
		idx = size_int (bitint_big_endian
				? nelts - 1 - startlimb : startlimb);
	      else if (i == 2)
		idx = size_int (bitint_big_endian
				? nelts - 1 - endlimb : endlimb);
	      else if (use_loop)
		idx = create_loop (size_int (bitint_big_endian
					     ? nelts - startlimb - 2
					     : startlimb + 1), &idx_next);
	      else
		idx = size_int (bitint_big_endian
				? nelts - startlimb - 2 : startlimb + 1);
	      tree l = limb_access (NULL_TREE, var ? var : obj, idx, true);
	      g = gimple_build_assign (make_ssa_name (m_limb_type), l);
	      insert_before (g);
	      l = gimple_assign_lhs (g);
	      /* Only the boundary limbs need masking to the
		 [START, END) bit range.  */
	      if (i == 0 || i == 2)
		l = arith_overflow_extract_bits (start, end, l,
						 i == 0 ? startlimb : endlimb,
						 check_zero);
	      if (i == 0 && !check_zero)
		{
		  /* Sign-extension check: remember the first limb's
		     extracted bits as CMP; subsequent limbs must all
		     equal it (all-zeros or all-ones pattern).  */
		  cmp = l;
		  g = gimple_build_assign (make_ssa_name (m_limb_type),
					   PLUS_EXPR, l,
					   build_int_cst (m_limb_type, 1));
		  insert_before (g);
		  g = gimple_build_cond (GT_EXPR, gimple_assign_lhs (g),
					 build_int_cst (m_limb_type, 1),
					 NULL_TREE, NULL_TREE);
		}
	      else
		g = gimple_build_cond (NE_EXPR, l, cmp, NULL_TREE, NULL_TREE);
	      insert_before (g);
	      /* True edge = overflow detected, short-circuit to the
		 block holding FINAL_STMT.  */
	      edge e1 = split_block (gsi_bb (m_gsi), g);
	      e1->flags = EDGE_FALSE_VALUE;
	      edge e2 = make_edge (e1->src, gimple_bb (final_stmt),
				   EDGE_TRUE_VALUE);
	      e1->probability = profile_probability::likely ();
	      e2->probability = e1->probability.invert ();
	      if (i == 0)
		set_immediate_dominator (CDI_DOMINATORS, e2->dest, e2->src);
	      m_gsi = gsi_after_labels (e1->dest);
	      if (i == 1 && use_loop)
		{
		  /* Close the middle-limb loop: bump the index and
		     iterate until the last full limb is done.  */
		  g = gimple_build_assign (idx_next, PLUS_EXPR, idx,
					   bitint_big_endian
					   ? size_int (-1) : size_one_node);
		  insert_before (g);
		  if (bitint_big_endian)
		    g = gimple_build_cond (NE_EXPR, idx,
					   size_int (nelts - endlimb
						     - (cnt == 2)),
					   NULL_TREE, NULL_TREE);
		  else
		    g = gimple_build_cond (NE_EXPR, idx_next,
					   size_int (endlimb + (cnt == 2)),
					   NULL_TREE, NULL_TREE);
		  insert_before (g);
		  edge true_edge, false_edge;
		  extract_true_false_edges_from_block (gsi_bb (m_gsi),
						       &true_edge,
						       &false_edge);
		  m_gsi = gsi_after_labels (false_edge->dest);
		  m_bb = NULL;
		}
	    }

	  /* Merge: OVF is false on the fallthrough (no limb tripped),
	     true on every early-exit edge.  */
	  ovf = make_ssa_name (boolean_type_node);
	  basic_block bb = gimple_bb (final_stmt);
	  gphi *phi = create_phi_node (ovf, bb);
	  edge e1 = find_edge (gsi_bb (m_gsi), bb);
	  edge_iterator ei;
	  FOR_EACH_EDGE (e, ei, bb->preds)
	    {
	      tree val = e == e1 ? boolean_false_node : boolean_true_node;
	      add_phi_arg (phi, val, e, UNKNOWN_LOCATION);
	    }
	  m_gsi = gsi_for_stmt (final_stmt);
	}
    }

  finish_arith_overflow (var, obj, type, ovf, lhs, orig_obj, stmt,
			 CEIL (MAX (prec, prec2), limb_prec), MULT_EXPR);
}
5388 :
/* Lower REALPART_EXPR or IMAGPART_EXPR stmt extracting part of result from
   .{ADD,SUB,MUL}_OVERFLOW call.

   OBJ is the limb array to copy the requested half into, or NULL_TREE
   to use the backing variable of STMT's lhs.  The _Complex result of
   the overflow call is laid out as two consecutive NELTS-limb halves;
   the requested half is copied with a single aggregate assignment.  */

void
bitint_large_huge::lower_cplxpart_stmt (tree obj, gimple *stmt)
{
  tree rhs1 = gimple_assign_rhs1 (stmt);
  /* Strip the {REAL,IMAG}PART_EXPR wrapper to get at the _Complex
     operand itself.  */
  rhs1 = TREE_OPERAND (rhs1, 0);
  if (obj == NULL_TREE)
    {
      int part = var_to_partition (m_map, gimple_assign_lhs (stmt));
      gcc_assert (m_vars[part] != NULL_TREE);
      obj = m_vars[part];
    }
  /* If the _Complex SSA_NAME has no partition of its own, its
     defining .{ADD,SUB,MUL}_OVERFLOW call hasn't been lowered yet;
     lower it now with OBJ as the destination and we are done.  */
  if (TREE_CODE (rhs1) == SSA_NAME
      && (m_names == NULL
	  || !bitmap_bit_p (m_names, SSA_NAME_VERSION (rhs1))))
    {
      lower_call (obj, SSA_NAME_DEF_STMT (rhs1));
      return;
    }
  int part = var_to_partition (m_map, rhs1);
  gcc_assert (m_vars[part] != NULL_TREE);
  tree var = m_vars[part];
  unsigned HOST_WIDE_INT nelts
    = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (obj))) / limb_prec;
  tree atype = build_array_type_nelts (m_limb_type, nelts);
  if (!useless_type_conversion_p (atype, TREE_TYPE (obj)))
    obj = build1 (VIEW_CONVERT_EXPR, atype, obj);
  /* Byte offset into VAR: 0 for the real half, one half-size for the
     imaginary half.  (MEM_REF offsets are pointer-typed constants.)  */
  tree off = build_int_cst (build_pointer_type (TREE_TYPE (var)),
			    gimple_assign_rhs_code (stmt) == REALPART_EXPR
			    ? 0 : nelts * m_limb_size);
  tree v2 = build2 (MEM_REF, atype, build_fold_addr_expr (var), off);
  gimple *g = gimple_build_assign (obj, v2);
  insert_before (g);
}
5425 :
5426 : /* Lower COMPLEX_EXPR stmt. */
5427 :
5428 : void
5429 18 : bitint_large_huge::lower_complexexpr_stmt (gimple *stmt)
5430 : {
5431 18 : tree lhs = gimple_assign_lhs (stmt);
5432 18 : tree rhs1 = gimple_assign_rhs1 (stmt);
5433 18 : tree rhs2 = gimple_assign_rhs2 (stmt);
5434 18 : int part = var_to_partition (m_map, lhs);
5435 18 : gcc_assert (m_vars[part] != NULL_TREE);
5436 18 : lhs = m_vars[part];
5437 18 : unsigned HOST_WIDE_INT nelts
5438 18 : = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (rhs1))) / limb_prec;
5439 18 : tree atype = build_array_type_nelts (m_limb_type, nelts);
5440 18 : tree zero = build_zero_cst (build_pointer_type (TREE_TYPE (lhs)));
5441 18 : tree v1 = build2 (MEM_REF, atype, build_fold_addr_expr (lhs), zero);
5442 18 : tree v2;
5443 18 : if (TREE_CODE (rhs1) == SSA_NAME)
5444 : {
5445 18 : part = var_to_partition (m_map, rhs1);
5446 18 : gcc_assert (m_vars[part] != NULL_TREE);
5447 : v2 = m_vars[part];
5448 : }
5449 0 : else if (integer_zerop (rhs1))
5450 0 : v2 = build_zero_cst (atype);
5451 : else
5452 0 : v2 = tree_output_constant_def (rhs1);
5453 18 : if (!useless_type_conversion_p (atype, TREE_TYPE (v2)))
5454 18 : v2 = build1 (VIEW_CONVERT_EXPR, atype, v2);
5455 18 : gimple *g = gimple_build_assign (v1, v2);
5456 18 : insert_before (g);
5457 18 : tree off = fold_convert (build_pointer_type (TREE_TYPE (lhs)),
5458 : TYPE_SIZE_UNIT (atype));
5459 18 : v1 = build2 (MEM_REF, atype, build_fold_addr_expr (lhs), off);
5460 18 : if (TREE_CODE (rhs2) == SSA_NAME)
5461 : {
5462 0 : part = var_to_partition (m_map, rhs2);
5463 0 : gcc_assert (m_vars[part] != NULL_TREE);
5464 : v2 = m_vars[part];
5465 : }
5466 18 : else if (integer_zerop (rhs2))
5467 18 : v2 = build_zero_cst (atype);
5468 : else
5469 0 : v2 = tree_output_constant_def (rhs2);
5470 18 : if (!useless_type_conversion_p (atype, TREE_TYPE (v2)))
5471 0 : v2 = build1 (VIEW_CONVERT_EXPR, atype, v2);
5472 18 : g = gimple_build_assign (v1, v2);
5473 18 : insert_before (g);
5474 18 : }
5475 :
/* Lower a .{CLZ,CTZ,CLRSB,FFS,PARITY,POPCOUNT} call with one large/huge _BitInt
   argument.

   The query is lowered into a per-limb scan which invokes the
   corresponding int/long/long long builtin on individual limbs.
   CTZ/FFS/PARITY/POPCOUNT scan from the least significant limb up
   (m_upwards), CLZ/CLRSB from the most significant limb down.  For
   huge _BitInt the scan is a runtime loop; for large _BitInt the
   limbs are unrolled.  */

void
bitint_large_huge::lower_bit_query (gimple *stmt)
{
  tree arg0 = gimple_call_arg (stmt, 0);
  /* ARG1, if present, is the value to return for a zero input
     (.CLZ/.CTZ two-argument forms).  */
  tree arg1 = (gimple_call_num_args (stmt) == 2
	       ? gimple_call_arg (stmt, 1) : NULL_TREE);
  tree lhs = gimple_call_lhs (stmt);
  gimple *g;

  if (!lhs)
    {
      gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
      gsi_remove (&gsi, true);
      return;
    }
  tree type = TREE_TYPE (arg0);
  gcc_assert (TREE_CODE (type) == BITINT_TYPE);
  bitint_prec_kind kind = bitint_precision_kind (type);
  gcc_assert (kind >= bitint_prec_large);
  enum internal_fn ifn = gimple_call_internal_fn (stmt);
  enum built_in_function fcode = END_BUILTINS;
  /* Pick the unsigned int/long/long long builtin whose precision
     matches one limb.  */
  gcc_assert (TYPE_PRECISION (unsigned_type_node) == limb_prec
	      || TYPE_PRECISION (long_unsigned_type_node) == limb_prec
	      || TYPE_PRECISION (long_long_unsigned_type_node) == limb_prec);
  switch (ifn)
    {
    case IFN_CLZ:
      if (TYPE_PRECISION (unsigned_type_node) == limb_prec)
	fcode = BUILT_IN_CLZ;
      else if (TYPE_PRECISION (long_unsigned_type_node) == limb_prec)
	fcode = BUILT_IN_CLZL;
      else
	fcode = BUILT_IN_CLZLL;
      break;
    case IFN_FFS:
      /* .FFS (X) is .CTZ (X, -1) + 1, though under the hood
	 we don't add the addend at the end.  */
      arg1 = integer_zero_node;
      /* FALLTHRU */
    case IFN_CTZ:
      if (TYPE_PRECISION (unsigned_type_node) == limb_prec)
	fcode = BUILT_IN_CTZ;
      else if (TYPE_PRECISION (long_unsigned_type_node) == limb_prec)
	fcode = BUILT_IN_CTZL;
      else
	fcode = BUILT_IN_CTZLL;
      m_upwards = true;
      break;
    case IFN_CLRSB:
      if (TYPE_PRECISION (unsigned_type_node) == limb_prec)
	fcode = BUILT_IN_CLRSB;
      else if (TYPE_PRECISION (long_unsigned_type_node) == limb_prec)
	fcode = BUILT_IN_CLRSBL;
      else
	fcode = BUILT_IN_CLRSBLL;
      break;
    case IFN_PARITY:
      if (TYPE_PRECISION (unsigned_type_node) == limb_prec)
	fcode = BUILT_IN_PARITY;
      else if (TYPE_PRECISION (long_unsigned_type_node) == limb_prec)
	fcode = BUILT_IN_PARITYL;
      else
	fcode = BUILT_IN_PARITYLL;
      m_upwards = true;
      break;
    case IFN_POPCOUNT:
      if (TYPE_PRECISION (unsigned_type_node) == limb_prec)
	fcode = BUILT_IN_POPCOUNT;
      else if (TYPE_PRECISION (long_unsigned_type_node) == limb_prec)
	fcode = BUILT_IN_POPCOUNTL;
      else
	fcode = BUILT_IN_POPCOUNTLL;
      m_upwards = true;
      break;
    default:
      gcc_unreachable ();
    }
  tree fndecl = builtin_decl_explicit (fcode), res = NULL_TREE;
  unsigned cnt = 0, rem = 0, end = 0, prec = TYPE_PRECISION (type);
  unsigned nelts = CEIL (prec, limb_prec);
  /* Per-iteration bookkeeping for the final phi construction: the
     early-exit edge E taken when this limb decided the answer, the
     limb value VAL and the bit-position ADDEND contributed so far.  */
  struct bq_details { edge e; tree val, addend; } *bqp = NULL;
  basic_block edge_bb = NULL;
  if (m_upwards)
    {
      /* CTZ/FFS/PARITY/POPCOUNT: walk limbs from index 0 upward
	 (or from nelts-1 downward for big-endian limb order).  */
      tree idx = NULL_TREE, idx_first = NULL_TREE, idx_next = NULL_TREE;
      if (kind == bitint_prec_large)
	cnt = nelts;
      else
	{
	  /* Huge: loop handles two limbs per iteration, the REM bits
	     beyond the last full pair are peeled as extra unrolled
	     iterations.  */
	  rem = (prec % (2 * limb_prec));
	  end = (prec - rem) / limb_prec;
	  cnt = 2 + CEIL (rem, limb_prec);
	  idx = idx_first = create_loop (bitint_big_endian
					 ? size_int (nelts - 1)
					 : size_zero_node, &idx_next);
	}

      if (ifn == IFN_CTZ || ifn == IFN_FFS)
	{
	  /* CTZ/FFS exit as soon as a non-zero limb is found; prepare
	     EDGE_BB to hold the scanning code with exits to STMT's
	     block.  */
	  gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
	  gsi_prev (&gsi);
	  edge e = split_block (gsi_bb (gsi), gsi_stmt (gsi));
	  edge_bb = e->src;
	  if (kind == bitint_prec_large)
	    m_gsi = gsi_end_bb (edge_bb);
	  bqp = XALLOCAVEC (struct bq_details, cnt);
	}
      else
	m_after_stmt = stmt;
      if (kind != bitint_prec_large)
	m_upwards_2limb = end;

      for (unsigned i = 0; i < cnt; i++)
	{
	  m_data_cnt = 0;
	  /* Compute the limb index for this iteration; i == 0 and 1
	     in the huge case use the loop IDX set up above.  */
	  if (kind == bitint_prec_large)
	    idx = size_int (bitint_big_endian ? nelts - 1 - i : i);
	  else if (i >= 2)
	    idx = size_int (bitint_big_endian
			    ? nelts - 1 - end - (i > 2) : end + (i > 2));

	  tree rhs1 = handle_operand (arg0, idx);
	  if (!useless_type_conversion_p (m_limb_type, TREE_TYPE (rhs1)))
	    {
	      if (!TYPE_UNSIGNED (TREE_TYPE (rhs1)))
		rhs1 = add_cast (unsigned_type_for (TREE_TYPE (rhs1)), rhs1);
	      rhs1 = add_cast (m_limb_type, rhs1);
	    }

	  /* IN/OUT carry the running accumulator (xor for parity,
	     running count otherwise) between iterations/loop
	     latches.  */
	  tree in, out, tem;
	  if (ifn == IFN_PARITY)
	    in = prepare_data_in_out (build_zero_cst (m_limb_type), idx, &out);
	  else if (ifn == IFN_FFS)
	    in = prepare_data_in_out (integer_one_node, idx, &out);
	  else
	    in = prepare_data_in_out (integer_zero_node, idx, &out);

	  switch (ifn)
	    {
	    case IFN_CTZ:
	    case IFN_FFS:
	      /* Branch out to STMT's block on the first non-zero
		 limb; record the edge/value/bit-offset for the final
		 phis.  */
	      g = gimple_build_cond (NE_EXPR, rhs1,
				     build_zero_cst (m_limb_type),
				     NULL_TREE, NULL_TREE);
	      insert_before (g);
	      edge e1, e2;
	      e1 = split_block (gsi_bb (m_gsi), g);
	      e1->flags = EDGE_FALSE_VALUE;
	      e2 = make_edge (e1->src, gimple_bb (stmt), EDGE_TRUE_VALUE);
	      e1->probability = profile_probability::unlikely ();
	      e2->probability = e1->probability.invert ();
	      if (i == 0)
		set_immediate_dominator (CDI_DOMINATORS, e2->dest, e2->src);
	      m_gsi = gsi_after_labels (e1->dest);
	      bqp[i].e = e2;
	      bqp[i].val = rhs1;
	      if (tree_fits_uhwi_p (idx))
		bqp[i].addend
		  = build_int_cst (integer_type_node,
				   (bitint_big_endian
				    ? nelts - 1 - tree_to_uhwi (idx)
				    : tree_to_uhwi (idx)) * limb_prec
				   + (ifn == IFN_FFS));
	      else
		{
		  /* Index only known at runtime (loop body): keep the
		     bit offset in an SSA accumulator instead.  */
		  bqp[i].addend = in;
		  if (i == 1)
		    res = out;
		  else
		    res = make_ssa_name (integer_type_node);
		  g = gimple_build_assign (res, PLUS_EXPR, in,
					   build_int_cst (integer_type_node,
							  limb_prec));
		  insert_before (g);
		  m_data[m_data_cnt] = res;
		}
	      break;
	    case IFN_PARITY:
	      /* Fold this limb into the running xor; the builtin is
		 applied once at the very end.  */
	      if (!integer_zerop (in))
		{
		  if (kind == bitint_prec_huge && i == 1)
		    res = out;
		  else
		    res = make_ssa_name (m_limb_type);
		  g = gimple_build_assign (res, BIT_XOR_EXPR, in, rhs1);
		  insert_before (g);
		}
	      else
		res = rhs1;
	      m_data[m_data_cnt] = res;
	      break;
	    case IFN_POPCOUNT:
	      /* Sum per-limb popcounts.  */
	      g = gimple_build_call (fndecl, 1, rhs1);
	      tem = make_ssa_name (integer_type_node);
	      gimple_call_set_lhs (g, tem);
	      insert_before (g);
	      if (!integer_zerop (in))
		{
		  if (kind == bitint_prec_huge && i == 1)
		    res = out;
		  else
		    res = make_ssa_name (integer_type_node);
		  g = gimple_build_assign (res, PLUS_EXPR, in, tem);
		  insert_before (g);
		}
	      else
		res = tem;
	      m_data[m_data_cnt] = res;
	      break;
	    default:
	      gcc_unreachable ();
	    }

	  m_first = false;
	  /* Huge case: after the second unrolled iteration, emit the
	     index increment and the loop back-edge condition.  */
	  if (kind == bitint_prec_huge && i <= 1)
	    {
	      if (i == 0)
		{
		  idx = make_ssa_name (sizetype);
		  g = gimple_build_assign (idx, PLUS_EXPR, idx_first,
					   bitint_big_endian
					   ? size_int (-1) : size_one_node);
		  insert_before (g);
		}
	      else
		{
		  g = gimple_build_assign (idx_next, PLUS_EXPR, idx_first,
					   size_int (bitint_big_endian
						     ? -2 : 2));
		  insert_before (g);
		  if (bitint_big_endian)
		    g = gimple_build_cond (NE_EXPR, idx_first,
					   size_int (cnt - 1),
					   NULL_TREE, NULL_TREE);
		  else
		    g = gimple_build_cond (NE_EXPR, idx_next, size_int (end),
					   NULL_TREE, NULL_TREE);
		  insert_before (g);
		  if (ifn == IFN_CTZ || ifn == IFN_FFS)
		    m_gsi = gsi_after_labels (edge_bb);
		  else
		    m_gsi = gsi_for_stmt (stmt);
		  m_bb = NULL;
		}
	    }
	}
    }
  else
    {
      /* CLZ/CLRSB: walk limbs from the most significant limb down.
	 SUB_ONE adjusts for CLRSB counting redundant sign bits
	 (one less than leading sign-bit copies).  */
      tree idx = NULL_TREE, idx_next = NULL_TREE, first = NULL_TREE;
      int sub_one = 0;
      if (kind == bitint_prec_large)
	cnt = nelts;
      else
	{
	  rem = prec % limb_prec;
	  if (rem == 0 && (!TYPE_UNSIGNED (type) || ifn == IFN_CLRSB))
	    rem = limb_prec;
	  end = (prec - rem) / limb_prec;
	  cnt = 1 + (rem != 0);
	  if (ifn == IFN_CLRSB)
	    sub_one = 1;
	}

      gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
      gsi_prev (&gsi);
      edge e = split_block (gsi_bb (gsi), gsi_stmt (gsi));
      edge_bb = e->src;
      m_gsi = gsi_end_bb (edge_bb);

      if (ifn == IFN_CLZ)
	bqp = XALLOCAVEC (struct bq_details, cnt);
      else
	{
	  /* CLRSB needs two exit edges per iteration (see the 2*i
	     indexing below), hence twice the bookkeeping slots and an
	     extra split block.  */
	  gsi = gsi_for_stmt (stmt);
	  gsi_prev (&gsi);
	  e = split_block (gsi_bb (gsi), gsi_stmt (gsi));
	  edge_bb = e->src;
	  bqp = XALLOCAVEC (struct bq_details, 2 * cnt);
	}

      for (unsigned i = 0; i < cnt; i++)
	{
	  m_data_cnt = 0;
	  /* Highest limb first; in the huge case the last iteration
	     is a runtime loop over all remaining lower limbs.  */
	  if (kind == bitint_prec_large)
	    idx = size_int (bitint_big_endian ? i : cnt - i - 1);
	  else if (i == cnt - 1)
	    idx = create_loop (size_int (bitint_big_endian ? i : end - 1),
			       &idx_next);
	  else
	    idx = bitint_big_endian ? size_zero_node : size_int (end);

	  tree rhs1 = handle_operand (arg0, idx);
	  if (!useless_type_conversion_p (m_limb_type, TREE_TYPE (rhs1)))
	    {
	      /* CLZ wants zero-extension of a partial top limb,
		 CLRSB wants sign-extension.  */
	      if (ifn == IFN_CLZ && !TYPE_UNSIGNED (TREE_TYPE (rhs1)))
		rhs1 = add_cast (unsigned_type_for (TREE_TYPE (rhs1)), rhs1);
	      else if (ifn == IFN_CLRSB && TYPE_UNSIGNED (TREE_TYPE (rhs1)))
		rhs1 = add_cast (signed_type_for (TREE_TYPE (rhs1)), rhs1);
	      rhs1 = add_cast (m_limb_type, rhs1);
	    }

	  if (ifn == IFN_CLZ)
	    {
	      /* Exit on the first non-zero limb from the top.  */
	      g = gimple_build_cond (NE_EXPR, rhs1,
				     build_zero_cst (m_limb_type),
				     NULL_TREE, NULL_TREE);
	      insert_before (g);
	      edge e1 = split_block (gsi_bb (m_gsi), g);
	      e1->flags = EDGE_FALSE_VALUE;
	      edge e2 = make_edge (e1->src, gimple_bb (stmt), EDGE_TRUE_VALUE);
	      e1->probability = profile_probability::unlikely ();
	      e2->probability = e1->probability.invert ();
	      if (i == 0)
		set_immediate_dominator (CDI_DOMINATORS, e2->dest, e2->src);
	      m_gsi = gsi_after_labels (e1->dest);
	      bqp[i].e = e2;
	      bqp[i].val = rhs1;
	    }
	  else
	    {
	      if (i == 0)
		{
		  /* CLRSB top limb: FIRST remembers it; exit early if
		     it is neither all-zeros nor all-ones (the x + 1
		     > 1 unsigned trick).  */
		  first = rhs1;
		  g = gimple_build_assign (make_ssa_name (m_limb_type),
					   PLUS_EXPR, rhs1,
					   build_int_cst (m_limb_type, 1));
		  insert_before (g);
		  g = gimple_build_cond (GT_EXPR, gimple_assign_lhs (g),
					 build_int_cst (m_limb_type, 1),
					 NULL_TREE, NULL_TREE);
		  insert_before (g);
		}
	      else
		{
		  /* Lower limbs: exit with a sign mismatch if the top
		     bit differs from FIRST's, or stop counting when
		     the limb differs from FIRST at all.  */
		  g = gimple_build_assign (make_ssa_name (m_limb_type),
					   BIT_XOR_EXPR, rhs1, first);
		  insert_before (g);
		  tree stype = signed_type_for (m_limb_type);
		  g = gimple_build_cond (LT_EXPR,
					 add_cast (stype,
						   gimple_assign_lhs (g)),
					 build_zero_cst (stype),
					 NULL_TREE, NULL_TREE);
		  insert_before (g);
		  edge e1 = split_block (gsi_bb (m_gsi), g);
		  e1->flags = EDGE_FALSE_VALUE;
		  edge e2 = make_edge (e1->src, gimple_bb (stmt),
				       EDGE_TRUE_VALUE);
		  e1->probability = profile_probability::unlikely ();
		  e2->probability = e1->probability.invert ();
		  if (i == 1)
		    set_immediate_dominator (CDI_DOMINATORS, e2->dest,
					     e2->src);
		  m_gsi = gsi_after_labels (e1->dest);
		  bqp[2 * i].e = e2;
		  g = gimple_build_cond (NE_EXPR, rhs1, first,
					 NULL_TREE, NULL_TREE);
		  insert_before (g);
		}
	      edge e1 = split_block (gsi_bb (m_gsi), g);
	      e1->flags = EDGE_FALSE_VALUE;
	      edge e2 = make_edge (e1->src, edge_bb, EDGE_TRUE_VALUE);
	      e1->probability = profile_probability::unlikely ();
	      e2->probability = e1->probability.invert ();
	      if (i == 0)
		set_immediate_dominator (CDI_DOMINATORS, e2->dest, e2->src);
	      m_gsi = gsi_after_labels (e1->dest);
	      bqp[2 * i + 1].e = e2;
	      bqp[i].val = rhs1;
	    }
	  /* Bits below (and including) this limb, i.e. the count
	     contributed once the builtin result on the deciding limb
	     is added.  */
	  if (tree_fits_uhwi_p (idx))
	    bqp[i].addend
	      = build_int_cst (integer_type_node,
			       (int) prec
			       - (((int) (bitint_big_endian
					  ? nelts - 1 - tree_to_uhwi (idx)
					  : tree_to_uhwi (idx)) + 1)
				  * limb_prec) - sub_one);
	  else
	    {
	      tree in, out;
	      in = build_int_cst (integer_type_node, rem - sub_one);
	      m_first = true;
	      in = prepare_data_in_out (in, idx, &out);
	      out = m_data[m_data_cnt + 1];
	      bqp[i].addend = in;
	      g = gimple_build_assign (out, PLUS_EXPR, in,
				       build_int_cst (integer_type_node,
						      limb_prec));
	      insert_before (g);
	      m_data[m_data_cnt] = out;
	    }

	  m_first = false;
	  /* Close the downward loop over the remaining limbs.  */
	  if (kind == bitint_prec_huge && i == cnt - 1)
	    {
	      g = gimple_build_assign (idx_next, PLUS_EXPR, idx,
				       bitint_big_endian
				       ? size_one_node : size_int (-1));
	      insert_before (g);
	      g = gimple_build_cond (NE_EXPR, idx,
				     bitint_big_endian
				     ? size_int (nelts - 1) : size_zero_node,
				     NULL_TREE, NULL_TREE);
	      insert_before (g);
	      edge true_edge, false_edge;
	      extract_true_false_edges_from_block (gsi_bb (m_gsi),
						   &true_edge, &false_edge);
	      m_gsi = gsi_after_labels (false_edge->dest);
	      m_bb = NULL;
	    }
	}
    }
  /* Scan emitted; now stitch the early-exit edges together with phis
     and compute the final answer from the deciding limb.  */
  switch (ifn)
    {
    case IFN_CLZ:
    case IFN_CTZ:
    case IFN_FFS:
      gphi *phi1, *phi2, *phi3;
      basic_block bb;
      bb = gsi_bb (m_gsi);
      remove_edge (find_edge (bb, gimple_bb (stmt)));
      /* PHI1 = deciding limb value, PHI2 = bit offset to add to the
	 builtin's result on that limb.  */
      phi1 = create_phi_node (make_ssa_name (m_limb_type),
			      gimple_bb (stmt));
      phi2 = create_phi_node (make_ssa_name (integer_type_node),
			      gimple_bb (stmt));
      for (unsigned i = 0; i < cnt; i++)
	{
	  add_phi_arg (phi1, bqp[i].val, bqp[i].e, UNKNOWN_LOCATION);
	  add_phi_arg (phi2, bqp[i].addend, bqp[i].e, UNKNOWN_LOCATION);
	}
      /* Single-argument .CLZ/.CTZ is undefined for zero input, so the
	 all-zero fallthrough path is unreachable.  */
      if (arg1 == NULL_TREE)
	{
	  g = gimple_build_builtin_unreachable (m_loc);
	  insert_before (g);
	}
      m_gsi = gsi_for_stmt (stmt);
      g = gimple_build_call (fndecl, 1, gimple_phi_result (phi1));
      gimple_call_set_lhs (g, make_ssa_name (integer_type_node));
      insert_before (g);
      if (arg1 == NULL_TREE)
	g = gimple_build_assign (lhs, PLUS_EXPR,
				 gimple_phi_result (phi2),
				 gimple_call_lhs (g));
      else
	{
	  /* Two-argument form: select ARG1 on the all-zero path via
	     PHI3.  */
	  g = gimple_build_assign (make_ssa_name (integer_type_node),
				   PLUS_EXPR, gimple_phi_result (phi2),
				   gimple_call_lhs (g));
	  insert_before (g);
	  edge e1 = split_block (gimple_bb (stmt), g);
	  edge e2 = make_edge (bb, e1->dest, EDGE_FALLTHRU);
	  e2->probability = profile_probability::always ();
	  set_immediate_dominator (CDI_DOMINATORS, e1->dest,
				   get_immediate_dominator (CDI_DOMINATORS,
							    e1->src));
	  phi3 = create_phi_node (make_ssa_name (integer_type_node), e1->dest);
	  add_phi_arg (phi3, gimple_assign_lhs (g), e1, UNKNOWN_LOCATION);
	  add_phi_arg (phi3, arg1, e2, UNKNOWN_LOCATION);
	  m_gsi = gsi_for_stmt (stmt);
	  g = gimple_build_assign (lhs, gimple_phi_result (phi3));
	}
      gsi_replace (&m_gsi, g, true);
      break;
    case IFN_CLRSB:
      bb = gsi_bb (m_gsi);
      remove_edge (find_edge (bb, edge_bb));
      edge e;
      e = make_edge (bb, gimple_bb (stmt), EDGE_FALLTHRU);
      e->probability = profile_probability::always ();
      set_immediate_dominator (CDI_DOMINATORS, gimple_bb (stmt),
			       get_immediate_dominator (CDI_DOMINATORS,
							edge_bb));
      /* PHI1/PHI2 feed the builtin call in EDGE_BB (mismatched-limb
	 exits); PHI3 in STMT's block merges all possible results.  */
      phi1 = create_phi_node (make_ssa_name (m_limb_type),
			      edge_bb);
      phi2 = create_phi_node (make_ssa_name (integer_type_node),
			      edge_bb);
      phi3 = create_phi_node (make_ssa_name (integer_type_node),
			      gimple_bb (stmt));
      for (unsigned i = 0; i < cnt; i++)
	{
	  add_phi_arg (phi1, bqp[i].val, bqp[2 * i + 1].e, UNKNOWN_LOCATION);
	  add_phi_arg (phi2, bqp[i].addend, bqp[2 * i + 1].e,
		       UNKNOWN_LOCATION);
	  tree a = bqp[i].addend;
	  if (i && kind == bitint_prec_large)
	    a = int_const_binop (PLUS_EXPR, a, integer_minus_one_node);
	  if (i)
	    add_phi_arg (phi3, a, bqp[2 * i].e, UNKNOWN_LOCATION);
	}
      /* All limbs equal to the sign pattern: result is prec - 1.  */
      add_phi_arg (phi3, build_int_cst (integer_type_node, prec - 1), e,
		   UNKNOWN_LOCATION);
      m_gsi = gsi_after_labels (edge_bb);
      g = gimple_build_call (fndecl, 1,
			     add_cast (signed_type_for (m_limb_type),
				       gimple_phi_result (phi1)));
      gimple_call_set_lhs (g, make_ssa_name (integer_type_node));
      insert_before (g);
      g = gimple_build_assign (make_ssa_name (integer_type_node),
			       PLUS_EXPR, gimple_call_lhs (g),
			       gimple_phi_result (phi2));
      insert_before (g);
      if (kind != bitint_prec_large)
	{
	  g = gimple_build_assign (make_ssa_name (integer_type_node),
				   PLUS_EXPR, gimple_assign_lhs (g),
				   integer_one_node);
	  insert_before (g);
	}
      add_phi_arg (phi3, gimple_assign_lhs (g),
		   find_edge (edge_bb, gimple_bb (stmt)), UNKNOWN_LOCATION);
      m_gsi = gsi_for_stmt (stmt);
      g = gimple_build_assign (lhs, gimple_phi_result (phi3));
      gsi_replace (&m_gsi, g, true);
      break;
    case IFN_PARITY:
      /* RES holds the xor of all limbs; one builtin call finishes.  */
      g = gimple_build_call (fndecl, 1, res);
      gimple_call_set_lhs (g, lhs);
      gsi_replace (&m_gsi, g, true);
      break;
    case IFN_POPCOUNT:
      /* RES already holds the summed per-limb popcounts.  */
      g = gimple_build_assign (lhs, res);
      gsi_replace (&m_gsi, g, true);
      break;
    default:
      gcc_unreachable ();
    }
}
6008 :
/* Lower a call statement with one or more large/huge _BitInt
   arguments or large/huge _BitInt return value.

   OBJ is the destination limb array for internal arithmetic calls
   that need one (or NULL_TREE), STMT the call.  Overflow and
   bit-query internal functions are dispatched to their dedicated
   lowering routines; any other call keeps its ABI but has each
   large/huge _BitInt SSA_NAME argument replaced by a fresh SSA copy
   loaded from the operand's backing limb-array variable, and its
   _BitInt lhs redirected to store into the lhs's backing variable.  */

void
bitint_large_huge::lower_call (tree obj, gimple *stmt)
{
  gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
  unsigned int nargs = gimple_call_num_args (stmt);
  if (gimple_call_internal_p (stmt))
    switch (gimple_call_internal_fn (stmt))
      {
      case IFN_ADD_OVERFLOW:
      case IFN_SUB_OVERFLOW:
      case IFN_UBSAN_CHECK_ADD:
      case IFN_UBSAN_CHECK_SUB:
	lower_addsub_overflow (obj, stmt);
	return;
      case IFN_MUL_OVERFLOW:
      case IFN_UBSAN_CHECK_MUL:
	lower_mul_overflow (obj, stmt);
	return;
      case IFN_CLZ:
      case IFN_CTZ:
      case IFN_CLRSB:
      case IFN_FFS:
      case IFN_PARITY:
      case IFN_POPCOUNT:
	lower_bit_query (stmt);
	return;
      default:
	break;
      }
  bool returns_twice = (gimple_call_flags (stmt) & ECF_RETURNS_TWICE) != 0;
  for (unsigned int i = 0; i < nargs; ++i)
    {
      tree arg = gimple_call_arg (stmt, i);
      /* Only large/huge _BitInt SSA_NAME arguments need rewriting.  */
      if (TREE_CODE (arg) != SSA_NAME
	  || TREE_CODE (TREE_TYPE (arg)) != BITINT_TYPE
	  || bitint_precision_kind (TREE_TYPE (arg)) <= bitint_prec_middle)
	continue;
      if (SSA_NAME_IS_DEFAULT_DEF (arg)
	  && (!SSA_NAME_VAR (arg) || VAR_P (SSA_NAME_VAR (arg))))
	{
	  /* Uninitialized argument: pass the default def of a fresh
	     temporary instead of reading any backing storage.  */
	  tree var = create_tmp_reg (TREE_TYPE (arg));
	  arg = get_or_create_ssa_default_def (cfun, var);
	}
      else
	{
	  /* Load the backing limb-array variable (view-converted to
	     the _BitInt type if needed) into a new SSA_NAME used as
	     the actual argument.  */
	  int p = var_to_partition (m_map, arg);
	  tree v = m_vars[p];
	  gcc_assert (v != NULL_TREE);
	  if (!types_compatible_p (TREE_TYPE (arg), TREE_TYPE (v)))
	    v = build1 (VIEW_CONVERT_EXPR, TREE_TYPE (arg), v);
	  arg = make_ssa_name (TREE_TYPE (arg));
	  gimple *g = gimple_build_assign (arg, v);
	  gsi_insert_before (&gsi, g, GSI_SAME_STMT);
	  /* Loads inserted before a returns_twice call with abnormal
	     predecessors need fixing up later; queue the call once.  */
	  if (returns_twice && bb_has_abnormal_pred (gimple_bb (stmt)))
	    {
	      m_returns_twice_calls.safe_push (stmt);
	      returns_twice = false;
	    }
	}
      gimple_call_set_arg (stmt, i, arg);
      /* Protect the new SSA_NAME from later removal by this pass.  */
      if (m_preserved == NULL)
	m_preserved = BITMAP_ALLOC (NULL);
      bitmap_set_bit (m_preserved, SSA_NAME_VERSION (arg));
    }
  tree lhs = gimple_call_lhs (stmt);
  if (lhs
      && TREE_CODE (lhs) == SSA_NAME
      && TREE_CODE (TREE_TYPE (lhs)) == BITINT_TYPE
      && bitint_precision_kind (TREE_TYPE (lhs)) >= bitint_prec_large)
    {
      /* Store the _BitInt result directly into the lhs's backing
	 variable; the original SSA_NAME gets a nop def stmt since it
	 is no longer defined by the call.  */
      int p = var_to_partition (m_map, lhs);
      tree v = m_vars[p];
      gcc_assert (v != NULL_TREE);
      if (!types_compatible_p (TREE_TYPE (lhs), TREE_TYPE (v)))
	v = build1 (VIEW_CONVERT_EXPR, TREE_TYPE (lhs), v);
      gimple_call_set_lhs (stmt, v);
      SSA_NAME_DEF_STMT (lhs) = gimple_build_nop ();
    }
  update_stmt (stmt);
}
6092 :
6093 : /* Lower __asm STMT which involves large/huge _BitInt values. */
6094 :
6095 : void
6096 3 : bitint_large_huge::lower_asm (gimple *stmt)
6097 : {
6098 3 : gasm *g = as_a <gasm *> (stmt);
6099 3 : unsigned noutputs = gimple_asm_noutputs (g);
6100 3 : unsigned ninputs = gimple_asm_ninputs (g);
6101 :
6102 5 : for (unsigned i = 0; i < noutputs; ++i)
6103 : {
6104 2 : tree t = gimple_asm_output_op (g, i);
6105 2 : tree s = TREE_VALUE (t);
6106 2 : if (TREE_CODE (s) == SSA_NAME
6107 1 : && TREE_CODE (TREE_TYPE (s)) == BITINT_TYPE
6108 3 : && bitint_precision_kind (TREE_TYPE (s)) >= bitint_prec_large)
6109 : {
6110 1 : int part = var_to_partition (m_map, s);
6111 1 : gcc_assert (m_vars[part] != NULL_TREE);
6112 1 : TREE_VALUE (t) = m_vars[part];
6113 : }
6114 : }
6115 8 : for (unsigned i = 0; i < ninputs; ++i)
6116 : {
6117 5 : tree t = gimple_asm_input_op (g, i);
6118 5 : tree s = TREE_VALUE (t);
6119 5 : if (TREE_CODE (s) == SSA_NAME
6120 4 : && TREE_CODE (TREE_TYPE (s)) == BITINT_TYPE
6121 9 : && bitint_precision_kind (TREE_TYPE (s)) >= bitint_prec_large)
6122 : {
6123 4 : if (SSA_NAME_IS_DEFAULT_DEF (s)
6124 4 : && (!SSA_NAME_VAR (s) || VAR_P (SSA_NAME_VAR (s))))
6125 : {
6126 1 : TREE_VALUE (t) = create_tmp_var (TREE_TYPE (s), "bitint");
6127 1 : mark_addressable (TREE_VALUE (t));
6128 : }
6129 : else
6130 : {
6131 3 : int part = var_to_partition (m_map, s);
6132 3 : gcc_assert (m_vars[part] != NULL_TREE);
6133 3 : TREE_VALUE (t) = m_vars[part];
6134 : }
6135 : }
6136 : }
6137 3 : update_stmt (stmt);
6138 3 : }
6139 :
6140 : /* Lower statement STMT which involves large/huge _BitInt values
6141 : into code accessing individual limbs. */
6142 :
void
bitint_large_huge::lower_stmt (gimple *stmt)
{
  /* Reset the per-statement lowering state.  */
  m_first = true;
  m_lhs = NULL_TREE;
  m_data.truncate (0);
  m_data_cnt = 0;
  m_gsi = gsi_for_stmt (stmt);
  m_after_stmt = NULL;
  m_bb = NULL;
  m_init_gsi = m_gsi;
  gsi_prev (&m_init_gsi);
  m_preheader_bb = NULL;
  m_upwards_2limb = 0;
  m_upwards = false;
  m_var_msb = false;
  m_cast_conditional = false;
  m_bitfld_load = 0;
  m_loc = gimple_location (stmt);
  /* Calls and asms have dedicated lowering routines.  */
  if (is_gimple_call (stmt))
    {
      lower_call (NULL_TREE, stmt);
      return;
    }
  if (gimple_code (stmt) == GIMPLE_ASM)
    {
      lower_asm (stmt);
      return;
    }
  tree lhs = NULL_TREE, cmp_op1 = NULL_TREE, cmp_op2 = NULL_TREE;
  tree_code cmp_code = comparison_op (stmt, &cmp_op1, &cmp_op2);
  bool eq_p = (cmp_code == EQ_EXPR || cmp_code == NE_EXPR);
  bool mergeable_cast_p = false;
  bool final_cast_p = false;
  /* Classify casts: casts from an integral type to large/huge _BitInt
     are mergeable with the consuming limb loop (mergeable_cast_p),
     casts from large/huge _BitInt to a narrow integral/pointer type or
     VCEs are lowered right here (final_cast_p).  */
  if (gimple_assign_cast_p (stmt))
    {
      lhs = gimple_assign_lhs (stmt);
      tree rhs1 = gimple_assign_rhs1 (stmt);
      if (TREE_CODE (rhs1) == VIEW_CONVERT_EXPR)
	rhs1 = TREE_OPERAND (rhs1, 0);
      if (TREE_CODE (TREE_TYPE (lhs)) == BITINT_TYPE
	  && bitint_precision_kind (TREE_TYPE (lhs)) >= bitint_prec_large
	  && INTEGRAL_TYPE_P (TREE_TYPE (rhs1)))
	mergeable_cast_p = true;
      else if (TREE_CODE (TREE_TYPE (rhs1)) == BITINT_TYPE
	       && bitint_precision_kind (TREE_TYPE (rhs1)) >= bitint_prec_large
	       && (INTEGRAL_TYPE_P (TREE_TYPE (lhs))
		   || POINTER_TYPE_P (TREE_TYPE (lhs))
		   || gimple_assign_rhs_code (stmt) == VIEW_CONVERT_EXPR))
	{
	  final_cast_p = true;
	  if (((TREE_CODE (TREE_TYPE (lhs)) == INTEGER_TYPE
		&& TYPE_PRECISION (TREE_TYPE (lhs)) > MAX_FIXED_MODE_SIZE)
	       || (!INTEGRAL_TYPE_P (TREE_TYPE (lhs))
		   && !POINTER_TYPE_P (TREE_TYPE (lhs))))
	      && gimple_assign_rhs_code (stmt) == VIEW_CONVERT_EXPR)
	    {
	      /* Handle VIEW_CONVERT_EXPRs to not generally supported
		 huge INTEGER_TYPEs like uint256_t or uint512_t.  These
		 are usually emitted from memcpy folding and backends
		 support moves with them but that is usually it.
		 Similarly handle VCEs to vector/complex types etc.  */
	      gcc_assert (TREE_CODE (rhs1) == SSA_NAME);
	      if (SSA_NAME_IS_DEFAULT_DEF (rhs1)
		  && (!SSA_NAME_VAR (rhs1) || VAR_P (SSA_NAME_VAR (rhs1))))
		{
		  /* Uninitialized source: use a fresh default def of the
		     destination type directly.  */
		  tree var = create_tmp_reg (TREE_TYPE (lhs));
		  rhs1 = get_or_create_ssa_default_def (cfun, var);
		  gimple_assign_set_rhs1 (stmt, rhs1);
		  gimple_assign_set_rhs_code (stmt, SSA_NAME);
		}
	      else if (m_names == NULL
		       || !bitmap_bit_p (m_names, SSA_NAME_VERSION (rhs1)))
		{
		  /* Source handled at its use: must be a load; VCE the
		     memory reference itself, keeping the address space.  */
		  gimple *g = SSA_NAME_DEF_STMT (rhs1);
		  gcc_assert (gimple_assign_load_p (g));
		  tree mem = gimple_assign_rhs1 (g);
		  tree ltype = TREE_TYPE (lhs);
		  addr_space_t as = TYPE_ADDR_SPACE (TREE_TYPE (mem));
		  if (as != TYPE_ADDR_SPACE (ltype))
		    ltype
		      = build_qualified_type (ltype,
					      TYPE_QUALS (ltype)
					      | ENCODE_QUAL_ADDR_SPACE (as));
		  rhs1 = build1 (VIEW_CONVERT_EXPR, ltype, unshare_expr (mem));
		  gimple_assign_set_rhs1 (stmt, rhs1);
		}
	      else
		{
		  /* Otherwise VCE the partition variable.  */
		  int part = var_to_partition (m_map, rhs1);
		  gcc_assert (m_vars[part] != NULL_TREE);
		  rhs1 = build1 (VIEW_CONVERT_EXPR, TREE_TYPE (lhs),
				 m_vars[part]);
		  gimple_assign_set_rhs1 (stmt, rhs1);
		}
	      update_stmt (stmt);
	      return;
	    }
	  if (TREE_CODE (rhs1) == SSA_NAME
	      && (m_names == NULL
		  || !bitmap_bit_p (m_names, SSA_NAME_VERSION (rhs1))))
	    {
	      gimple *g = SSA_NAME_DEF_STMT (rhs1);
	      if (is_gimple_assign (g)
		  && gimple_assign_rhs_code (g) == IMAGPART_EXPR)
		{
		  tree rhs2 = TREE_OPERAND (gimple_assign_rhs1 (g), 0);
		  if (TREE_CODE (rhs2) == SSA_NAME
		      && (m_names == NULL
			  || !bitmap_bit_p (m_names, SSA_NAME_VERSION (rhs2))))
		    {
		      g = SSA_NAME_DEF_STMT (rhs2);
		      int ovf = optimizable_arith_overflow (g);
		      if (ovf == 2)
			/* If .{ADD,SUB,MUL}_OVERFLOW has both REALPART_EXPR
			   and IMAGPART_EXPR uses, where the latter is cast to
			   non-_BitInt, it will be optimized when handling
			   the REALPART_EXPR.  */
			return;
		      if (ovf == 1)
			{
			  lower_call (NULL_TREE, g);
			  return;
			}
		    }
		}
	    }
	}
      else if (TREE_CODE (TREE_TYPE (lhs)) == BITINT_TYPE
	       && bitint_precision_kind (TREE_TYPE (lhs)) >= bitint_prec_large
	       && !INTEGRAL_TYPE_P (TREE_TYPE (rhs1))
	       && !POINTER_TYPE_P (TREE_TYPE (rhs1))
	       && gimple_assign_rhs_code (stmt) == VIEW_CONVERT_EXPR)
	{
	  /* VCE from a non-integral/non-pointer type to large/huge
	     _BitInt: store into a VCE of the lhs partition variable.  */
	  int part = var_to_partition (m_map, lhs);
	  gcc_assert (m_vars[part] != NULL_TREE);
	  lhs = build1 (VIEW_CONVERT_EXPR, TREE_TYPE (rhs1), m_vars[part]);
	  insert_before (gimple_build_assign (lhs, rhs1));
	  return;
	}
    }
  /* Stores of SSA_NAMEs not tracked in m_names: lower the defining
     statement straight into the store destination and delete the
     store.  */
  if (gimple_store_p (stmt))
    {
      tree rhs1 = gimple_assign_rhs1 (stmt);
      if (TREE_CODE (rhs1) == SSA_NAME
	  && (m_names == NULL
	      || !bitmap_bit_p (m_names, SSA_NAME_VERSION (rhs1))))
	{
	  gimple *g = SSA_NAME_DEF_STMT (rhs1);
	  m_loc = gimple_location (g);
	  lhs = gimple_assign_lhs (stmt);
	  if (is_gimple_assign (g) && !mergeable_op (g))
	    switch (gimple_assign_rhs_code (g))
	      {
	      case LSHIFT_EXPR:
	      case RSHIFT_EXPR:
		lower_shift_stmt (lhs, g);
	      /* Shared tail: the value has been produced directly in LHS,
		 so unlink and remove the original store.  */
	      handled:
		m_gsi = gsi_for_stmt (stmt);
		unlink_stmt_vdef (stmt);
		release_ssa_name (gimple_vdef (stmt));
		gsi_remove (&m_gsi, true);
		return;
	      case MULT_EXPR:
	      case TRUNC_DIV_EXPR:
	      case EXACT_DIV_EXPR:
	      case TRUNC_MOD_EXPR:
		lower_muldiv_stmt (lhs, g);
		goto handled;
	      case FIX_TRUNC_EXPR:
		lower_float_conv_stmt (lhs, g);
		goto handled;
	      case REALPART_EXPR:
	      case IMAGPART_EXPR:
		lower_cplxpart_stmt (lhs, g);
		goto handled;
	      case VIEW_CONVERT_EXPR:
		{
		  tree rhs1 = gimple_assign_rhs1 (g);
		  rhs1 = TREE_OPERAND (rhs1, 0);
		  if (!INTEGRAL_TYPE_P (TREE_TYPE (rhs1))
		      && !POINTER_TYPE_P (TREE_TYPE (rhs1)))
		    {
		      /* Store through a VCE of the destination instead,
			 preserving qualifiers and address space.  */
		      tree ltype = TREE_TYPE (rhs1);
		      addr_space_t as = TYPE_ADDR_SPACE (TREE_TYPE (lhs));
		      ltype
			= build_qualified_type (ltype,
						TYPE_QUALS (TREE_TYPE (lhs))
						| ENCODE_QUAL_ADDR_SPACE (as));
		      lhs = build1 (VIEW_CONVERT_EXPR, ltype, lhs);
		      gimple_assign_set_lhs (stmt, lhs);
		      gimple_assign_set_rhs1 (stmt, rhs1);
		      gimple_assign_set_rhs_code (stmt, TREE_CODE (rhs1));
		      update_stmt (stmt);
		      return;
		    }
		}
		break;
	      default:
		break;
	      }
	  else if (optimizable_arith_overflow (g) == 3)
	    {
	      lower_call (lhs, g);
	      goto handled;
	    }
	  m_loc = gimple_location (stmt);
	}
    }
  /* Mergeable operations (plus stores, loads, equality comparisons,
     mergeable casts and PAREN_EXPR) are handled by one limb-by-limb
     lowering loop.  */
  if (mergeable_op (stmt)
      || gimple_store_p (stmt)
      || gimple_assign_load_p (stmt)
      || eq_p
      || mergeable_cast_p
      || (is_gimple_assign (stmt)
	  && gimple_assign_rhs_code (stmt) == PAREN_EXPR))
    {
      lhs = lower_mergeable_stmt (stmt, cmp_code, cmp_op1, cmp_op2);
      if (!eq_p)
	return;
    }
  else if (cmp_code != ERROR_MARK)
    lhs = lower_comparison_stmt (stmt, cmp_code, cmp_op1, cmp_op2);
  /* Rewrite the comparison consumer (GIMPLE_COND, COND_EXPR rhs or
     plain comparison assignment) against the computed boolean LHS.  */
  if (cmp_code != ERROR_MARK)
    {
      if (gimple_code (stmt) == GIMPLE_COND)
	{
	  gcond *cstmt = as_a <gcond *> (stmt);
	  gimple_cond_set_lhs (cstmt, lhs);
	  gimple_cond_set_rhs (cstmt, boolean_false_node);
	  gimple_cond_set_code (cstmt, cmp_code);
	  update_stmt (stmt);
	  return;
	}
      if (gimple_assign_rhs_code (stmt) == COND_EXPR)
	{
	  tree cond = build2 (cmp_code, boolean_type_node, lhs,
			      boolean_false_node);
	  gimple_assign_set_rhs1 (stmt, cond);
	  lhs = gimple_assign_lhs (stmt);
	  gcc_assert (TREE_CODE (TREE_TYPE (lhs)) != BITINT_TYPE
		      || (bitint_precision_kind (TREE_TYPE (lhs))
			  <= bitint_prec_middle));
	  update_stmt (stmt);
	  return;
	}
      gimple_assign_set_rhs1 (stmt, lhs);
      gimple_assign_set_rhs2 (stmt, boolean_false_node);
      gimple_assign_set_rhs_code (stmt, cmp_code);
      update_stmt (stmt);
      return;
    }
  /* Narrowing cast out of _BitInt: compose the result from the one or
     two least significant limbs of the source.  */
  if (final_cast_p)
    {
      tree lhs_type = TREE_TYPE (lhs);
      /* Add support for 3 or more limbs filled in from normal integral
	 type if this assert fails.  If no target chooses limb mode smaller
	 than half of largest supported normal integral type, this will not
	 be needed.  */
      gcc_assert (TYPE_PRECISION (lhs_type) <= 2 * limb_prec);
      gimple *g;
      if ((TREE_CODE (lhs_type) == BITINT_TYPE
	   && bitint_precision_kind (lhs_type) == bitint_prec_middle)
	  || POINTER_TYPE_P (lhs_type))
	lhs_type = build_nonstandard_integer_type (TYPE_PRECISION (lhs_type),
						   TYPE_UNSIGNED (lhs_type));
      m_data_cnt = 0;
      tree rhs1 = gimple_assign_rhs1 (stmt);
      unsigned int prec = TYPE_PRECISION (TREE_TYPE (rhs1));
      unsigned int cnt = CEIL (prec, limb_prec);
      tree r1 = handle_operand (rhs1, size_int (bitint_big_endian
						? cnt - 1 : 0));
      if (!useless_type_conversion_p (lhs_type, TREE_TYPE (r1)))
	r1 = add_cast (lhs_type, r1);
      if (TYPE_PRECISION (lhs_type) > limb_prec)
	{
	  /* Two limbs needed: fetch the second one, shift it into place
	     and OR it in.  */
	  m_data_cnt = 0;
	  m_first = false;
	  tree r2 = handle_operand (rhs1, size_int (bitint_big_endian
						    ? cnt - 2 : 1));
	  r2 = add_cast (lhs_type, r2);
	  g = gimple_build_assign (make_ssa_name (lhs_type), LSHIFT_EXPR, r2,
				   build_int_cst (unsigned_type_node,
						  limb_prec));
	  insert_before (g);
	  g = gimple_build_assign (make_ssa_name (lhs_type), BIT_IOR_EXPR, r1,
				   gimple_assign_lhs (g));
	  insert_before (g);
	  r1 = gimple_assign_lhs (g);
	}
      if (lhs_type != TREE_TYPE (lhs))
	g = gimple_build_assign (lhs, NOP_EXPR, r1);
      else
	g = gimple_build_assign (lhs, r1);
      gsi_replace (&m_gsi, g, true);
      return;
    }
  /* Remaining non-mergeable operations each have their own helper.  */
  if (is_gimple_assign (stmt))
    switch (gimple_assign_rhs_code (stmt))
      {
      case LSHIFT_EXPR:
      case RSHIFT_EXPR:
	lower_shift_stmt (NULL_TREE, stmt);
	return;
      case MULT_EXPR:
      case TRUNC_DIV_EXPR:
      case EXACT_DIV_EXPR:
      case TRUNC_MOD_EXPR:
	lower_muldiv_stmt (NULL_TREE, stmt);
	return;
      case FIX_TRUNC_EXPR:
      case FLOAT_EXPR:
	lower_float_conv_stmt (NULL_TREE, stmt);
	return;
      case REALPART_EXPR:
      case IMAGPART_EXPR:
	lower_cplxpart_stmt (NULL_TREE, stmt);
	return;
      case COMPLEX_EXPR:
	lower_complexexpr_stmt (stmt);
	return;
      default:
	break;
      }
  gcc_unreachable ();
}
6469 :
6470 : /* Helper for walk_non_aliased_vuses. Determine if we arrived at
6471 : the desired memory state. */
6472 :
6473 : void *
6474 2180 : vuse_eq (ao_ref *, tree vuse1, void *data)
6475 : {
6476 2180 : tree vuse2 = (tree) data;
6477 2180 : if (vuse1 == vuse2)
6478 818 : return data;
6479 :
6480 : return NULL;
6481 : }
6482 :
6483 : /* Return true if STMT uses a library function and needs to take
6484 : address of its inputs. We need to avoid bit-fields in those
6485 : cases. Similarly, we need to avoid overlap between destination
6486 : and source limb arrays. */
6487 :
6488 : bool
6489 14984 : stmt_needs_operand_addr (gimple *stmt)
6490 : {
6491 14984 : if (is_gimple_assign (stmt))
6492 10230 : switch (gimple_assign_rhs_code (stmt))
6493 : {
6494 585 : case MULT_EXPR:
6495 585 : case TRUNC_DIV_EXPR:
6496 585 : case EXACT_DIV_EXPR:
6497 585 : case TRUNC_MOD_EXPR:
6498 585 : case FLOAT_EXPR:
6499 585 : return true;
6500 : default:
6501 : break;
6502 : }
6503 4754 : else if (gimple_call_internal_p (stmt, IFN_MUL_OVERFLOW)
6504 4754 : || gimple_call_internal_p (stmt, IFN_UBSAN_CHECK_MUL))
6505 : return true;
6506 : return false;
6507 : }
6508 :
6509 : /* Dominator walker used to discover which large/huge _BitInt
6510 : loads could be sunk into all their uses. */
6511 :
6512 592 : class bitint_dom_walker : public dom_walker
6513 : {
6514 : public:
6515 296 : bitint_dom_walker (bitmap names, bitmap loads)
6516 592 : : dom_walker (CDI_DOMINATORS), m_names (names), m_loads (loads) {}
6517 :
6518 : edge before_dom_children (basic_block) final override;
6519 :
6520 : private:
6521 : bitmap m_names, m_loads;
6522 : };
6523 :
edge
bitint_dom_walker::before_dom_children (basic_block bb)
{
  /* Determine the virtual operand live on entry to BB: the virtual PHI
     result if there is one, otherwise the value recorded in the
     immediate dominator's aux field by an earlier visit.  */
  gphi *phi = get_virtual_phi (bb);
  tree vop;
  if (phi)
    vop = gimple_phi_result (phi);
  else if (bb == ENTRY_BLOCK_PTR_FOR_FN (cfun))
    vop = NULL_TREE;
  else
    vop = (tree) get_immediate_dominator (CDI_DOMINATORS, bb)->aux;

  auto_vec<tree, 16> worklist;
  for (gimple_stmt_iterator gsi = gsi_start_bb (bb);
       !gsi_end_p (gsi); gsi_next (&gsi))
    {
      gimple *stmt = gsi_stmt (gsi);
      if (is_gimple_debug (stmt))
	continue;

      if (!vop && gimple_vuse (stmt))
	vop = gimple_vuse (stmt);

      /* CVOP is the virtual operand before this stmt, VOP after it.  */
      tree cvop = vop;
      if (gimple_vdef (stmt))
	vop = gimple_vdef (stmt);

      tree lhs = gimple_get_lhs (stmt);
      if (lhs
	  && TREE_CODE (lhs) == SSA_NAME
	  && TREE_CODE (TREE_TYPE (lhs)) == BITINT_TYPE
	  && bitint_precision_kind (TREE_TYPE (lhs)) >= bitint_prec_large
	  && !bitmap_bit_p (m_names, SSA_NAME_VERSION (lhs)))
	/* If lhs of stmt is large/huge _BitInt SSA_NAME not in m_names,
	   it means it will be handled in a loop or straight line code
	   at the location of its (ultimate) immediate use, so for
	   vop checking purposes check these only at the ultimate
	   immediate use.  */
	continue;

      /* Seed the worklist with the large/huge _BitInt uses of STMT.  */
      ssa_op_iter oi;
      use_operand_p use_p;
      FOR_EACH_SSA_USE_OPERAND (use_p, stmt, oi, SSA_OP_USE)
	{
	  tree s = USE_FROM_PTR (use_p);
	  if (TREE_CODE (TREE_TYPE (s)) == BITINT_TYPE
	      && bitint_precision_kind (TREE_TYPE (s)) >= bitint_prec_large)
	    worklist.safe_push (s);
	}

      bool needs_operand_addr = stmt_needs_operand_addr (stmt);
      while (worklist.length () > 0)
	{
	  tree s = worklist.pop ();

	  /* Names not in m_names are sunk to this use; recurse into
	     their operands instead.  */
	  if (!bitmap_bit_p (m_names, SSA_NAME_VERSION (s)))
	    {
	      gimple *g = SSA_NAME_DEF_STMT (s);
	      needs_operand_addr |= stmt_needs_operand_addr (g);
	      FOR_EACH_SSA_USE_OPERAND (use_p, g, oi, SSA_OP_USE)
		{
		  tree s2 = USE_FROM_PTR (use_p);
		  if (TREE_CODE (TREE_TYPE (s2)) == BITINT_TYPE
		      && (bitint_precision_kind (TREE_TYPE (s2))
			  >= bitint_prec_large))
		    worklist.safe_push (s2);
		}
	      continue;
	    }
	  /* Look through casts whose source is itself a sinkable load.  */
	  if (!SSA_NAME_OCCURS_IN_ABNORMAL_PHI (s)
	      && gimple_assign_cast_p (SSA_NAME_DEF_STMT (s)))
	    {
	      tree rhs = gimple_assign_rhs1 (SSA_NAME_DEF_STMT (s));
	      if (TREE_CODE (rhs) == SSA_NAME
		  && bitmap_bit_p (m_loads, SSA_NAME_VERSION (rhs)))
		s = rhs;
	      else
		continue;
	    }
	  else if (!bitmap_bit_p (m_loads, SSA_NAME_VERSION (s)))
	    continue;

	  gimple *g = SSA_NAME_DEF_STMT (s);
	  tree rhs1 = gimple_assign_rhs1 (g);
	  /* Loads from bit-fields can't be passed by address to libgcc
	     helpers unless they start at a limb boundary.  */
	  if (needs_operand_addr
	      && TREE_CODE (rhs1) == COMPONENT_REF
	      && DECL_BIT_FIELD_TYPE (TREE_OPERAND (rhs1, 1)))
	    {
	      tree fld = TREE_OPERAND (rhs1, 1);
	      /* For little-endian, we can allow as inputs bit-fields
		 which start at a limb boundary.  */
	      if (!bitint_big_endian
		  && DECL_OFFSET_ALIGN (fld) >= TYPE_ALIGN (TREE_TYPE (rhs1))
		  && tree_fits_uhwi_p (DECL_FIELD_BIT_OFFSET (fld))
		  && (tree_to_uhwi (DECL_FIELD_BIT_OFFSET (fld))
		      % limb_prec) == 0)
		;
	      else
		{
		  bitmap_clear_bit (m_loads, SSA_NAME_VERSION (s));
		  continue;
		}
	    }

	  /* Verify the loaded memory is unchanged between the load and
	     this use; otherwise sinking the load would read a clobbered
	     value, so drop it from the candidate set.  */
	  ao_ref ref;
	  ao_ref_init (&ref, rhs1);
	  tree lvop = gimple_vuse (g);
	  unsigned limit = 64;
	  tree vuse = cvop;
	  if (vop != cvop
	      && is_gimple_assign (stmt)
	      && gimple_store_p (stmt)
	      && (needs_operand_addr
		  || !operand_equal_p (lhs, gimple_assign_rhs1 (g), 0)))
	    vuse = vop;
	  if (vuse != lvop
	      && walk_non_aliased_vuses (&ref, vuse, false, vuse_eq,
					 NULL, NULL, NULL, limit, lvop) == NULL)
	    bitmap_clear_bit (m_loads, SSA_NAME_VERSION (s));
	}
    }

  /* Record the virtual operand live on exit for dominated blocks.  */
  bb->aux = (void *) vop;
  return NULL;
}
6649 :
6650 : }
6651 :
6652 : /* Replacement for normal processing of STMT in tree-ssa-coalesce.cc
6653 : build_ssa_conflict_graph.
6654 : The differences are:
6655 : 1) don't process assignments with large/huge _BitInt lhs not in NAMES
6656 : 2) for large/huge _BitInt multiplication/division/modulo process def
6657 : only after processing uses rather than before to make uses conflict
6658 : with the definition
6659 : 3) for large/huge _BitInt uses not in NAMES mark the uses of their
6660 : SSA_NAME_DEF_STMT (recursively), because those uses will be sunk into
6661 : the final statement. */
6662 :
void
build_bitint_stmt_ssa_conflicts (gimple *stmt, live_track *live,
				 ssa_conflicts *graph, bitmap names,
				 void (*def) (live_track *, tree,
					      ssa_conflicts *),
				 void (*use) (live_track *, tree),
				 void (*clear) (live_track *, tree))
{
  /* When MULDIV_P, the lhs def is processed after the uses so that it
     conflicts with all of them (the result may not overlap sources).  */
  bool muldiv_p = false;
  tree lhs = NULL_TREE;
  if (is_gimple_assign (stmt))
    {
      lhs = gimple_assign_lhs (stmt);
      if (TREE_CODE (lhs) == SSA_NAME)
	{
	  tree type = TREE_TYPE (lhs);
	  if (TREE_CODE (type) == COMPLEX_TYPE)
	    type = TREE_TYPE (type);
	  if (TREE_CODE (type) == BITINT_TYPE
	      && bitint_precision_kind (type) >= bitint_prec_large)
	    {
	      if (!bitmap_bit_p (names, SSA_NAME_VERSION (lhs)))
		return;

	      /* A copy between 2 partitions does not introduce an interference
		 by itself.  If they did, you would never be able to coalesce
		 two things which are copied.  If the two variables really do
		 conflict, they will conflict elsewhere in the program.

		 This is handled by simply removing the SRC of the copy from
		 the live list, and processing the stmt normally.

		 Don't do this if lhs is not in names though, in such cases
		 it is actually used at some point later in the basic
		 block.  */
	      if (gimple_assign_copy_p (stmt))
		{
		  tree rhs1 = gimple_assign_rhs1 (stmt);
		  if (TREE_CODE (rhs1) == SSA_NAME)
		    clear (live, rhs1);
		}

	      switch (gimple_assign_rhs_code (stmt))
		{
		case MULT_EXPR:
		case TRUNC_DIV_EXPR:
		case EXACT_DIV_EXPR:
		case TRUNC_MOD_EXPR:
		  muldiv_p = true;
		  /* FALLTHRU */
		default:
		  break;
		}
	    }
	}
    }
  else if (bitint_big_endian
	   && is_gimple_call (stmt)
	   && gimple_call_internal_p (stmt))
    switch (gimple_call_internal_fn (stmt))
      {
      case IFN_ADD_OVERFLOW:
      case IFN_SUB_OVERFLOW:
      case IFN_UBSAN_CHECK_ADD:
      case IFN_UBSAN_CHECK_SUB:
      case IFN_MUL_OVERFLOW:
      case IFN_UBSAN_CHECK_MUL:
	lhs = gimple_call_lhs (stmt);
	if (lhs)
	  muldiv_p = true;
	break;
      default:
	break;
      }

  auto_vec<tree, 16> worklist;
  ssa_op_iter iter;
  tree var;
  /* On little-endian, mergeable ops process limbs from 0 up so except
     for multiplication/division/modulo there is no risk in using the
     same underlying variable for lhs and some operand, even when casts
     are involved, the lhs limb is stored only after processing the source
     limbs with the same index.
     For multiplication/division/modulo, the libgcc library function requires
     no aliasing between result and sources.
     On big-endian, even mergeable ops limb processing can be problematic
     though, because it can apply various index corrections e.g. when there
     is a cast from operand with different number of limbs.  So, make the
     lhs conflict with all the operands which are (for now virtually) used on
     the current stmt if there is any mismatch in the number of limbs between
     operands and the lhs.  */
  if (bitint_big_endian && lhs && !muldiv_p)
    {
      tree ltype = TREE_TYPE (lhs);
      if (TREE_CODE (ltype) == COMPLEX_TYPE)
	muldiv_p = true;
      else if (TREE_CODE (lhs) == SSA_NAME
	       && TREE_CODE (ltype) == BITINT_TYPE
	       && bitint_precision_kind (ltype) >= bitint_prec_large)
	{
	  /* Compare limb counts of the lhs against each (possibly
	     transitively sunk) operand; any mismatch forces the
	     muldiv-style conflict handling.  */
	  unsigned lnelts = CEIL (TYPE_PRECISION (ltype), limb_prec);
	  FOR_EACH_SSA_TREE_OPERAND (var, stmt, iter, SSA_OP_USE)
	    {
	      tree type = TREE_TYPE (var);
	      if (TREE_CODE (type) == COMPLEX_TYPE)
		type = TREE_TYPE (type);
	      if (TREE_CODE (type) == BITINT_TYPE
		  && bitint_precision_kind (type) >= bitint_prec_large)
		{
		  if (bitmap_bit_p (names, SSA_NAME_VERSION (var)))
		    {
		      unsigned nelts = CEIL (TYPE_PRECISION (type), limb_prec);
		      if (TREE_CODE (TREE_TYPE (var)) == COMPLEX_TYPE
			  || lnelts != nelts)
			{
			  muldiv_p = true;
			  break;
			}
		    }
		  else
		    worklist.safe_push (var);
		}
	    }

	  while (!muldiv_p && worklist.length () > 0)
	    {
	      tree s = worklist.pop ();
	      FOR_EACH_SSA_TREE_OPERAND (var, SSA_NAME_DEF_STMT (s), iter,
					 SSA_OP_USE)
		{
		  tree type = TREE_TYPE (var);
		  if (TREE_CODE (type) == COMPLEX_TYPE)
		    type = TREE_TYPE (type);
		  if (TREE_CODE (type) == BITINT_TYPE
		      && bitint_precision_kind (type) >= bitint_prec_large)
		    {
		      if (bitmap_bit_p (names, SSA_NAME_VERSION (var)))
			{
			  unsigned nelts = CEIL (TYPE_PRECISION (type),
						 limb_prec);
			  if (TREE_CODE (TREE_TYPE (var)) == COMPLEX_TYPE
			      || lnelts != nelts)
			    {
			      muldiv_p = true;
			      break;
			    }
			}
		      else
			worklist.safe_push (var);
		    }
		}
	    }
	  worklist.truncate (0);
	}
    }

  if (!muldiv_p)
    {
      /* For stmts with more than one SSA_NAME definition pretend all the
	 SSA_NAME outputs but the first one are live at this point, so
	 that conflicts are added in between all those even when they are
	 actually not really live after the asm, because expansion might
	 copy those into pseudos after the asm and if multiple outputs
	 share the same partition, it might overwrite those that should
	 be live.  E.g.
	 asm volatile (".." : "=r" (a) : "=r" (b) : "0" (a), "1" (a));
	 return a;
	 See PR70593.  */
      bool first = true;
      FOR_EACH_SSA_TREE_OPERAND (var, stmt, iter, SSA_OP_DEF)
	if (first)
	  first = false;
	else
	  use (live, var);

      FOR_EACH_SSA_TREE_OPERAND (var, stmt, iter, SSA_OP_DEF)
	def (live, var, graph);
    }

  /* Process large/huge _BitInt uses; names not in NAMES are sunk into
     this stmt, so recurse into their defining stmt's uses instead.  */
  FOR_EACH_SSA_TREE_OPERAND (var, stmt, iter, SSA_OP_USE)
    {
      tree type = TREE_TYPE (var);
      if (TREE_CODE (type) == COMPLEX_TYPE)
	type = TREE_TYPE (type);
      if (TREE_CODE (type) == BITINT_TYPE
	  && bitint_precision_kind (type) >= bitint_prec_large)
	{
	  if (bitmap_bit_p (names, SSA_NAME_VERSION (var)))
	    use (live, var);
	  else
	    worklist.safe_push (var);
	}
    }

  while (worklist.length () > 0)
    {
      tree s = worklist.pop ();
      FOR_EACH_SSA_TREE_OPERAND (var, SSA_NAME_DEF_STMT (s), iter, SSA_OP_USE)
	{
	  tree type = TREE_TYPE (var);
	  if (TREE_CODE (type) == COMPLEX_TYPE)
	    type = TREE_TYPE (type);
	  if (TREE_CODE (type) == BITINT_TYPE
	      && bitint_precision_kind (type) >= bitint_prec_large)
	    {
	      if (bitmap_bit_p (names, SSA_NAME_VERSION (var)))
		use (live, var);
	      else
		worklist.safe_push (var);
	    }
	}
    }

  /* Deferred def: makes the lhs conflict with all uses above.  */
  if (muldiv_p)
    def (live, lhs, graph);
}
6878 :
6879 : /* If STMT is .{ADD,SUB,MUL}_OVERFLOW with INTEGER_CST arguments,
6880 : return the largest bitint_prec_kind of them, otherwise return
6881 : bitint_prec_small. */
6882 :
6883 : static bitint_prec_kind
6884 190368 : arith_overflow_arg_kind (gimple *stmt)
6885 : {
6886 190368 : bitint_prec_kind ret = bitint_prec_small;
6887 190368 : if (is_gimple_call (stmt) && gimple_call_internal_p (stmt))
6888 87847 : switch (gimple_call_internal_fn (stmt))
6889 : {
6890 : case IFN_ADD_OVERFLOW:
6891 : case IFN_SUB_OVERFLOW:
6892 : case IFN_MUL_OVERFLOW:
6893 223005 : for (int i = 0; i < 2; ++i)
6894 : {
6895 148670 : tree a = gimple_call_arg (stmt, i);
6896 148670 : if (TREE_CODE (a) == INTEGER_CST
6897 148670 : && TREE_CODE (TREE_TYPE (a)) == BITINT_TYPE)
6898 : {
6899 5928 : bitint_prec_kind kind = bitint_precision_kind (TREE_TYPE (a));
6900 148670 : ret = MAX (ret, kind);
6901 : }
6902 : }
6903 : break;
6904 : default:
6905 : break;
6906 : }
6907 190368 : return ret;
6908 : }
6909 :
6910 : /* Entry point for _BitInt(N) operation lowering during optimization. */
6911 :
6912 : static unsigned int
6913 1481008 : gimple_lower_bitint (void)
6914 : {
6915 1481008 : small_max_prec = mid_min_prec = large_min_prec = huge_min_prec = 0;
6916 1481008 : limb_prec = abi_limb_prec = 0;
6917 1481008 : bitint_big_endian = false;
6918 :
6919 1481008 : unsigned int i;
6920 63231660 : for (i = 0; i < num_ssa_names; ++i)
6921 : {
6922 61757811 : tree s = ssa_name (i);
6923 61757811 : if (s == NULL)
6924 13212331 : continue;
6925 48545480 : tree type = TREE_TYPE (s);
6926 48545480 : if (TREE_CODE (type) == COMPLEX_TYPE)
6927 : {
6928 180004 : if (arith_overflow_arg_kind (SSA_NAME_DEF_STMT (s))
6929 : != bitint_prec_small)
6930 : break;
6931 179891 : type = TREE_TYPE (type);
6932 : }
6933 48545367 : if (TREE_CODE (type) == BITINT_TYPE
6934 48545367 : && bitint_precision_kind (type) != bitint_prec_small)
6935 : break;
6936 : /* We need to also rewrite stores of large/huge _BitInt INTEGER_CSTs
6937 : into memory. Such functions could have no large/huge SSA_NAMEs. */
6938 48538380 : if (SSA_NAME_IS_VIRTUAL_OPERAND (s))
6939 : {
6940 21181186 : gimple *g = SSA_NAME_DEF_STMT (s);
6941 21181186 : if (is_gimple_assign (g) && gimple_store_p (g))
6942 : {
6943 10718619 : tree t = gimple_assign_rhs1 (g);
6944 10718619 : if (TREE_CODE (TREE_TYPE (t)) == BITINT_TYPE
6945 10718619 : && (bitint_precision_kind (TREE_TYPE (t))
6946 : >= bitint_prec_large))
6947 : break;
6948 : }
6949 : }
6950 : /* Similarly, e.g. with -frounding-math casts from _BitInt INTEGER_CSTs
6951 : to floating point types need to be rewritten. */
6952 27357194 : else if (SCALAR_FLOAT_TYPE_P (type))
6953 : {
6954 2293792 : gimple *g = SSA_NAME_DEF_STMT (s);
6955 2293792 : if (is_gimple_assign (g) && gimple_assign_rhs_code (g) == FLOAT_EXPR)
6956 : {
6957 127672 : tree t = gimple_assign_rhs1 (g);
6958 127672 : if (TREE_CODE (t) == INTEGER_CST
6959 110 : && TREE_CODE (TREE_TYPE (t)) == BITINT_TYPE
6960 127673 : && (bitint_precision_kind (TREE_TYPE (t))
6961 : != bitint_prec_small))
6962 : break;
6963 : }
6964 : }
6965 : }
6966 2962016 : if (i == num_ssa_names)
6967 : return 0;
6968 :
6969 7159 : basic_block bb;
6970 7159 : auto_vec<gimple *, 4> switch_statements;
6971 45423 : FOR_EACH_BB_FN (bb, cfun)
6972 : {
6973 114054 : if (gswitch *swtch = safe_dyn_cast <gswitch *> (*gsi_last_bb (bb)))
6974 : {
6975 23 : tree idx = gimple_switch_index (swtch);
6976 23 : if (TREE_CODE (TREE_TYPE (idx)) != BITINT_TYPE
6977 23 : || bitint_precision_kind (TREE_TYPE (idx)) < bitint_prec_large)
6978 12 : continue;
6979 :
6980 11 : if (optimize)
6981 6 : group_case_labels_stmt (swtch);
6982 11 : if (gimple_switch_num_labels (swtch) == 1)
6983 : {
6984 1 : single_succ_edge (bb)->flags |= EDGE_FALLTHRU;
6985 1 : gimple_stmt_iterator gsi = gsi_for_stmt (swtch);
6986 1 : gsi_remove (&gsi, true);
6987 : }
6988 : else
6989 10 : switch_statements.safe_push (swtch);
6990 : }
6991 : }
6992 :
6993 7159 : if (!switch_statements.is_empty ())
6994 : {
6995 10 : bool expanded = false;
6996 10 : gimple *stmt;
6997 10 : unsigned int j;
6998 10 : i = 0;
6999 20 : FOR_EACH_VEC_ELT (switch_statements, j, stmt)
7000 : {
7001 10 : gswitch *swtch = as_a<gswitch *> (stmt);
7002 10 : tree_switch_conversion::switch_decision_tree dt (swtch);
7003 10 : expanded |= dt.analyze_switch_statement ();
7004 10 : }
7005 :
7006 10 : if (expanded)
7007 : {
7008 10 : free_dominance_info (CDI_DOMINATORS);
7009 10 : free_dominance_info (CDI_POST_DOMINATORS);
7010 10 : mark_virtual_operands_for_renaming (cfun);
7011 10 : cleanup_tree_cfg (TODO_update_ssa);
7012 : }
7013 : }
7014 :
7015 7159 : struct bitint_large_huge large_huge;
7016 7159 : bool has_large_huge_parm_result = false;
7017 7159 : bool has_large_huge = false;
7018 7159 : unsigned int ret = 0, first_large_huge = ~0U;
7019 7159 : bool edge_insertions = false;
7020 123631 : for (; i < num_ssa_names; ++i)
7021 : {
7022 116472 : tree s = ssa_name (i);
7023 116472 : if (s == NULL)
7024 2730 : continue;
7025 113742 : tree type = TREE_TYPE (s);
7026 113742 : if (TREE_CODE (type) == COMPLEX_TYPE)
7027 : {
7028 5197 : if (arith_overflow_arg_kind (SSA_NAME_DEF_STMT (s))
7029 : >= bitint_prec_large)
7030 1957 : has_large_huge = true;
7031 5197 : type = TREE_TYPE (type);
7032 : }
7033 113742 : if (TREE_CODE (type) == BITINT_TYPE
7034 113742 : && bitint_precision_kind (type) >= bitint_prec_large)
7035 : {
7036 34922 : if (first_large_huge == ~0U)
7037 5625 : first_large_huge = i;
7038 34922 : gimple *stmt = SSA_NAME_DEF_STMT (s), *g;
7039 34922 : gimple_stmt_iterator gsi;
7040 34922 : tree_code rhs_code;
7041 : /* Unoptimize certain constructs to simpler alternatives to
7042 : avoid having to lower all of them. */
7043 34922 : if (is_gimple_assign (stmt) && gimple_bb (stmt))
7044 21727 : switch (rhs_code = gimple_assign_rhs_code (stmt))
7045 : {
7046 : default:
7047 : break;
7048 335 : case MULT_EXPR:
7049 335 : case TRUNC_DIV_EXPR:
7050 335 : case EXACT_DIV_EXPR:
7051 335 : case TRUNC_MOD_EXPR:
7052 335 : if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (s))
7053 : {
7054 2 : location_t loc = gimple_location (stmt);
7055 2 : gsi = gsi_for_stmt (stmt);
7056 2 : tree rhs1 = gimple_assign_rhs1 (stmt);
7057 2 : tree rhs2 = gimple_assign_rhs2 (stmt);
7058 : /* For multiplication and division with (ab)
7059 : lhs and one or both operands force the operands
7060 : into new SSA_NAMEs to avoid coalescing failures. */
7061 2 : if (TREE_CODE (rhs1) == SSA_NAME
7062 2 : && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (rhs1))
7063 : {
7064 2 : first_large_huge = 0;
7065 2 : tree t = make_ssa_name (TREE_TYPE (rhs1));
7066 2 : g = gimple_build_assign (t, SSA_NAME, rhs1);
7067 2 : gsi_insert_before (&gsi, g, GSI_SAME_STMT);
7068 2 : gimple_set_location (g, loc);
7069 2 : gimple_assign_set_rhs1 (stmt, t);
7070 2 : if (rhs1 == rhs2)
7071 : {
7072 0 : gimple_assign_set_rhs2 (stmt, t);
7073 0 : rhs2 = t;
7074 : }
7075 2 : update_stmt (stmt);
7076 : }
7077 2 : if (TREE_CODE (rhs2) == SSA_NAME
7078 2 : && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (rhs2))
7079 : {
7080 0 : first_large_huge = 0;
7081 0 : tree t = make_ssa_name (TREE_TYPE (rhs2));
7082 0 : g = gimple_build_assign (t, SSA_NAME, rhs2);
7083 0 : gsi_insert_before (&gsi, g, GSI_SAME_STMT);
7084 0 : gimple_set_location (g, loc);
7085 0 : gimple_assign_set_rhs2 (stmt, t);
7086 0 : update_stmt (stmt);
7087 : }
7088 : }
7089 : break;
7090 3 : case LROTATE_EXPR:
7091 3 : case RROTATE_EXPR:
7092 3 : {
7093 3 : first_large_huge = 0;
7094 3 : location_t loc = gimple_location (stmt);
7095 3 : gsi = gsi_for_stmt (stmt);
7096 3 : tree rhs1 = gimple_assign_rhs1 (stmt);
7097 3 : tree type = TREE_TYPE (rhs1);
7098 3 : tree n = gimple_assign_rhs2 (stmt), m;
7099 3 : tree p = build_int_cst (TREE_TYPE (n),
7100 3 : TYPE_PRECISION (type));
7101 3 : if (TREE_CODE (n) == INTEGER_CST)
7102 : {
7103 0 : if (integer_zerop (n))
7104 : m = n;
7105 : else
7106 0 : m = fold_build2 (MINUS_EXPR, TREE_TYPE (n), p, n);
7107 : }
7108 : else
7109 : {
7110 3 : tree tem = make_ssa_name (TREE_TYPE (n));
7111 3 : g = gimple_build_assign (tem, MINUS_EXPR, p, n);
7112 3 : gsi_insert_before (&gsi, g, GSI_SAME_STMT);
7113 3 : gimple_set_location (g, loc);
7114 3 : m = make_ssa_name (TREE_TYPE (n));
7115 3 : g = gimple_build_assign (m, TRUNC_MOD_EXPR, tem, p);
7116 3 : gsi_insert_before (&gsi, g, GSI_SAME_STMT);
7117 3 : gimple_set_location (g, loc);
7118 : }
7119 3 : if (!TYPE_UNSIGNED (type))
7120 : {
7121 0 : tree utype = build_bitint_type (TYPE_PRECISION (type),
7122 : 1);
7123 0 : if (TREE_CODE (rhs1) == INTEGER_CST)
7124 0 : rhs1 = fold_convert (utype, rhs1);
7125 : else
7126 : {
7127 0 : tree t = make_ssa_name (type);
7128 0 : g = gimple_build_assign (t, NOP_EXPR, rhs1);
7129 0 : gsi_insert_before (&gsi, g, GSI_SAME_STMT);
7130 0 : gimple_set_location (g, loc);
7131 : }
7132 : }
7133 4 : g = gimple_build_assign (make_ssa_name (TREE_TYPE (rhs1)),
7134 : rhs_code == LROTATE_EXPR
7135 : ? LSHIFT_EXPR : RSHIFT_EXPR,
7136 : rhs1, n);
7137 3 : gsi_insert_before (&gsi, g, GSI_SAME_STMT);
7138 3 : gimple_set_location (g, loc);
7139 3 : tree op1 = gimple_assign_lhs (g);
7140 4 : g = gimple_build_assign (make_ssa_name (TREE_TYPE (rhs1)),
7141 : rhs_code == LROTATE_EXPR
7142 : ? RSHIFT_EXPR : LSHIFT_EXPR,
7143 : rhs1, m);
7144 3 : gsi_insert_before (&gsi, g, GSI_SAME_STMT);
7145 3 : gimple_set_location (g, loc);
7146 3 : tree op2 = gimple_assign_lhs (g);
7147 3 : tree lhs = gimple_assign_lhs (stmt);
7148 3 : if (!TYPE_UNSIGNED (type))
7149 : {
7150 0 : g = gimple_build_assign (make_ssa_name (TREE_TYPE (op1)),
7151 : BIT_IOR_EXPR, op1, op2);
7152 0 : gsi_insert_before (&gsi, g, GSI_SAME_STMT);
7153 0 : gimple_set_location (g, loc);
7154 0 : g = gimple_build_assign (lhs, NOP_EXPR,
7155 : gimple_assign_lhs (g));
7156 : }
7157 : else
7158 3 : g = gimple_build_assign (lhs, BIT_IOR_EXPR, op1, op2);
7159 3 : gsi_replace (&gsi, g, true);
7160 3 : gimple_set_location (g, loc);
7161 : }
7162 3 : break;
7163 21 : case ABS_EXPR:
7164 21 : case ABSU_EXPR:
7165 21 : case MIN_EXPR:
7166 21 : case MAX_EXPR:
7167 21 : case COND_EXPR:
7168 21 : first_large_huge = 0;
7169 21 : gsi = gsi_for_stmt (stmt);
7170 21 : tree lhs = gimple_assign_lhs (stmt);
7171 21 : tree rhs1 = gimple_assign_rhs1 (stmt), rhs2 = NULL_TREE;
7172 21 : location_t loc = gimple_location (stmt);
7173 21 : if (rhs_code == ABS_EXPR)
7174 4 : g = gimple_build_cond (LT_EXPR, rhs1,
7175 4 : build_zero_cst (TREE_TYPE (rhs1)),
7176 : NULL_TREE, NULL_TREE);
7177 17 : else if (rhs_code == ABSU_EXPR)
7178 : {
7179 8 : rhs2 = make_ssa_name (TREE_TYPE (lhs));
7180 8 : g = gimple_build_assign (rhs2, NOP_EXPR, rhs1);
7181 8 : gsi_insert_before (&gsi, g, GSI_SAME_STMT);
7182 8 : gimple_set_location (g, loc);
7183 8 : g = gimple_build_cond (LT_EXPR, rhs1,
7184 8 : build_zero_cst (TREE_TYPE (rhs1)),
7185 : NULL_TREE, NULL_TREE);
7186 8 : rhs1 = rhs2;
7187 : }
7188 9 : else if (rhs_code == MIN_EXPR || rhs_code == MAX_EXPR)
7189 : {
7190 9 : rhs2 = gimple_assign_rhs2 (stmt);
7191 9 : if (TREE_CODE (rhs1) == INTEGER_CST)
7192 0 : std::swap (rhs1, rhs2);
7193 9 : g = gimple_build_cond (LT_EXPR, rhs1, rhs2,
7194 : NULL_TREE, NULL_TREE);
7195 9 : if (rhs_code == MAX_EXPR)
7196 5 : std::swap (rhs1, rhs2);
7197 : }
7198 : else
7199 : {
7200 0 : g = gimple_build_cond (NE_EXPR, rhs1,
7201 0 : build_zero_cst (TREE_TYPE (rhs1)),
7202 : NULL_TREE, NULL_TREE);
7203 0 : rhs1 = gimple_assign_rhs2 (stmt);
7204 0 : rhs2 = gimple_assign_rhs3 (stmt);
7205 : }
7206 21 : gsi_insert_before (&gsi, g, GSI_SAME_STMT);
7207 21 : gimple_set_location (g, loc);
7208 21 : edge e1 = split_block (gsi_bb (gsi), g);
7209 21 : edge e2 = split_block (e1->dest, (gimple *) NULL);
7210 21 : edge e3 = make_edge (e1->src, e2->dest, EDGE_FALSE_VALUE);
7211 21 : e3->probability = profile_probability::even ();
7212 21 : e1->flags = EDGE_TRUE_VALUE;
7213 21 : e1->probability = e3->probability.invert ();
7214 21 : if (dom_info_available_p (CDI_DOMINATORS))
7215 13 : set_immediate_dominator (CDI_DOMINATORS, e2->dest, e1->src);
7216 21 : if (rhs_code == ABS_EXPR || rhs_code == ABSU_EXPR)
7217 : {
7218 12 : gsi = gsi_after_labels (e1->dest);
7219 12 : g = gimple_build_assign (make_ssa_name (TREE_TYPE (rhs1)),
7220 : NEGATE_EXPR, rhs1);
7221 12 : gsi_insert_before (&gsi, g, GSI_SAME_STMT);
7222 12 : gimple_set_location (g, loc);
7223 12 : rhs2 = gimple_assign_lhs (g);
7224 12 : std::swap (rhs1, rhs2);
7225 : }
7226 21 : gsi = gsi_for_stmt (stmt);
7227 21 : gsi_remove (&gsi, true);
7228 21 : gphi *phi = create_phi_node (lhs, e2->dest);
7229 21 : add_phi_arg (phi, rhs1, e2, UNKNOWN_LOCATION);
7230 21 : add_phi_arg (phi, rhs2, e3, UNKNOWN_LOCATION);
7231 21 : break;
7232 : }
7233 : }
7234 : /* We need to also rewrite stores of large/huge _BitInt INTEGER_CSTs
7235 : into memory. Such functions could have no large/huge SSA_NAMEs. */
7236 78820 : else if (SSA_NAME_IS_VIRTUAL_OPERAND (s))
7237 : {
7238 50342 : gimple *g = SSA_NAME_DEF_STMT (s);
7239 50342 : if (is_gimple_assign (g) && gimple_store_p (g))
7240 : {
7241 16043 : tree t = gimple_assign_rhs1 (g);
7242 16043 : if (TREE_CODE (TREE_TYPE (t)) == BITINT_TYPE
7243 16043 : && (bitint_precision_kind (TREE_TYPE (t))
7244 : >= bitint_prec_large))
7245 : has_large_huge = true;
7246 : }
7247 : }
7248 : /* Similarly, e.g. with -frounding-math casts from _BitInt INTEGER_CSTs
7249 : to floating point types need to be rewritten. */
7250 28478 : else if (SCALAR_FLOAT_TYPE_P (type))
7251 : {
7252 678 : gimple *g = SSA_NAME_DEF_STMT (s);
7253 678 : if (is_gimple_assign (g) && gimple_assign_rhs_code (g) == FLOAT_EXPR)
7254 : {
7255 177 : tree t = gimple_assign_rhs1 (g);
7256 177 : if (TREE_CODE (t) == INTEGER_CST
7257 1 : && TREE_CODE (TREE_TYPE (t)) == BITINT_TYPE
7258 178 : && (bitint_precision_kind (TREE_TYPE (t))
7259 : >= bitint_prec_large))
7260 : has_large_huge = true;
7261 : }
7262 : }
7263 : }
7264 100342 : for (i = first_large_huge; i < num_ssa_names; ++i)
7265 : {
7266 93183 : tree s = ssa_name (i);
7267 93183 : if (s == NULL)
7268 2427 : continue;
7269 90756 : tree type = TREE_TYPE (s);
7270 90756 : if (TREE_CODE (type) == COMPLEX_TYPE)
7271 3968 : type = TREE_TYPE (type);
7272 90756 : if (TREE_CODE (type) == BITINT_TYPE
7273 90756 : && bitint_precision_kind (type) >= bitint_prec_large)
7274 : {
7275 34922 : use_operand_p use_p;
7276 34922 : gimple *use_stmt;
7277 34922 : has_large_huge = true;
7278 36578 : if (optimize
7279 51151 : && optimizable_arith_overflow (SSA_NAME_DEF_STMT (s)))
7280 6270 : continue;
7281 : /* Ignore large/huge _BitInt SSA_NAMEs which have single use in
7282 : the same bb and could be handled in the same loop with the
7283 : immediate use. */
7284 33266 : if (optimize
7285 14573 : && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (s)
7286 14551 : && single_imm_use (s, &use_p, &use_stmt)
7287 47315 : && gimple_bb (SSA_NAME_DEF_STMT (s)) == gimple_bb (use_stmt))
7288 : {
7289 9924 : if (mergeable_op (SSA_NAME_DEF_STMT (s)))
7290 : {
7291 2108 : if (mergeable_op (use_stmt))
7292 1838 : continue;
7293 270 : tree_code cmp_code = comparison_op (use_stmt, NULL, NULL);
7294 270 : if (cmp_code == EQ_EXPR || cmp_code == NE_EXPR)
7295 26 : continue;
7296 244 : if (gimple_assign_cast_p (use_stmt))
7297 : {
7298 101 : tree lhs = gimple_assign_lhs (use_stmt);
7299 202 : if (INTEGRAL_TYPE_P (TREE_TYPE (lhs))
7300 : /* Don't merge with VIEW_CONVERT_EXPRs to
7301 : huge INTEGER_TYPEs used sometimes in memcpy
7302 : expansion. */
7303 190 : && (TREE_CODE (TREE_TYPE (lhs)) != INTEGER_TYPE
7304 8 : || (TYPE_PRECISION (TREE_TYPE (lhs))
7305 16 : <= MAX_FIXED_MODE_SIZE)))
7306 89 : continue;
7307 : }
7308 143 : else if (gimple_store_p (use_stmt)
7309 0 : && is_gimple_assign (use_stmt)
7310 0 : && !gimple_has_volatile_ops (use_stmt)
7311 143 : && !stmt_ends_bb_p (use_stmt))
7312 0 : continue;
7313 : }
7314 7971 : if (gimple_assign_cast_p (SSA_NAME_DEF_STMT (s)))
7315 : {
7316 818 : tree rhs1 = gimple_assign_rhs1 (SSA_NAME_DEF_STMT (s));
7317 818 : if (TREE_CODE (rhs1) == VIEW_CONVERT_EXPR)
7318 : {
7319 17 : rhs1 = TREE_OPERAND (rhs1, 0);
7320 17 : if (!INTEGRAL_TYPE_P (TREE_TYPE (rhs1))
7321 13 : && !POINTER_TYPE_P (TREE_TYPE (rhs1))
7322 13 : && gimple_store_p (use_stmt))
7323 6 : continue;
7324 : }
7325 1624 : if (INTEGRAL_TYPE_P (TREE_TYPE (rhs1))
7326 723 : && ((is_gimple_assign (use_stmt)
7327 676 : && (gimple_assign_rhs_code (use_stmt)
7328 : != COMPLEX_EXPR))
7329 47 : || gimple_code (use_stmt) == GIMPLE_COND)
7330 713 : && (!gimple_store_p (use_stmt)
7331 122 : || (is_gimple_assign (use_stmt)
7332 122 : && !gimple_has_volatile_ops (use_stmt)
7333 122 : && !stmt_ends_bb_p (use_stmt)))
7334 1525 : && (TREE_CODE (rhs1) != SSA_NAME
7335 713 : || !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (rhs1)))
7336 : {
7337 713 : if (is_gimple_assign (use_stmt))
7338 676 : switch (gimple_assign_rhs_code (use_stmt))
7339 : {
7340 54 : case TRUNC_DIV_EXPR:
7341 54 : case EXACT_DIV_EXPR:
7342 54 : case TRUNC_MOD_EXPR:
7343 54 : case FLOAT_EXPR:
7344 : /* For division, modulo and casts to floating
7345 : point, avoid representing unsigned operands
7346 : using negative prec if they were sign-extended
7347 : from narrower precision. */
7348 54 : if (TYPE_UNSIGNED (TREE_TYPE (s))
7349 28 : && !TYPE_UNSIGNED (TREE_TYPE (rhs1))
7350 63 : && (TYPE_PRECISION (TREE_TYPE (s))
7351 9 : > TYPE_PRECISION (TREE_TYPE (rhs1))))
7352 8 : goto force_name;
7353 : /* FALLTHRU */
7354 101 : case MULT_EXPR:
7355 101 : if (TREE_CODE (TREE_TYPE (rhs1)) != BITINT_TYPE
7356 101 : || (bitint_precision_kind (TREE_TYPE (rhs1))
7357 : < bitint_prec_large))
7358 36 : continue;
7359 : /* Uses which use handle_operand_addr can't
7360 : deal with nested casts. */
7361 65 : if (TREE_CODE (rhs1) == SSA_NAME
7362 65 : && gimple_assign_cast_p
7363 65 : (SSA_NAME_DEF_STMT (rhs1))
7364 43 : && has_single_use (rhs1)
7365 108 : && (gimple_bb (SSA_NAME_DEF_STMT (rhs1))
7366 43 : == gimple_bb (SSA_NAME_DEF_STMT (s))))
7367 43 : goto force_name;
7368 : break;
7369 0 : case VIEW_CONVERT_EXPR:
7370 0 : {
7371 0 : tree lhs = gimple_assign_lhs (use_stmt);
7372 : /* Don't merge with VIEW_CONVERT_EXPRs to
7373 : non-integral types. */
7374 0 : if (!INTEGRAL_TYPE_P (TREE_TYPE (lhs)))
7375 0 : goto force_name;
7376 : /* Don't merge with VIEW_CONVERT_EXPRs to
7377 : huge INTEGER_TYPEs used sometimes in memcpy
7378 : expansion. */
7379 0 : if (TREE_CODE (TREE_TYPE (lhs)) == INTEGER_TYPE
7380 0 : && (TYPE_PRECISION (TREE_TYPE (lhs))
7381 0 : > MAX_FIXED_MODE_SIZE))
7382 0 : goto force_name;
7383 : }
7384 : break;
7385 : default:
7386 : break;
7387 : }
7388 626 : if (TREE_CODE (TREE_TYPE (rhs1)) != BITINT_TYPE
7389 626 : || (bitint_precision_kind (TREE_TYPE (rhs1))
7390 : < bitint_prec_large))
7391 224 : continue;
7392 402 : if ((TYPE_PRECISION (TREE_TYPE (rhs1))
7393 402 : >= TYPE_PRECISION (TREE_TYPE (s)))
7394 402 : && mergeable_op (use_stmt))
7395 60 : continue;
7396 : /* Prevent merging a widening non-mergeable cast
7397 : on result of some narrower mergeable op
7398 : together with later mergeable operations. E.g.
7399 : result of _BitInt(223) addition shouldn't be
7400 : sign-extended to _BitInt(513) and have another
7401 : _BitInt(513) added to it, as handle_plus_minus
7402 : with its PHI node handling inside of handle_cast
7403 : will not work correctly. An exception is if
7404 : use_stmt is a store, this is handled directly
7405 : in lower_mergeable_stmt. */
7406 677 : if (TREE_CODE (rhs1) != SSA_NAME
7407 342 : || !has_single_use (rhs1)
7408 256 : || (gimple_bb (SSA_NAME_DEF_STMT (rhs1))
7409 256 : != gimple_bb (SSA_NAME_DEF_STMT (s)))
7410 207 : || !mergeable_op (SSA_NAME_DEF_STMT (rhs1))
7411 420 : || gimple_store_p (use_stmt))
7412 335 : continue;
7413 7 : if ((TYPE_PRECISION (TREE_TYPE (rhs1))
7414 7 : < TYPE_PRECISION (TREE_TYPE (s)))
7415 9 : && gimple_assign_cast_p (SSA_NAME_DEF_STMT (rhs1)))
7416 : {
7417 : /* Another exception is if the widening cast is
7418 : from mergeable same precision cast from something
7419 : not mergeable. */
7420 0 : tree rhs2
7421 0 : = gimple_assign_rhs1 (SSA_NAME_DEF_STMT (rhs1));
7422 0 : if (TREE_CODE (TREE_TYPE (rhs2)) == BITINT_TYPE
7423 0 : && (TYPE_PRECISION (TREE_TYPE (rhs1))
7424 0 : == TYPE_PRECISION (TREE_TYPE (rhs2))))
7425 : {
7426 0 : if (TREE_CODE (rhs2) != SSA_NAME
7427 0 : || !has_single_use (rhs2)
7428 0 : || (gimple_bb (SSA_NAME_DEF_STMT (rhs2))
7429 0 : != gimple_bb (SSA_NAME_DEF_STMT (s)))
7430 0 : || !mergeable_op (SSA_NAME_DEF_STMT (rhs2)))
7431 0 : continue;
7432 : }
7433 : }
7434 : }
7435 : }
7436 7259 : if (is_gimple_assign (SSA_NAME_DEF_STMT (s)))
7437 5201 : switch (gimple_assign_rhs_code (SSA_NAME_DEF_STMT (s)))
7438 : {
7439 1640 : case REALPART_EXPR:
7440 1640 : case IMAGPART_EXPR:
7441 1640 : {
7442 1640 : gimple *ds = SSA_NAME_DEF_STMT (s);
7443 1640 : tree rhs1 = gimple_assign_rhs1 (ds);
7444 1640 : rhs1 = TREE_OPERAND (rhs1, 0);
7445 1640 : if (TREE_CODE (rhs1) == SSA_NAME)
7446 : {
7447 1640 : gimple *g = SSA_NAME_DEF_STMT (rhs1);
7448 1640 : if (optimizable_arith_overflow (g))
7449 : {
7450 1636 : if (gimple_assign_rhs_code (ds) == IMAGPART_EXPR)
7451 1632 : continue;
7452 4 : if (gimple_store_p (use_stmt))
7453 : {
7454 : /* Punt if the cast use of IMAGPART_EXPR stmt
7455 : appears before the store use_stmt, because
7456 : optimizable arith overflow can't be
7457 : lowered at the store location in that case.
7458 : See PR121828. */
7459 4 : gimple_stmt_iterator gsi
7460 4 : = gsi_for_stmt (use_stmt);
7461 4 : unsigned int cnt = 0;
7462 6 : do
7463 : {
7464 6 : gsi_prev_nondebug (&gsi);
7465 6 : if (gsi_end_p (gsi))
7466 : break;
7467 6 : gimple *g2 = gsi_stmt (gsi);
7468 6 : if (g2 == ds)
7469 : break;
7470 3 : if (++cnt == 64)
7471 : break;
7472 3 : if (!gimple_assign_cast_p (g2))
7473 2 : continue;
7474 1 : tree rhs2 = gimple_assign_rhs1 (g2);
7475 1 : if (TREE_CODE (rhs2) != SSA_NAME)
7476 0 : continue;
7477 1 : gimple *g3 = SSA_NAME_DEF_STMT (rhs2);
7478 1 : if (!is_gimple_assign (g3))
7479 0 : continue;
7480 1 : if (gimple_assign_rhs_code (g3)
7481 : != IMAGPART_EXPR)
7482 0 : continue;
7483 1 : rhs2 = gimple_assign_rhs1 (g3);
7484 1 : rhs2 = TREE_OPERAND (rhs2, 0);
7485 1 : if (rhs2 != rhs1)
7486 0 : continue;
7487 : cnt = 64;
7488 : break;
7489 : }
7490 : while (1);
7491 4 : if (cnt == 64)
7492 : break;
7493 : }
7494 : }
7495 : }
7496 : }
7497 : /* FALLTHRU */
7498 665 : case LSHIFT_EXPR:
7499 665 : case RSHIFT_EXPR:
7500 665 : case MULT_EXPR:
7501 665 : case TRUNC_DIV_EXPR:
7502 665 : case EXACT_DIV_EXPR:
7503 665 : case TRUNC_MOD_EXPR:
7504 665 : case FIX_TRUNC_EXPR:
7505 665 : if (gimple_store_p (use_stmt)
7506 360 : && is_gimple_assign (use_stmt)
7507 360 : && !gimple_has_volatile_ops (use_stmt)
7508 1025 : && !stmt_ends_bb_p (use_stmt))
7509 : {
7510 360 : tree lhs = gimple_assign_lhs (use_stmt);
7511 : /* As multiply/division passes address of the lhs
7512 : to library function and that assumes it can extend
7513 : it to whole number of limbs, avoid merging those
7514 : with bit-field stores. Don't allow it for
7515 : shifts etc. either, so that the bit-field store
7516 : handling doesn't have to be done everywhere. */
7517 360 : if (TREE_CODE (lhs) == COMPONENT_REF
7518 360 : && DECL_BIT_FIELD_TYPE (TREE_OPERAND (lhs, 1)))
7519 : break;
7520 357 : continue;
7521 357 : }
7522 : break;
7523 : default:
7524 : break;
7525 : }
7526 : }
7527 :
7528 : /* Also ignore uninitialized uses. */
7529 28612 : if (SSA_NAME_IS_DEFAULT_DEF (s)
7530 28612 : && (!SSA_NAME_VAR (s) || VAR_P (SSA_NAME_VAR (s))))
7531 11 : continue;
7532 :
7533 28652 : force_name:
7534 28652 : if (!large_huge.m_names)
7535 5522 : large_huge.m_names = BITMAP_ALLOC (NULL);
7536 28652 : bitmap_set_bit (large_huge.m_names, SSA_NAME_VERSION (s));
7537 28652 : if (has_single_use (s))
7538 : {
7539 : tree s2 = s;
7540 : /* The coalescing hook special cases SSA_NAME copies.
7541 : Make sure not to mark in m_single_use_names single
7542 : use SSA_NAMEs copied from non-single use SSA_NAMEs. */
7543 25133 : while (gimple_assign_copy_p (SSA_NAME_DEF_STMT (s2)))
7544 : {
7545 919 : s2 = gimple_assign_rhs1 (SSA_NAME_DEF_STMT (s2));
7546 919 : if (TREE_CODE (s2) != SSA_NAME)
7547 : break;
7548 27 : if (!has_single_use (s2))
7549 : {
7550 : s2 = NULL_TREE;
7551 : break;
7552 : }
7553 : }
7554 25126 : if (s2)
7555 : {
7556 25106 : if (!large_huge.m_single_use_names)
7557 5409 : large_huge.m_single_use_names = BITMAP_ALLOC (NULL);
7558 25106 : bitmap_set_bit (large_huge.m_single_use_names,
7559 25106 : SSA_NAME_VERSION (s));
7560 : }
7561 : }
7562 28652 : if (SSA_NAME_VAR (s)
7563 6892 : && ((TREE_CODE (SSA_NAME_VAR (s)) == PARM_DECL
7564 5051 : && SSA_NAME_IS_DEFAULT_DEF (s))
7565 1878 : || TREE_CODE (SSA_NAME_VAR (s)) == RESULT_DECL))
7566 : has_large_huge_parm_result = true;
7567 28652 : if (optimize
7568 9959 : && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (s)
7569 9937 : && gimple_assign_load_p (SSA_NAME_DEF_STMT (s))
7570 6030 : && !gimple_has_volatile_ops (SSA_NAME_DEF_STMT (s))
7571 31618 : && !stmt_ends_bb_p (SSA_NAME_DEF_STMT (s)))
7572 : {
7573 2966 : use_operand_p use_p;
7574 2966 : imm_use_iterator iter;
7575 2966 : bool optimizable_load = true;
7576 8960 : FOR_EACH_IMM_USE_FAST (use_p, iter, s)
7577 : {
7578 3126 : gimple *use_stmt = USE_STMT (use_p);
7579 3126 : if (is_gimple_debug (use_stmt))
7580 0 : continue;
7581 3126 : if (gimple_code (use_stmt) == GIMPLE_PHI
7582 3114 : || is_gimple_call (use_stmt)
7583 3029 : || gimple_code (use_stmt) == GIMPLE_ASM
7584 6154 : || (is_gimple_assign (use_stmt)
7585 1896 : && (gimple_assign_rhs_code (use_stmt)
7586 : == COMPLEX_EXPR)))
7587 : {
7588 : optimizable_load = false;
7589 : break;
7590 : }
7591 2966 : }
7592 :
7593 2966 : ssa_op_iter oi;
7594 4012 : FOR_EACH_SSA_USE_OPERAND (use_p, SSA_NAME_DEF_STMT (s),
7595 : oi, SSA_OP_USE)
7596 : {
7597 1046 : tree s2 = USE_FROM_PTR (use_p);
7598 1046 : if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (s2))
7599 : {
7600 : optimizable_load = false;
7601 : break;
7602 : }
7603 : }
7604 :
7605 2966 : if (optimizable_load && !stmt_ends_bb_p (SSA_NAME_DEF_STMT (s)))
7606 : {
7607 2868 : if (!large_huge.m_loads)
7608 296 : large_huge.m_loads = BITMAP_ALLOC (NULL);
7609 2868 : bitmap_set_bit (large_huge.m_loads, SSA_NAME_VERSION (s));
7610 : }
7611 : }
7612 : }
7613 : /* We need to also rewrite stores of large/huge _BitInt INTEGER_CSTs
7614 : into memory. Such functions could have no large/huge SSA_NAMEs. */
7615 55834 : else if (SSA_NAME_IS_VIRTUAL_OPERAND (s))
7616 : {
7617 39577 : gimple *g = SSA_NAME_DEF_STMT (s);
7618 39577 : if (is_gimple_assign (g) && gimple_store_p (g))
7619 : {
7620 13410 : tree t = gimple_assign_rhs1 (g);
7621 13410 : if (TREE_CODE (TREE_TYPE (t)) == BITINT_TYPE
7622 13410 : && bitint_precision_kind (TREE_TYPE (t)) >= bitint_prec_large)
7623 : has_large_huge = true;
7624 : }
7625 : }
7626 : }
7627 :
7628 7159 : if (large_huge.m_names || has_large_huge)
7629 : {
7630 5694 : ret = TODO_update_ssa_only_virtuals | TODO_cleanup_cfg;
7631 5694 : calculate_dominance_info (CDI_DOMINATORS);
7632 5694 : if (optimize)
7633 2954 : enable_ranger (cfun);
7634 5694 : if (large_huge.m_loads)
7635 : {
7636 296 : basic_block entry = ENTRY_BLOCK_PTR_FOR_FN (cfun);
7637 296 : entry->aux = NULL;
7638 592 : bitint_dom_walker (large_huge.m_names,
7639 296 : large_huge.m_loads).walk (entry);
7640 296 : bitmap_and_compl_into (large_huge.m_names, large_huge.m_loads);
7641 296 : clear_aux_for_blocks ();
7642 296 : BITMAP_FREE (large_huge.m_loads);
7643 : }
7644 5694 : large_huge.m_limb_type = build_nonstandard_integer_type (limb_prec, 1);
7645 5694 : large_huge.m_limb_size
7646 5694 : = tree_to_uhwi (TYPE_SIZE_UNIT (large_huge.m_limb_type));
7647 : }
7648 7159 : if (large_huge.m_names)
7649 : {
7650 5522 : large_huge.m_map
7651 11044 : = init_var_map (num_ssa_names, NULL, large_huge.m_names);
7652 5522 : coalesce_ssa_name (large_huge.m_map);
7653 5522 : partition_view_normal (large_huge.m_map);
7654 5522 : if (dump_file && (dump_flags & TDF_DETAILS))
7655 : {
7656 0 : fprintf (dump_file, "After Coalescing:\n");
7657 0 : dump_var_map (dump_file, large_huge.m_map);
7658 : }
7659 5522 : large_huge.m_vars
7660 5522 : = XCNEWVEC (tree, num_var_partitions (large_huge.m_map));
7661 5522 : bitmap_iterator bi;
7662 5522 : if (has_large_huge_parm_result)
7663 18823 : EXECUTE_IF_SET_IN_BITMAP (large_huge.m_names, 0, i, bi)
7664 : {
7665 14732 : tree s = ssa_name (i);
7666 14732 : if (SSA_NAME_VAR (s)
7667 5812 : && ((TREE_CODE (SSA_NAME_VAR (s)) == PARM_DECL
7668 5051 : && SSA_NAME_IS_DEFAULT_DEF (s))
7669 798 : || TREE_CODE (SSA_NAME_VAR (s)) == RESULT_DECL))
7670 : {
7671 5014 : int p = var_to_partition (large_huge.m_map, s);
7672 5014 : if (large_huge.m_vars[p] == NULL_TREE)
7673 : {
7674 5014 : large_huge.m_vars[p] = SSA_NAME_VAR (s);
7675 5014 : mark_addressable (SSA_NAME_VAR (s));
7676 : }
7677 : }
7678 : }
7679 5522 : tree atype = NULL_TREE;
7680 5522 : if (dump_file && (dump_flags & TDF_DETAILS))
7681 0 : fprintf (dump_file, "Mapping SSA_NAMEs to decls:\n");
7682 31834 : EXECUTE_IF_SET_IN_BITMAP (large_huge.m_names, 0, i, bi)
7683 : {
7684 26312 : tree s = ssa_name (i);
7685 26312 : int p = var_to_partition (large_huge.m_map, s);
7686 26312 : if (large_huge.m_vars[p] == NULL_TREE)
7687 : {
7688 18534 : if (atype == NULL_TREE
7689 32381 : || !tree_int_cst_equal (TYPE_SIZE (atype),
7690 13847 : TYPE_SIZE (TREE_TYPE (s))))
7691 : {
7692 7604 : unsigned HOST_WIDE_INT nelts
7693 7604 : = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (s))) / limb_prec;
7694 7604 : atype = build_array_type_nelts (large_huge.m_limb_type,
7695 7604 : nelts);
7696 : }
7697 18534 : large_huge.m_vars[p] = create_tmp_var (atype, "bitint");
7698 18534 : mark_addressable (large_huge.m_vars[p]);
7699 : }
7700 26312 : if (dump_file && (dump_flags & TDF_DETAILS))
7701 : {
7702 0 : print_generic_expr (dump_file, s, TDF_SLIM);
7703 0 : fprintf (dump_file, " -> ");
7704 0 : print_generic_expr (dump_file, large_huge.m_vars[p], TDF_SLIM);
7705 0 : fprintf (dump_file, "\n");
7706 : }
7707 : }
7708 : }
7709 :
7710 45717 : FOR_EACH_BB_REVERSE_FN (bb, cfun)
7711 : {
7712 38558 : gimple_stmt_iterator prev;
7713 194119 : for (gimple_stmt_iterator gsi = gsi_last_bb (bb); !gsi_end_p (gsi);
7714 117003 : gsi = prev)
7715 : {
7716 117003 : prev = gsi;
7717 117003 : gsi_prev (&prev);
7718 117003 : ssa_op_iter iter;
7719 117003 : gimple *stmt = gsi_stmt (gsi);
7720 117003 : if (is_gimple_debug (stmt))
7721 79754 : continue;
7722 112313 : bitint_prec_kind kind = bitint_prec_small;
7723 112313 : tree t;
7724 337715 : FOR_EACH_SSA_TREE_OPERAND (t, stmt, iter, SSA_OP_ALL_OPERANDS)
7725 225402 : if (TREE_CODE (TREE_TYPE (t)) == BITINT_TYPE)
7726 : {
7727 77422 : bitint_prec_kind this_kind
7728 77422 : = bitint_precision_kind (TREE_TYPE (t));
7729 225603 : kind = MAX (kind, this_kind);
7730 : }
7731 112313 : if (is_gimple_assign (stmt) && gimple_store_p (stmt))
7732 : {
7733 16066 : t = gimple_assign_rhs1 (stmt);
7734 16066 : if (TREE_CODE (TREE_TYPE (t)) == BITINT_TYPE)
7735 : {
7736 13878 : bitint_prec_kind this_kind
7737 13878 : = bitint_precision_kind (TREE_TYPE (t));
7738 13878 : kind = MAX (kind, this_kind);
7739 : }
7740 : }
7741 112313 : if (is_gimple_assign (stmt)
7742 112313 : && gimple_assign_rhs_code (stmt) == FLOAT_EXPR)
7743 : {
7744 179 : t = gimple_assign_rhs1 (stmt);
7745 179 : if (TREE_CODE (TREE_TYPE (t)) == BITINT_TYPE
7746 179 : && TREE_CODE (t) == INTEGER_CST)
7747 : {
7748 1 : bitint_prec_kind this_kind
7749 1 : = bitint_precision_kind (TREE_TYPE (t));
7750 1 : kind = MAX (kind, this_kind);
7751 : }
7752 : }
7753 112313 : if (is_gimple_call (stmt))
7754 : {
7755 25393 : t = gimple_call_lhs (stmt);
7756 25393 : if (t && TREE_CODE (TREE_TYPE (t)) == COMPLEX_TYPE)
7757 : {
7758 5167 : bitint_prec_kind this_kind = arith_overflow_arg_kind (stmt);
7759 5167 : kind = MAX (kind, this_kind);
7760 5167 : if (TREE_CODE (TREE_TYPE (TREE_TYPE (t))) == BITINT_TYPE)
7761 : {
7762 5047 : this_kind
7763 5047 : = bitint_precision_kind (TREE_TYPE (TREE_TYPE (t)));
7764 5047 : kind = MAX (kind, this_kind);
7765 : }
7766 : }
7767 : }
7768 111997 : if (kind == bitint_prec_small)
7769 44022 : continue;
7770 68291 : switch (gimple_code (stmt))
7771 : {
7772 10881 : case GIMPLE_CALL:
7773 : /* For now. We'll need to handle some internal functions and
7774 : perhaps some builtins. */
7775 10881 : if (kind == bitint_prec_middle)
7776 2277 : continue;
7777 : break;
7778 4 : case GIMPLE_ASM:
7779 4 : if (kind == bitint_prec_middle)
7780 1 : continue;
7781 : break;
7782 1117 : case GIMPLE_RETURN:
7783 1117 : continue;
7784 47716 : case GIMPLE_ASSIGN:
7785 47716 : if (gimple_clobber_p (stmt))
7786 3509 : continue;
7787 44207 : if (kind >= bitint_prec_large)
7788 : break;
7789 8729 : if (gimple_assign_single_p (stmt))
7790 : /* No need to lower copies, loads or stores. */
7791 5784 : continue;
7792 2945 : if (gimple_assign_cast_p (stmt))
7793 : {
7794 2379 : tree lhs = gimple_assign_lhs (stmt);
7795 2379 : tree rhs = gimple_assign_rhs1 (stmt);
7796 4758 : if (INTEGRAL_TYPE_P (TREE_TYPE (lhs))
7797 2379 : && INTEGRAL_TYPE_P (TREE_TYPE (rhs))
7798 4752 : && (TYPE_PRECISION (TREE_TYPE (lhs))
7799 2373 : == TYPE_PRECISION (TREE_TYPE (rhs))))
7800 : /* No need to lower casts to same precision. */
7801 28 : continue;
7802 : }
7803 : break;
7804 : default:
7805 : break;
7806 1117 : }
7807 :
7808 11490 : if (kind == bitint_prec_middle)
7809 : {
7810 5037 : tree type = NULL_TREE;
7811 : /* Middle _BitInt(N) is rewritten to casts to INTEGER_TYPEs
7812 : with the same precision and back. */
7813 5037 : unsigned int nops = gimple_num_ops (stmt);
7814 16906 : for (unsigned int i = is_gimple_assign (stmt) ? 1 : 0;
7815 16906 : i < nops; ++i)
7816 11869 : if (tree op = gimple_op (stmt, i))
7817 : {
7818 7637 : tree nop = maybe_cast_middle_bitint (&gsi, op, type);
7819 7637 : if (nop != op)
7820 6757 : gimple_set_op (stmt, i, nop);
7821 880 : else if (COMPARISON_CLASS_P (op))
7822 : {
7823 0 : TREE_OPERAND (op, 0)
7824 0 : = maybe_cast_middle_bitint (&gsi,
7825 0 : TREE_OPERAND (op, 0),
7826 : type);
7827 0 : TREE_OPERAND (op, 1)
7828 0 : = maybe_cast_middle_bitint (&gsi,
7829 0 : TREE_OPERAND (op, 1),
7830 : type);
7831 : }
7832 880 : else if (TREE_CODE (op) == CASE_LABEL_EXPR)
7833 : {
7834 24 : CASE_LOW (op)
7835 24 : = maybe_cast_middle_bitint (&gsi, CASE_LOW (op),
7836 : type);
7837 48 : CASE_HIGH (op)
7838 48 : = maybe_cast_middle_bitint (&gsi, CASE_HIGH (op),
7839 : type);
7840 : }
7841 : }
7842 5037 : if (tree lhs = gimple_get_lhs (stmt))
7843 2917 : if (TREE_CODE (TREE_TYPE (lhs)) == BITINT_TYPE
7844 2917 : && (bitint_precision_kind (TREE_TYPE (lhs))
7845 : == bitint_prec_middle))
7846 : {
7847 1368 : int prec = TYPE_PRECISION (TREE_TYPE (lhs));
7848 1368 : int uns = TYPE_UNSIGNED (TREE_TYPE (lhs));
7849 1368 : type = build_nonstandard_integer_type (prec, uns);
7850 1368 : tree lhs2 = make_ssa_name (type);
7851 1368 : gimple_set_lhs (stmt, lhs2);
7852 1368 : gimple *g = gimple_build_assign (lhs, NOP_EXPR, lhs2);
7853 1368 : if (stmt_ends_bb_p (stmt))
7854 : {
7855 4 : edge e = find_fallthru_edge (gsi_bb (gsi)->succs);
7856 4 : gsi_insert_on_edge (e, g);
7857 4 : edge_insertions = true;
7858 : }
7859 : else
7860 1364 : gsi_insert_after (&gsi, g, GSI_SAME_STMT);
7861 : }
7862 5037 : update_stmt (stmt);
7863 5037 : continue;
7864 5037 : }
7865 :
7866 50538 : if (tree lhs = gimple_get_lhs (stmt))
7867 43951 : if (TREE_CODE (lhs) == SSA_NAME)
7868 : {
7869 35185 : tree type = TREE_TYPE (lhs);
7870 35185 : if (TREE_CODE (type) == COMPLEX_TYPE)
7871 4043 : type = TREE_TYPE (type);
7872 43784 : if (TREE_CODE (type) == BITINT_TYPE
7873 29907 : && bitint_precision_kind (type) >= bitint_prec_large
7874 64903 : && (large_huge.m_names == NULL
7875 29559 : || !bitmap_bit_p (large_huge.m_names,
7876 29559 : SSA_NAME_VERSION (lhs))))
7877 8599 : continue;
7878 : }
7879 :
7880 41939 : large_huge.lower_stmt (stmt);
7881 : }
7882 :
7883 38558 : tree atype = NULL_TREE;
7884 47084 : for (gphi_iterator gsi = gsi_start_phis (bb); !gsi_end_p (gsi);
7885 8526 : gsi_next (&gsi))
7886 : {
7887 8526 : gphi *phi = gsi.phi ();
7888 8526 : tree lhs = gimple_phi_result (phi);
7889 8526 : if (TREE_CODE (TREE_TYPE (lhs)) != BITINT_TYPE
7890 8526 : || bitint_precision_kind (TREE_TYPE (lhs)) < bitint_prec_large)
7891 8383 : continue;
7892 143 : int p1 = var_to_partition (large_huge.m_map, lhs);
7893 143 : gcc_assert (large_huge.m_vars[p1] != NULL_TREE);
7894 : tree v1 = large_huge.m_vars[p1];
7895 544 : for (unsigned i = 0; i < gimple_phi_num_args (phi); ++i)
7896 : {
7897 401 : tree arg = gimple_phi_arg_def (phi, i);
7898 401 : edge e = gimple_phi_arg_edge (phi, i);
7899 401 : gimple *g;
7900 401 : switch (TREE_CODE (arg))
7901 : {
7902 74 : case INTEGER_CST:
7903 74 : if (integer_zerop (arg) && VAR_P (v1))
7904 : {
7905 45 : tree zero = build_zero_cst (TREE_TYPE (v1));
7906 45 : g = gimple_build_assign (v1, zero);
7907 45 : gsi_insert_on_edge (e, g);
7908 45 : edge_insertions = true;
7909 136 : break;
7910 : }
7911 29 : int ext;
7912 29 : unsigned int min_prec, prec, rem;
7913 29 : tree c;
7914 29 : prec = TYPE_PRECISION (TREE_TYPE (arg));
7915 29 : rem = prec % (2 * limb_prec);
7916 29 : min_prec = bitint_min_cst_precision (arg, ext);
7917 29 : if (min_prec > prec - rem - 2 * limb_prec
7918 12 : && min_prec > (unsigned) limb_prec)
7919 : /* Constant which has enough significant bits that it
7920 : isn't worth trying to save .rodata space by extending
7921 : from smaller number. */
7922 : min_prec = prec;
7923 : else
7924 : {
7925 20 : min_prec = CEIL (min_prec, limb_prec) * limb_prec;
7926 20 : if (min_prec > (unsigned) limb_prec
7927 3 : && abi_limb_prec > limb_prec)
7928 : {
7929 : /* For targets with ABI limb precision higher than
7930 : limb precision round to ABI limb precision,
7931 : otherwise c can contain padding bits. */
7932 0 : min_prec
7933 0 : = CEIL (min_prec, abi_limb_prec) * abi_limb_prec;
7934 0 : if (min_prec > prec - rem - 2 * limb_prec)
7935 9 : min_prec = prec;
7936 : }
7937 : }
7938 29 : if (min_prec == 0)
7939 : c = NULL_TREE;
7940 26 : else if (min_prec == prec)
7941 9 : c = tree_output_constant_def (arg);
7942 17 : else if (min_prec == (unsigned) limb_prec)
7943 14 : c = fold_convert (large_huge.m_limb_type, arg);
7944 : else
7945 : {
7946 3 : tree ctype = build_bitint_type (min_prec, 1);
7947 3 : c = tree_output_constant_def (fold_convert (ctype, arg));
7948 : }
7949 26 : if (c)
7950 : {
7951 26 : if (VAR_P (v1) && min_prec == prec)
7952 : {
7953 8 : tree v2 = build1 (VIEW_CONVERT_EXPR,
7954 8 : TREE_TYPE (v1), c);
7955 8 : g = gimple_build_assign (v1, v2);
7956 8 : gsi_insert_on_edge (e, g);
7957 8 : edge_insertions = true;
7958 8 : break;
7959 : }
7960 18 : if (TREE_CODE (TREE_TYPE (c)) == INTEGER_TYPE)
7961 : {
7962 14 : if (bitint_big_endian)
7963 : {
7964 0 : tree ptype = build_pointer_type (TREE_TYPE (v1));
7965 0 : tree sz1 = TYPE_SIZE_UNIT (TREE_TYPE (v1));
7966 0 : tree sz2 = TYPE_SIZE_UNIT (TREE_TYPE (c));
7967 0 : tree off = build_int_cst (ptype,
7968 0 : tree_to_uhwi (sz1)
7969 0 : - tree_to_uhwi (sz2));
7970 0 : tree vd = build2 (MEM_REF, TREE_TYPE (c),
7971 : build_fold_addr_expr (v1),
7972 : off);
7973 0 : g = gimple_build_assign (vd, c);
7974 : }
7975 : else
7976 14 : g = gimple_build_assign (build1 (VIEW_CONVERT_EXPR,
7977 14 : TREE_TYPE (c),
7978 : v1), c);
7979 : }
7980 : else
7981 : {
7982 4 : unsigned HOST_WIDE_INT nelts
7983 4 : = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (c)))
7984 4 : / limb_prec;
7985 4 : tree vtype
7986 8 : = build_array_type_nelts (large_huge.m_limb_type,
7987 4 : nelts);
7988 4 : tree vd;
7989 4 : if (bitint_big_endian)
7990 : {
7991 0 : tree ptype = build_pointer_type (TREE_TYPE (v1));
7992 0 : tree sz1 = TYPE_SIZE_UNIT (TREE_TYPE (v1));
7993 0 : tree sz2 = TYPE_SIZE_UNIT (vtype);
7994 0 : tree off = build_int_cst (ptype,
7995 0 : tree_to_uhwi (sz1)
7996 0 : - tree_to_uhwi (sz2));
7997 0 : vd = build2 (MEM_REF, vtype,
7998 : build_fold_addr_expr (v1), off);
7999 : }
8000 : else
8001 4 : vd = build1 (VIEW_CONVERT_EXPR, vtype, v1);
8002 4 : g = gimple_build_assign (vd,
8003 : build1 (VIEW_CONVERT_EXPR,
8004 : vtype, c));
8005 : }
8006 18 : gsi_insert_on_edge (e, g);
8007 18 : if (min_prec == prec)
8008 : {
8009 : edge_insertions = true;
8010 : break;
8011 : }
8012 : }
8013 20 : if (ext == 0)
8014 : {
8015 14 : unsigned HOST_WIDE_INT nelts
8016 14 : = (tree_to_uhwi (TYPE_SIZE (TREE_TYPE (v1)))
8017 14 : - min_prec) / limb_prec;
8018 14 : tree vtype
8019 28 : = build_array_type_nelts (large_huge.m_limb_type,
8020 14 : nelts);
8021 14 : tree ptype = build_pointer_type (TREE_TYPE (v1));
8022 14 : tree off;
8023 14 : if (c && !bitint_big_endian)
8024 13 : off = fold_convert (ptype,
8025 : TYPE_SIZE_UNIT (TREE_TYPE (c)));
8026 : else
8027 1 : off = build_zero_cst (ptype);
8028 14 : tree vd = build2 (MEM_REF, vtype,
8029 : build_fold_addr_expr (v1), off);
8030 14 : g = gimple_build_assign (vd, build_zero_cst (vtype));
8031 : }
8032 : else
8033 : {
8034 6 : tree vd = v1;
8035 6 : if (c && !bitint_big_endian)
8036 : {
8037 4 : tree ptype = build_pointer_type (TREE_TYPE (v1));
8038 4 : tree off
8039 4 : = fold_convert (ptype,
8040 : TYPE_SIZE_UNIT (TREE_TYPE (c)));
8041 4 : vd = build2 (MEM_REF, large_huge.m_limb_type,
8042 : build_fold_addr_expr (v1), off);
8043 : }
8044 6 : vd = build_fold_addr_expr (vd);
8045 6 : unsigned HOST_WIDE_INT nbytes
8046 6 : = tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (v1)));
8047 6 : if (c)
8048 4 : nbytes
8049 4 : -= tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (c)));
8050 6 : tree fn = builtin_decl_implicit (BUILT_IN_MEMSET);
8051 6 : g = gimple_build_call (fn, 3, vd,
8052 : integer_minus_one_node,
8053 : build_int_cst (sizetype,
8054 6 : nbytes));
8055 : }
8056 20 : gsi_insert_on_edge (e, g);
8057 20 : edge_insertions = true;
8058 20 : break;
8059 0 : default:
8060 0 : gcc_unreachable ();
8061 327 : case SSA_NAME:
8062 327 : if (gimple_code (SSA_NAME_DEF_STMT (arg)) == GIMPLE_NOP)
8063 : {
8064 9 : if (large_huge.m_names == NULL
8065 18 : || !bitmap_bit_p (large_huge.m_names,
8066 9 : SSA_NAME_VERSION (arg)))
8067 310 : continue;
8068 : }
8069 327 : int p2 = var_to_partition (large_huge.m_map, arg);
8070 327 : if (p1 == p2)
8071 310 : continue;
8072 17 : gcc_assert (large_huge.m_vars[p2] != NULL_TREE);
8073 17 : tree v2 = large_huge.m_vars[p2];
8074 17 : if (VAR_P (v1) && VAR_P (v2))
8075 17 : g = gimple_build_assign (v1, v2);
8076 0 : else if (VAR_P (v1))
8077 0 : g = gimple_build_assign (v1, build1 (VIEW_CONVERT_EXPR,
8078 0 : TREE_TYPE (v1), v2));
8079 0 : else if (VAR_P (v2))
8080 0 : g = gimple_build_assign (build1 (VIEW_CONVERT_EXPR,
8081 0 : TREE_TYPE (v2), v1), v2);
8082 : else
8083 : {
8084 0 : if (atype == NULL_TREE
8085 0 : || !tree_int_cst_equal (TYPE_SIZE (atype),
8086 0 : TYPE_SIZE (TREE_TYPE (lhs))))
8087 : {
8088 0 : unsigned HOST_WIDE_INT nelts
8089 0 : = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (lhs)))
8090 0 : / limb_prec;
8091 0 : atype
8092 0 : = build_array_type_nelts (large_huge.m_limb_type,
8093 0 : nelts);
8094 : }
8095 0 : g = gimple_build_assign (build1 (VIEW_CONVERT_EXPR,
8096 : atype, v1),
8097 : build1 (VIEW_CONVERT_EXPR,
8098 : atype, v2));
8099 : }
8100 17 : gsi_insert_on_edge (e, g);
8101 17 : edge_insertions = true;
8102 17 : break;
8103 : }
8104 : }
8105 : }
8106 : }
8107 :
8108 7159 : if (large_huge.m_names || has_large_huge)
8109 : {
8110 5694 : gimple *nop = NULL;
8111 376478 : for (i = 0; i < num_ssa_names; ++i)
8112 : {
8113 370784 : tree s = ssa_name (i);
8114 370784 : if (s == NULL_TREE)
8115 15028 : continue;
8116 355756 : tree type = TREE_TYPE (s);
8117 355756 : if (TREE_CODE (type) == COMPLEX_TYPE)
8118 17062 : type = TREE_TYPE (type);
8119 355756 : if (TREE_CODE (type) == BITINT_TYPE
8120 355756 : && bitint_precision_kind (type) >= bitint_prec_large)
8121 : {
8122 39592 : if (large_huge.m_preserved
8123 44066 : && bitmap_bit_p (large_huge.m_preserved,
8124 6809 : SSA_NAME_VERSION (s)))
8125 2335 : continue;
8126 34922 : gimple *g = SSA_NAME_DEF_STMT (s);
8127 34922 : if (gimple_code (g) == GIMPLE_NOP)
8128 : {
8129 9271 : if (SSA_NAME_VAR (s))
8130 5035 : set_ssa_default_def (cfun, SSA_NAME_VAR (s), NULL_TREE);
8131 9271 : release_ssa_name (s);
8132 9271 : continue;
8133 : }
8134 25651 : if (gimple_bb (g) == NULL)
8135 : {
8136 2 : release_ssa_name (s);
8137 2 : continue;
8138 : }
8139 25649 : if (gimple_code (g) != GIMPLE_ASM)
8140 : {
8141 25648 : gimple_stmt_iterator gsi = gsi_for_stmt (g);
8142 25648 : bool save_vta = flag_var_tracking_assignments;
8143 25648 : flag_var_tracking_assignments = false;
8144 25648 : gsi_remove (&gsi, true);
8145 25648 : flag_var_tracking_assignments = save_vta;
8146 : }
8147 25649 : if (nop == NULL)
8148 4771 : nop = gimple_build_nop ();
8149 25649 : SSA_NAME_DEF_STMT (s) = nop;
8150 25649 : release_ssa_name (s);
8151 : }
8152 : }
8153 5694 : if (optimize)
8154 2954 : disable_ranger (cfun);
8155 : }
8156 :
8157 7159 : if (edge_insertions)
8158 33 : gsi_commit_edge_inserts ();
8159 :
8160 : /* Fix up arguments of ECF_RETURNS_TWICE calls. Those were temporarily
8161 : inserted before the call, but that is invalid IL, so move them to the
8162 : right place and add corresponding PHIs. */
8163 7159 : if (!large_huge.m_returns_twice_calls.is_empty ())
8164 : {
8165 9 : auto_vec<gimple *, 16> arg_stmts;
8166 29 : while (!large_huge.m_returns_twice_calls.is_empty ())
8167 : {
8168 11 : gimple *stmt = large_huge.m_returns_twice_calls.pop ();
8169 11 : gimple_stmt_iterator gsi = gsi_after_labels (gimple_bb (stmt));
8170 36 : while (gsi_stmt (gsi) != stmt)
8171 : {
8172 25 : if (is_gimple_debug (gsi_stmt (gsi)))
8173 2 : gsi_next (&gsi);
8174 : else
8175 : {
8176 23 : arg_stmts.safe_push (gsi_stmt (gsi));
8177 23 : gsi_remove (&gsi, false);
8178 : }
8179 : }
8180 11 : gimple *g;
8181 11 : basic_block bb = NULL;
8182 11 : edge e = NULL, ead = NULL;
8183 34 : FOR_EACH_VEC_ELT (arg_stmts, i, g)
8184 : {
8185 23 : gsi_safe_insert_before (&gsi, g);
8186 23 : if (i == 0)
8187 : {
8188 11 : bb = gimple_bb (stmt);
8189 11 : gcc_checking_assert (EDGE_COUNT (bb->preds) == 2);
8190 11 : e = EDGE_PRED (bb, 0);
8191 11 : ead = EDGE_PRED (bb, 1);
8192 11 : if ((ead->flags & EDGE_ABNORMAL) == 0)
8193 0 : std::swap (e, ead);
8194 11 : gcc_checking_assert ((e->flags & EDGE_ABNORMAL) == 0
8195 : && (ead->flags & EDGE_ABNORMAL));
8196 : }
8197 23 : tree lhs = gimple_assign_lhs (g);
8198 23 : tree arg = lhs;
8199 23 : gphi *phi = create_phi_node (copy_ssa_name (arg), bb);
8200 23 : add_phi_arg (phi, arg, e, UNKNOWN_LOCATION);
8201 23 : tree var = create_tmp_reg (TREE_TYPE (arg));
8202 23 : suppress_warning (var, OPT_Wuninitialized);
8203 23 : arg = get_or_create_ssa_default_def (cfun, var);
8204 23 : SSA_NAME_OCCURS_IN_ABNORMAL_PHI (arg) = 1;
8205 23 : add_phi_arg (phi, arg, ead, UNKNOWN_LOCATION);
8206 23 : arg = gimple_phi_result (phi);
8207 23 : SSA_NAME_OCCURS_IN_ABNORMAL_PHI (arg) = 1;
8208 23 : imm_use_iterator iter;
8209 23 : gimple *use_stmt;
8210 92 : FOR_EACH_IMM_USE_STMT (use_stmt, iter, lhs)
8211 : {
8212 46 : if (use_stmt == phi)
8213 23 : continue;
8214 23 : gcc_checking_assert (use_stmt == stmt);
8215 23 : use_operand_p use_p;
8216 69 : FOR_EACH_IMM_USE_ON_STMT (use_p, iter)
8217 23 : SET_USE (use_p, arg);
8218 23 : }
8219 : }
8220 11 : update_stmt (stmt);
8221 11 : arg_stmts.truncate (0);
8222 : }
8223 9 : }
8224 :
8225 7159 : return ret;
8226 7159 : }
8227 :
namespace {

/* Descriptor for the optimizing _BitInt lowering pass ("bitintlower").
   Requires SSA form and advertises PROP_gimple_lbitint so the -O0
   fallback pass below knows lowering has already been done.  */
const pass_data pass_data_lower_bitint =
{
  GIMPLE_PASS, /* type */
  "bitintlower", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  PROP_ssa, /* properties_required */
  PROP_gimple_lbitint, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

/* Optimizing variant of the _BitInt lowering pass; the whole work is
   delegated to gimple_lower_bitint.  */
class pass_lower_bitint : public gimple_opt_pass
{
public:
  pass_lower_bitint (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_lower_bitint, ctxt)
  {}

  /* opt_pass methods: */
  /* The pass is instantiated more than once in the pipeline, hence
     the clone method.  */
  opt_pass * clone () final override { return new pass_lower_bitint (m_ctxt); }
  unsigned int execute (function *) final override
  {
    return gimple_lower_bitint ();
  }

}; // class pass_lower_bitint

} // anon namespace
8260 :
8261 : gimple_opt_pass *
8262 288775 : make_pass_lower_bitint (gcc::context *ctxt)
8263 : {
8264 288775 : return new pass_lower_bitint (ctxt);
8265 : }
8266 :
8267 :
namespace {

/* Descriptor for the -O0/fallback _BitInt lowering pass
   ("bitintlower0").  Unlike the optimizing variant it only requires a
   CFG (PROP_cfg), not SSA.  */
const pass_data pass_data_lower_bitint_O0 =
{
  GIMPLE_PASS, /* type */
  "bitintlower0", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  PROP_cfg, /* properties_required */
  PROP_gimple_lbitint, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

/* Fallback _BitInt lowering pass, run when the optimizing pass has not
   already lowered the function (see the gate).  */
class pass_lower_bitint_O0 : public gimple_opt_pass
{
public:
  pass_lower_bitint_O0 (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_lower_bitint_O0, ctxt)
  {}

  /* opt_pass methods: */
  /* Run only if the function hasn't been lowered yet (the optimizing
     pass sets PROP_gimple_lbitint when it runs).  */
  bool gate (function *fun) final override
  {
    /* With errors, normal optimization passes are not run.  If we don't
       lower bitint operations at all, rtl expansion will abort.  */
    return !(fun->curr_properties & PROP_gimple_lbitint);
  }

  unsigned int execute (function *) final override
  {
    return gimple_lower_bitint ();
  }

}; // class pass_lower_bitint_O0

} // anon namespace
8306 :
8307 : gimple_opt_pass *
8308 288775 : make_pass_lower_bitint_O0 (gcc::context *ctxt)
8309 : {
8310 288775 : return new pass_lower_bitint_O0 (ctxt);
8311 : }
|