Line data Source code
1 : /* Consolidation of svalues and regions.
2 : Copyright (C) 2020-2026 Free Software Foundation, Inc.
3 : Contributed by David Malcolm <dmalcolm@redhat.com>.
4 :
5 : This file is part of GCC.
6 :
7 : GCC is free software; you can redistribute it and/or modify it
8 : under the terms of the GNU General Public License as published by
9 : the Free Software Foundation; either version 3, or (at your option)
10 : any later version.
11 :
12 : GCC is distributed in the hope that it will be useful, but
13 : WITHOUT ANY WARRANTY; without even the implied warranty of
14 : MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
15 : General Public License for more details.
16 :
17 : You should have received a copy of the GNU General Public License
18 : along with GCC; see the file COPYING3. If not see
19 : <http://www.gnu.org/licenses/>. */
20 :
21 : #include "analyzer/common.h"
22 :
23 : #include "fold-const.h"
24 : #include "ordered-hash-map.h"
25 : #include "options.h"
26 : #include "analyzer/supergraph.h"
27 : #include "sbitmap.h"
28 : #include "target.h"
29 :
30 : #include "analyzer/analyzer-logging.h"
31 : #include "analyzer/call-string.h"
32 : #include "analyzer/program-point.h"
33 : #include "analyzer/store.h"
34 : #include "analyzer/region-model.h"
35 : #include "analyzer/constraint-manager.h"
36 :
37 : #if ENABLE_ANALYZER
38 :
39 : namespace ana {
40 :
41 : /* class region_model_manager. */
42 :
43 : /* region_model_manager's ctor. */
44 :
region_model_manager::region_model_manager (logger *logger)
: m_logger (logger),
  m_next_symbol_id (0),
  m_empty_call_string (),
  /* Each base region consumes a symbol id via alloc_symbol_id (),
     which presumably hands out consecutive ids by advancing
     m_next_symbol_id (defined elsewhere) — TODO confirm.
     NOTE(review): members are initialized in *declaration* order,
     not in the textual order below; this list assumes the two
     orders agree (otherwise -Wreorder would flag it).  */
  m_root_region (alloc_symbol_id ()),
  m_stack_region (alloc_symbol_id (), &m_root_region),
  m_heap_region (alloc_symbol_id (), &m_root_region),
  /* The unknown-svalue singleton for NULL_TREE is created lazily in
     get_or_create_unknown_svalue.  */
  m_unknown_NULL (nullptr),
  m_checking_feasibility (false),
  m_max_complexity (0, 0),
  m_code_region (alloc_symbol_id (), &m_root_region),
  m_fndecls_map (), m_labels_map (),
  m_globals_region (alloc_symbol_id (), &m_root_region),
  m_globals_map (),
  m_thread_local_region (alloc_symbol_id (), &m_root_region),
  /* errno is modeled as a per-thread region.  */
  m_errno_region (alloc_symbol_id (), &m_thread_local_region),
  m_store_mgr (this),
  /* Owned; freed in the dtor.  */
  m_range_mgr (new bounded_ranges_manager ()),
  m_known_fn_mgr (logger)
{
}
66 :
67 : /* region_model_manager's dtor. Delete all of the managed svalues
68 : and regions. */
69 :
70 7966 : region_model_manager::~region_model_manager ()
71 : {
72 : /* Delete consolidated svalues. */
73 59854 : for (constants_map_t::iterator iter = m_constants_map.begin ();
74 115725 : iter != m_constants_map.end (); ++iter)
75 55871 : delete (*iter).second;
76 12599 : for (unknowns_map_t::iterator iter = m_unknowns_map.begin ();
77 21215 : iter != m_unknowns_map.end (); ++iter)
78 8616 : delete (*iter).second;
79 3983 : delete m_unknown_NULL;
80 9959 : for (poisoned_values_map_t::iterator iter = m_poisoned_values_map.begin ();
81 15935 : iter != m_poisoned_values_map.end (); ++iter)
82 5976 : delete (*iter).second;
83 4017 : for (setjmp_values_map_t::iterator iter = m_setjmp_values_map.begin ();
84 4051 : iter != m_setjmp_values_map.end (); ++iter)
85 34 : delete (*iter).second;
86 30592 : for (initial_values_map_t::iterator iter = m_initial_values_map.begin ();
87 57201 : iter != m_initial_values_map.end (); ++iter)
88 26609 : delete (*iter).second;
89 29057 : for (pointer_values_map_t::iterator iter = m_pointer_values_map.begin ();
90 54131 : iter != m_pointer_values_map.end (); ++iter)
91 25074 : delete (*iter).second;
92 12319 : for (unaryop_values_map_t::iterator iter = m_unaryop_values_map.begin ();
93 20655 : iter != m_unaryop_values_map.end (); ++iter)
94 8336 : delete (*iter).second;
95 24337 : for (binop_values_map_t::iterator iter = m_binop_values_map.begin ();
96 44691 : iter != m_binop_values_map.end (); ++iter)
97 20354 : delete (*iter).second;
98 6314 : for (sub_values_map_t::iterator iter = m_sub_values_map.begin ();
99 8645 : iter != m_sub_values_map.end (); ++iter)
100 2331 : delete (*iter).second;
101 5007 : for (auto iter : m_repeated_values_map)
102 512 : delete iter.second;
103 5509 : for (auto iter : m_bits_within_values_map)
104 763 : delete iter.second;
105 4193 : for (unmergeable_values_map_t::iterator iter
106 3983 : = m_unmergeable_values_map.begin ();
107 4403 : iter != m_unmergeable_values_map.end (); ++iter)
108 210 : delete (*iter).second;
109 6338 : for (widening_values_map_t::iterator iter = m_widening_values_map.begin ();
110 8693 : iter != m_widening_values_map.end (); ++iter)
111 2355 : delete (*iter).second;
112 4595 : for (compound_values_map_t::iterator iter = m_compound_values_map.begin ();
113 5207 : iter != m_compound_values_map.end (); ++iter)
114 612 : delete (*iter).second;
115 30208 : for (conjured_values_map_t::iterator iter = m_conjured_values_map.begin ();
116 56433 : iter != m_conjured_values_map.end (); ++iter)
117 26225 : delete (*iter).second;
118 4357 : for (auto iter : m_asm_output_values_map)
119 187 : delete iter.second;
120 4357 : for (auto iter : m_const_fn_result_values_map)
121 187 : delete iter.second;
122 :
123 : /* Delete consolidated regions. */
124 13443 : for (fndecls_map_t::iterator iter = m_fndecls_map.begin ();
125 22903 : iter != m_fndecls_map.end (); ++iter)
126 9460 : delete (*iter).second;
127 4033 : for (labels_map_t::iterator iter = m_labels_map.begin ();
128 4083 : iter != m_labels_map.end (); ++iter)
129 50 : delete (*iter).second;
130 10361 : for (globals_map_t::iterator iter = m_globals_map.begin ();
131 16739 : iter != m_globals_map.end (); ++iter)
132 6378 : delete (*iter).second;
133 7766 : for (string_map_t::iterator iter = m_string_map.begin ();
134 11549 : iter != m_string_map.end (); ++iter)
135 3783 : delete (*iter).second;
136 :
137 3983 : delete m_range_mgr;
138 7966 : }
139 :
140 : /* Return true if C exceeds the complexity limit for svalues. */
141 :
142 : bool
143 170295 : region_model_manager::too_complex_p (const complexity &c) const
144 : {
145 170295 : if (c.m_max_depth > (unsigned)param_analyzer_max_svalue_depth)
146 1987 : return true;
147 : return false;
148 : }
149 :
150 : /* If SVAL exceeds the complexity limit for svalues, delete it
151 : and return true.
152 : Otherwise update m_max_complexity and return false. */
153 :
bool
region_model_manager::reject_if_too_complex (svalue *sval)
{
  /* When feasibility-checking, accept svalues of any complexity.  */
  if (m_checking_feasibility)
    return false;

  const complexity &c = sval->get_complexity ();
  if (!too_complex_p (c))
    {
      /* Within the limit: record a high-water mark of the complexity
	 seen so far in m_max_complexity.  */
      if (m_max_complexity.m_num_nodes < c.m_num_nodes)
	m_max_complexity.m_num_nodes = c.m_num_nodes;
      if (m_max_complexity.m_max_depth < c.m_max_depth)
	m_max_complexity.m_max_depth = c.m_max_depth;
      return false;
    }

  /* Too complex: emit -Wanalyzer-symbol-too-complex (with a dump of
     the offending symbol), then take ownership of SVAL and delete it.
     Per the macro below, the caller substitutes an unknown svalue.  */
  pretty_printer pp;
  pp_format_decoder (&pp) = default_tree_printer;
  sval->dump_to_pp (&pp, true);
  if (warning_at (input_location, OPT_Wanalyzer_symbol_too_complex,
		  "symbol too complicated: %qs",
		  pp_formatted_text (&pp)))
    inform (input_location,
	    "max_depth %i exceeds --param=analyzer-max-svalue-depth=%i",
	    c.m_max_depth, param_analyzer_max_svalue_depth);

  delete sval;
  return true;
}
183 :
184 : /* Macro for imposing a complexity limit on svalues, for use within
185 : region_model_manager member functions.
186 :
187 : If SVAL exceeds the complexity limit, delete it and return an UNKNOWN
188 : value of the same type.
189 : Otherwise update m_max_complexity and carry on. */
190 :
191 : #define RETURN_UNKNOWN_IF_TOO_COMPLEX(SVAL) \
192 : do { \
193 : svalue *sval_ = (SVAL); \
194 : tree type_ = sval_->get_type (); \
195 : if (reject_if_too_complex (sval_)) \
196 : return get_or_create_unknown_svalue (type_); \
197 : } while (0)
198 :
199 : /* svalue consolidation. */
200 :
201 : /* Return the svalue * for a constant_svalue for CST_EXPR,
202 : creating it if necessary.
203 : The constant_svalue instances are reused, based on pointer equality
204 : of trees */
205 :
206 : const svalue *
207 2551197 : region_model_manager::get_or_create_constant_svalue (tree type, tree cst_expr)
208 : {
209 2551197 : gcc_assert (cst_expr);
210 2551197 : gcc_assert (CONSTANT_CLASS_P (cst_expr));
211 2551197 : gcc_assert (type == TREE_TYPE (cst_expr) || type == NULL_TREE);
212 :
213 2551197 : constant_svalue::key_t key (type, cst_expr);
214 2551197 : constant_svalue **slot = m_constants_map.get (key);
215 2551197 : if (slot)
216 2494522 : return *slot;
217 56675 : constant_svalue *cst_sval
218 56675 : = new constant_svalue (alloc_symbol_id (), type, cst_expr);
219 56675 : RETURN_UNKNOWN_IF_TOO_COMPLEX (cst_sval);
220 55871 : m_constants_map.put (key, cst_sval);
221 55871 : return cst_sval;
222 : }
223 :
224 : const svalue *
225 1643710 : region_model_manager::get_or_create_constant_svalue (tree cst_expr)
226 : {
227 1643710 : tree type = TREE_TYPE (cst_expr);
228 1643710 : if (TREE_CODE (cst_expr) == RAW_DATA_CST)
229 : /* The type of a RAW_DATA_CST is the type of each element, rather than
230 : that of the constant as a whole, so use NULL_TREE for simplicity. */
231 14 : type = NULL_TREE;
232 1643710 : return get_or_create_constant_svalue (type, cst_expr);
233 : }
234 :
235 : /* Return the svalue * for a constant_svalue for the INTEGER_CST
236 : for VAL of type TYPE, creating it if necessary. */
237 :
238 : const svalue *
239 869976 : region_model_manager::get_or_create_int_cst (tree type,
240 : const poly_wide_int_ref &cst)
241 : {
242 869976 : tree effective_type = type;
243 869976 : if (!type)
244 7302 : effective_type = ptrdiff_type_node;
245 869976 : gcc_assert (INTEGRAL_TYPE_P (effective_type)
246 : || POINTER_TYPE_P (effective_type));
247 869976 : tree tree_cst = wide_int_to_tree (effective_type, cst);
248 869976 : return get_or_create_constant_svalue (type, tree_cst);
249 : }
250 :
251 : /* Return the svalue * for the constant_svalue for the NULL pointer
252 : of POINTER_TYPE, creating it if necessary. */
253 :
const svalue *
region_model_manager::get_or_create_null_ptr (tree pointer_type)
{
  /* NULL is simply the zero constant of the given pointer type,
     consolidated like any other integer constant.  */
  gcc_assert (pointer_type);
  gcc_assert (POINTER_TYPE_P (pointer_type));
  return get_or_create_int_cst (pointer_type, 0);
}
261 :
262 : /* Return the svalue * for a unknown_svalue for TYPE (which can be NULL_TREE),
263 : creating it if necessary.
264 : The unknown_svalue instances are reused, based on pointer equality
265 : of the types */
266 :
267 : const svalue *
268 830671 : region_model_manager::get_or_create_unknown_svalue (tree type)
269 : {
270 : /* Don't create unknown values when doing feasibility testing;
271 : instead, create a unique svalue. */
272 830671 : if (m_checking_feasibility)
273 12427 : return create_unique_svalue (type);
274 :
275 : /* Special-case NULL, so that the hash_map can use NULL as the
276 : "empty" value. */
277 818244 : if (type == NULL_TREE)
278 : {
279 118718 : if (!m_unknown_NULL)
280 1733 : m_unknown_NULL = new unknown_svalue (alloc_symbol_id (), type);
281 118718 : return m_unknown_NULL;
282 : }
283 :
284 699526 : unknown_svalue **slot = m_unknowns_map.get (type);
285 699526 : if (slot)
286 690910 : return *slot;
287 8616 : unknown_svalue *sval = new unknown_svalue (alloc_symbol_id (), type);
288 8616 : m_unknowns_map.put (type, sval);
289 8616 : return sval;
290 : }
291 :
292 : /* Return a freshly-allocated svalue of TYPE, owned by this manager. */
293 :
294 : const svalue *
295 12427 : region_model_manager::create_unique_svalue (tree type)
296 : {
297 12427 : svalue *sval = new placeholder_svalue (alloc_symbol_id (), type, "unique");
298 12427 : m_managed_dynamic_svalues.safe_push (sval);
299 12427 : return sval;
300 : }
301 :
302 : /* Return the svalue * for the initial value of REG, creating it if
303 : necessary. */
304 :
305 : const svalue *
306 3092033 : region_model_manager::get_or_create_initial_value (const region *reg,
307 : bool check_poisoned)
308 : {
309 3092033 : if (!reg->can_have_initial_svalue_p () && check_poisoned)
310 194013 : return get_or_create_poisoned_svalue (poison_kind::uninit,
311 194013 : reg->get_type ());
312 :
313 : /* The initial value of a cast is a cast of the initial value. */
314 2898020 : if (const cast_region *cast_reg = reg->dyn_cast_cast_region ())
315 : {
316 2249 : const region *original_reg = cast_reg->get_parent_region ();
317 2249 : return get_or_create_cast (cast_reg->get_type (),
318 2249 : get_or_create_initial_value (original_reg));
319 : }
320 :
321 : /* Simplify:
322 : INIT_VAL(ELEMENT_REG(STRING_REG), CONSTANT_SVAL)
323 : to:
324 : CONSTANT_SVAL(STRING[N]). */
325 2895771 : if (const element_region *element_reg = reg->dyn_cast_element_region ())
326 7203 : if (tree cst_idx = element_reg->get_index ()->maybe_get_constant ())
327 8316 : if (const string_region *string_reg
328 4158 : = element_reg->get_parent_region ()->dyn_cast_string_region ())
329 702 : if (tree_fits_shwi_p (cst_idx))
330 : {
331 702 : HOST_WIDE_INT idx = tree_to_shwi (cst_idx);
332 702 : tree string_cst = string_reg->get_string_cst ();
333 1404 : if (idx >= 0 && idx <= TREE_STRING_LENGTH (string_cst))
334 : {
335 702 : int ch = TREE_STRING_POINTER (string_cst)[idx];
336 702 : return get_or_create_int_cst (reg->get_type (), ch);
337 : }
338 : }
339 :
340 : /* INIT_VAL (*UNKNOWN_PTR) -> UNKNOWN_VAL. */
341 2895069 : if (reg->symbolic_for_unknown_ptr_p ())
342 7506 : return get_or_create_unknown_svalue (reg->get_type ());
343 :
344 2887563 : if (initial_svalue **slot = m_initial_values_map.get (reg))
345 2860565 : return *slot;
346 26998 : initial_svalue *initial_sval
347 26998 : = new initial_svalue (alloc_symbol_id (), reg->get_type (), reg);
348 26998 : RETURN_UNKNOWN_IF_TOO_COMPLEX (initial_sval);
349 26609 : m_initial_values_map.put (reg, initial_sval);
350 26609 : return initial_sval;
351 : }
352 :
353 : /* Return the svalue * for R using type TYPE, creating it if
354 : necessary. */
355 :
356 : const svalue *
357 34 : region_model_manager::get_or_create_setjmp_svalue (const setjmp_record &r,
358 : tree type)
359 : {
360 34 : setjmp_svalue::key_t key (r, type);
361 34 : if (setjmp_svalue **slot = m_setjmp_values_map.get (key))
362 0 : return *slot;
363 34 : setjmp_svalue *setjmp_sval = new setjmp_svalue (r, alloc_symbol_id (), type);
364 34 : RETURN_UNKNOWN_IF_TOO_COMPLEX (setjmp_sval);
365 34 : m_setjmp_values_map.put (key, setjmp_sval);
366 34 : return setjmp_sval;
367 : }
368 :
369 : /* Return the svalue * for a poisoned value of KIND and TYPE, creating it if
370 : necessary. */
371 :
372 : const svalue *
373 201398 : region_model_manager::get_or_create_poisoned_svalue (enum poison_kind kind,
374 : tree type)
375 : {
376 201398 : poisoned_svalue::key_t key (kind, type);
377 201398 : if (poisoned_svalue **slot = m_poisoned_values_map.get (key))
378 195422 : return *slot;
379 5976 : poisoned_svalue *poisoned_sval
380 5976 : = new poisoned_svalue (kind, alloc_symbol_id (), type);
381 5976 : RETURN_UNKNOWN_IF_TOO_COMPLEX (poisoned_sval);
382 5976 : m_poisoned_values_map.put (key, poisoned_sval);
383 5976 : return poisoned_sval;
384 : }
385 :
386 : /* Return the svalue * for a pointer to POINTEE of type PTR_TYPE,
387 : creating it if necessary. */
388 :
389 : const svalue *
390 986907 : region_model_manager::get_ptr_svalue (tree ptr_type, const region *pointee)
391 : {
392 : /* If this is a symbolic region from dereferencing a pointer, and the types
393 : match, then return the original pointer. */
394 986907 : if (const symbolic_region *sym_reg = pointee->dyn_cast_symbolic_region ())
395 159 : if (ptr_type == sym_reg->get_pointer ()->get_type ())
396 : return sym_reg->get_pointer ();
397 :
398 986748 : region_svalue::key_t key (ptr_type, pointee);
399 986748 : if (region_svalue **slot = m_pointer_values_map.get (key))
400 961162 : return *slot;
401 25586 : region_svalue *sval
402 25586 : = new region_svalue (alloc_symbol_id (), ptr_type, pointee);
403 25586 : RETURN_UNKNOWN_IF_TOO_COMPLEX (sval);
404 25074 : m_pointer_values_map.put (key, sval);
405 25074 : return sval;
406 : }
407 :
408 : /* Subroutine of region_model_manager::maybe_fold_unaryop
409 : when the arg is a binop_svalue.
410 : Invert comparisons e.g. "!(x == y)" => "x != y".
411 : Otherwise, return nullptr. */
412 :
413 : const svalue *
414 16 : region_model_manager::
415 : maybe_invert_comparison_in_unaryop (tree result_type,
416 : const binop_svalue *binop)
417 : {
418 16 : if (TREE_CODE_CLASS (binop->get_op ()) == tcc_comparison)
419 : {
420 16 : enum tree_code inv_op
421 16 : = invert_tree_comparison (binop->get_op (),
422 16 : HONOR_NANS (binop->get_type ()));
423 16 : if (inv_op != ERROR_MARK)
424 16 : return get_or_create_cast
425 16 : (result_type,
426 : get_or_create_binop (binop->get_type (), inv_op,
427 : binop->get_arg0 (),
428 16 : binop->get_arg1 ()));
429 : }
430 : return nullptr;
431 : }
432 :
433 : /* Subroutine of region_model_manager::get_or_create_unaryop.
434 : Attempt to fold the inputs and return a simpler svalue *.
435 : Otherwise, return nullptr. */
436 :
const svalue *
region_model_manager::maybe_fold_unaryop (tree type, enum tree_code op,
					  const svalue *arg)
{
  /* Ops on "unknown" are also unknown.  */
  if (arg->get_kind () == SK_UNKNOWN)
    return get_or_create_unknown_svalue (type);
  /* Likewise for "poisoned": the poison kind propagates through the
     op, recast to the result type.  */
  else if (const poisoned_svalue *poisoned_sval
	     = arg->dyn_cast_poisoned_svalue ())
    return get_or_create_poisoned_svalue (poisoned_sval->get_poison_kind (),
					  type);

  gcc_assert (arg->can_have_associated_state_p ());

  switch (op)
    {
    default: break;
    case VIEW_CONVERT_EXPR:
    case NOP_EXPR:
      {
	/* A typeless cast can't be simplified here.  */
	if (!type)
	  return nullptr;

	/* Handle redundant casts.  */
	if (arg->get_type ()
	    && useless_type_conversion_p (arg->get_type (), type))
	  return arg;

	/* Fold "cast<TYPE> (cast <INNER_TYPE> (innermost_arg))
	     => "cast<TYPE> (innermost_arg)",
	   unless INNER_TYPE is narrower than TYPE (checked via
	   fold_binary on the TYPE_SIZEs, which may fail to fold for
	   non-constant sizes; in that case don't simplify).  */
	if (const svalue *innermost_arg = arg->maybe_undo_cast ())
	  {
	    if (tree inner_type = arg->get_type ())
	      if (TYPE_SIZE (type)
		  && TYPE_SIZE (inner_type)
		  && (fold_binary (LE_EXPR, boolean_type_node,
				   TYPE_SIZE (type), TYPE_SIZE (inner_type))
		      == boolean_true_node))
		return maybe_fold_unaryop (type, op, innermost_arg);
	  }
	/* Avoid creating symbolic regions for pointer casts by
	   simplifying (T*)(&REGION) to ((T*)&REGION).  */
	if (const region_svalue *region_sval = arg->dyn_cast_region_svalue ())
	  if (POINTER_TYPE_P (type)
	      && region_sval->get_type ()
	      && POINTER_TYPE_P (region_sval->get_type ()))
	    return get_ptr_svalue (type, region_sval->get_pointee ());

	/* Casting all zeroes should give all zeroes
	   (for integral and pointer result types).  */
	if (type
	    && arg->all_zeroes_p ()
	    && (INTEGRAL_TYPE_P (type) || POINTER_TYPE_P (type)))
	  return get_or_create_int_cst (type, 0);
      }
      break;
    case TRUTH_NOT_EXPR:
      {
	/* Invert comparisons e.g. "!(x == y)" => "x != y".  */
	if (const binop_svalue *binop = arg->dyn_cast_binop_svalue ())
	  if (const svalue *folded
		= maybe_invert_comparison_in_unaryop (type, binop))
	    return folded;
      }
      break;
    case NEGATE_EXPR:
      {
	/* -(-(VAL)) is VAL, for integer types.  */
	if (const unaryop_svalue *unaryop = arg->dyn_cast_unaryop_svalue ())
	  if (unaryop->get_op () == NEGATE_EXPR
	      && type == unaryop->get_type ()
	      && type
	      && INTEGRAL_TYPE_P (type))
	    return unaryop->get_arg ();
      }
      break;
    case BIT_NOT_EXPR:
      {
	/* Invert comparisons for e.g. "~(x == y)" => "x != y",
	   but only when both sides are boolean-typed, where BIT_NOT
	   coincides with logical negation.  */
	if (type
	    && TREE_CODE (type) == BOOLEAN_TYPE
	    && arg->get_type ()
	    && TREE_CODE (arg->get_type ()) == BOOLEAN_TYPE)
	  if (const binop_svalue *binop = arg->dyn_cast_binop_svalue ())
	    if (const svalue *folded
		  = maybe_invert_comparison_in_unaryop (type, binop))
	      return folded;
      }
      break;
    }

  /* Constants: delegate to the middle-end's fold_unary.  */
  if (type)
    if (tree cst = arg->maybe_get_constant ())
      if (tree result = fold_unary (op, type, cst))
	{
	  if (CONSTANT_CLASS_P (result))
	    return get_or_create_constant_svalue (result);

	  /* fold_unary can return casts of constants; try to handle them.  */
	  if (op != NOP_EXPR
	      && TREE_CODE (result) == NOP_EXPR
	      && CONSTANT_CLASS_P (TREE_OPERAND (result, 0)))
	    {
	      const svalue *inner_cst
		= get_or_create_constant_svalue (TREE_OPERAND (result, 0));
	      return get_or_create_cast (type,
					 get_or_create_cast (TREE_TYPE (result),
							     inner_cst));
	    }
	}

  /* No simplification found.  */
  return nullptr;
}
552 :
553 : /* Return the svalue * for an unary operation OP on ARG with a result of
554 : type TYPE, creating it if necessary. */
555 :
556 : const svalue *
557 322115 : region_model_manager::get_or_create_unaryop (tree type, enum tree_code op,
558 : const svalue *arg)
559 : {
560 322115 : if (const svalue *folded = maybe_fold_unaryop (type, op, arg))
561 : return folded;
562 118585 : unaryop_svalue::key_t key (type, op, arg);
563 118585 : if (unaryop_svalue **slot = m_unaryop_values_map.get (key))
564 110147 : return *slot;
565 8438 : unaryop_svalue *unaryop_sval
566 8438 : = new unaryop_svalue (alloc_symbol_id (), type, op, arg);
567 8438 : RETURN_UNKNOWN_IF_TOO_COMPLEX (unaryop_sval);
568 8336 : m_unaryop_values_map.put (key, unaryop_sval);
569 8336 : return unaryop_sval;
570 : }
571 :
572 : /* Get a tree code for a cast to DST_TYPE from SRC_TYPE.
573 : Use NOP_EXPR if possible (e.g. to help fold_unary convert casts
574 : of 0 to (T*) to simple pointer constants), but use FIX_TRUNC_EXPR
575 : and VIEW_CONVERT_EXPR for cases that fold_unary would otherwise crash
576 : on. */
577 :
578 : static enum tree_code
579 246590 : get_code_for_cast (tree dst_type, tree src_type)
580 : {
581 0 : if (!dst_type)
582 : return NOP_EXPR;
583 0 : if (!src_type)
584 : return NOP_EXPR;
585 :
586 241689 : if (SCALAR_FLOAT_TYPE_P (src_type))
587 : {
588 355 : if (TREE_CODE (dst_type) == INTEGER_TYPE)
589 : return FIX_TRUNC_EXPR;
590 : else
591 351 : return VIEW_CONVERT_EXPR;
592 : }
593 :
594 : return NOP_EXPR;
595 : }
596 :
597 : /* Return the svalue * for a cast of ARG to type TYPE, creating it
598 : if necessary. */
599 :
600 : const svalue *
601 1240324 : region_model_manager::get_or_create_cast (tree type, const svalue *arg)
602 : {
603 : /* No-op if the types are the same. */
604 1240324 : if (type == arg->get_type ())
605 : return arg;
606 :
607 : /* Don't attempt to handle casts involving vector types for now. */
608 256133 : if (type)
609 246676 : if (VECTOR_TYPE_P (type)
610 246676 : || (arg->get_type ()
611 241689 : && VECTOR_TYPE_P (arg->get_type ())))
612 86 : return get_or_create_unknown_svalue (type);
613 :
614 246590 : enum tree_code op = get_code_for_cast (type, arg->get_type ());
615 256047 : return get_or_create_unaryop (type, op, arg);
616 : }
617 :
618 : /* Subroutine of region_model_manager::maybe_fold_binop for handling
619 : (TYPE)(COMPOUND_SVAL BIT_AND_EXPR CST) that may have been generated by
620 : optimize_bit_field_compare, where CST is from ARG1.
621 :
622 : Support masking out bits from a compound_svalue for comparing a bitfield
623 : against a value, as generated by optimize_bit_field_compare for
624 : BITFIELD == VALUE.
625 :
626 : If COMPOUND_SVAL has a value for the appropriate bits, return it,
627 : shifted accordingly.
628 : Otherwise return nullptr. */
629 :
const svalue *
region_model_manager::
maybe_undo_optimize_bit_field_compare (tree type,
				       const compound_svalue *compound_sval,
				       tree cst,
				       const svalue *arg1)
{
  /* Only integral result types are handled.  */
  if (!type)
    return nullptr;
  if (!INTEGRAL_TYPE_P (type))
    return nullptr;

  const concrete_binding_map &map = compound_sval->get_concrete_bindings ();
  unsigned HOST_WIDE_INT mask = TREE_INT_CST_LOW (cst);
  /* If "mask" is a contiguous range of set bits, see if the
     compound_sval has a value for those bits.  */
  bit_range bits (0, 0);
  if (!bit_range::from_mask (mask, &bits))
    return nullptr;

  /* On big-endian targets the bits within the byte are numbered from
     the other end, so mirror the range within BITS_PER_UNIT before
     looking it up.  NOTE(review): this assumes the masked bitfield
     fits within a single byte — confirm against the caller.  */
  bit_range bound_bits (bits);
  if (BYTES_BIG_ENDIAN)
    bound_bits = bit_range (BITS_PER_UNIT - bits.get_next_bit_offset (),
			    bits.m_size_in_bits);
  const svalue *sval = map.get_any_exact_binding (bound_bits);
  if (!sval)
    {
      /* In theory we could also look for bindings that straddle the
	 bit range.  For simplicity, bail out on this case.  */
      return nullptr;
    }

  /* We have a value;
     shift it by the correct number of bits.  */
  const svalue *lhs = get_or_create_cast (type, sval);
  HOST_WIDE_INT bit_offset = bits.get_start_bit_offset ().to_shwi ();
  const svalue *shift_sval = get_or_create_int_cst (type, bit_offset);
  const svalue *shifted_sval = get_or_create_binop (type, LSHIFT_EXPR,
						    lhs, shift_sval);
  /* Reapply the mask (needed for negative
     signed bitfields).  */
  return get_or_create_binop (type, BIT_AND_EXPR,
			      shifted_sval, arg1);
}
674 :
675 : /* Subroutine of region_model_manager::get_or_create_binop.
676 : Attempt to fold the inputs and return a simpler svalue *.
677 : Otherwise, return nullptr. */
678 :
679 : const svalue *
680 233593 : region_model_manager::maybe_fold_binop (tree type, enum tree_code op,
681 : const svalue *arg0,
682 : const svalue *arg1)
683 : {
684 233593 : tree cst0 = arg0->maybe_get_constant ();
685 233593 : tree cst1 = arg1->maybe_get_constant ();
686 : /* (CST OP CST). */
687 233593 : if (cst0 && cst1)
688 : {
689 78459 : if (type)
690 : {
691 40948 : if (tree result = fold_binary (op, type, cst0, cst1))
692 40910 : if (CONSTANT_CLASS_P (result))
693 40908 : return get_or_create_constant_svalue (result);
694 : }
695 : else
696 : {
697 37511 : if (tree result = int_const_binop (op, cst0, cst1, -1))
698 37511 : return get_or_create_constant_svalue (NULL_TREE, result);
699 : }
700 : }
701 :
702 108316 : if ((type && FLOAT_TYPE_P (type))
703 154687 : || (arg0->get_type () && FLOAT_TYPE_P (arg0->get_type ()))
704 309809 : || (arg1->get_type () && FLOAT_TYPE_P (arg1->get_type ())))
705 : return nullptr;
706 :
707 154625 : switch (op)
708 : {
709 : default:
710 : break;
711 97558 : case POINTER_PLUS_EXPR:
712 97558 : case PLUS_EXPR:
713 : /* (VAL + 0) -> VAL. */
714 97558 : if (cst1 && zerop (cst1))
715 15143 : return get_or_create_cast (type, arg0);
716 : /* X + (-X) -> 0. */
717 82415 : if (const unaryop_svalue *unary_op = arg1->dyn_cast_unaryop_svalue ())
718 2044 : if (unary_op->get_op () == NEGATE_EXPR
719 780 : && unary_op->get_arg () == arg0
720 2166 : && type && (INTEGRAL_TYPE_P (type) || POINTER_TYPE_P (type)))
721 120 : return get_or_create_int_cst (type, 0);
722 : /* X + (Y - X) -> Y. */
723 82295 : if (const binop_svalue *bin_op = arg1->dyn_cast_binop_svalue ())
724 9120 : if (bin_op->get_op () == MINUS_EXPR)
725 203 : if (bin_op->get_arg1 () == arg0)
726 8 : return get_or_create_cast (type, bin_op->get_arg0 ());
727 : break;
728 4325 : case MINUS_EXPR:
729 : /* (VAL - 0) -> VAL. */
730 4325 : if (cst1 && zerop (cst1))
731 47 : return get_or_create_cast (type, arg0);
732 : /* (0 - VAL) -> -VAL. */
733 4278 : if (cst0 && zerop (cst0))
734 17 : return get_or_create_unaryop (type, NEGATE_EXPR, arg1);
735 : /* (X + Y) - X -> Y. */
736 4261 : if (const binop_svalue *binop = arg0->dyn_cast_binop_svalue ())
737 860 : if (binop->get_op () == PLUS_EXPR)
738 125 : if (binop->get_arg0 () == arg1)
739 15 : return get_or_create_cast (type, binop->get_arg1 ());
740 : break;
741 26441 : case MULT_EXPR:
742 : /* (VAL * 0). */
743 26441 : if (cst1
744 25835 : && zerop (cst1)
745 26505 : && (type == NULL_TREE || INTEGRAL_TYPE_P (type)))
746 56 : return get_or_create_int_cst (type, 0);
747 : /* (VAL * 1) -> VAL. */
748 26385 : if (cst1 && integer_onep (cst1))
749 308 : return get_or_create_cast (type, arg0);
750 : break;
751 3292 : case BIT_AND_EXPR:
752 3292 : if (cst1)
753 : {
754 2762 : if (zerop (cst1)
755 2762 : && (type == NULL_TREE || INTEGRAL_TYPE_P (type)))
756 : /* "(ARG0 & 0)" -> "0". */
757 58 : return get_or_create_int_cst (type, 0);
758 :
759 5408 : if (const compound_svalue *compound_sval
760 2704 : = arg0->dyn_cast_compound_svalue ())
761 116 : if (const svalue *sval
762 58 : = maybe_undo_optimize_bit_field_compare (type,
763 : compound_sval,
764 : cst1, arg1))
765 : return sval;
766 : }
767 3178 : if (arg0->get_type () == boolean_type_node
768 3178 : && arg1->get_type () == boolean_type_node)
769 : {
770 : /* If the LHS are both _Bool, then... */
771 : /* ..."(1 & x) -> x". */
772 559 : if (cst0 && !zerop (cst0))
773 0 : return get_or_create_cast (type, arg1);
774 : /* ..."(x & 1) -> x". */
775 559 : if (cst1 && !zerop (cst1))
776 268 : return get_or_create_cast (type, arg0);
777 : /* ..."(0 & x) -> 0". */
778 291 : if (cst0 && zerop (cst0))
779 0 : return get_or_create_int_cst (type, 0);
780 : /* ..."(x & 0) -> 0". */
781 291 : if (cst1 && zerop (cst1))
782 0 : return get_or_create_int_cst (type, 0);
783 : }
784 : break;
785 3174 : case BIT_IOR_EXPR:
786 3174 : if (arg0->get_type () == boolean_type_node
787 3174 : && arg1->get_type () == boolean_type_node)
788 : {
789 : /* If the LHS are both _Bool, then... */
790 : /* ..."(1 | x) -> 1". */
791 120 : if (cst0 && !zerop (cst0))
792 0 : return get_or_create_int_cst (type, 1);
793 : /* ..."(x | 1) -> 1". */
794 120 : if (cst1 && !zerop (cst1))
795 38 : return get_or_create_int_cst (type, 1);
796 : /* ..."(0 | x) -> x". */
797 82 : if (cst0 && zerop (cst0))
798 0 : return get_or_create_cast (type, arg1);
799 : /* ..."(x | 0) -> x". */
800 82 : if (cst1 && zerop (cst1))
801 20 : return get_or_create_cast (type, arg0);
802 : }
803 : break;
804 12 : case TRUTH_ANDIF_EXPR:
805 12 : case TRUTH_AND_EXPR:
806 12 : if (cst1)
807 : {
808 12 : if (zerop (cst1) && INTEGRAL_TYPE_P (type))
809 : /* "(ARG0 && 0)" -> "0". */
810 4 : return get_or_create_constant_svalue (build_int_cst (type, 0));
811 : else
812 : /* "(ARG0 && nonzero-cst)" -> "ARG0". */
813 8 : return get_or_create_cast (type, arg0);
814 : }
815 : break;
816 12 : case TRUTH_ORIF_EXPR:
817 12 : case TRUTH_OR_EXPR:
818 12 : if (cst1)
819 : {
820 12 : if (zerop (cst1))
821 : /* "(ARG0 || 0)" -> "ARG0". */
822 8 : return get_or_create_cast (type, arg0);
823 : else
824 : /* "(ARG0 && nonzero-cst)" -> "nonzero-cst". */
825 4 : return get_or_create_cast (type, arg1);
826 : }
827 : break;
828 :
829 1220 : case TRUNC_DIV_EXPR:
830 1220 : case CEIL_DIV_EXPR:
831 1220 : case FLOOR_DIV_EXPR:
832 1220 : case ROUND_DIV_EXPR:
833 1220 : case TRUNC_MOD_EXPR:
834 1220 : case CEIL_MOD_EXPR:
835 1220 : case FLOOR_MOD_EXPR:
836 1220 : case ROUND_MOD_EXPR:
837 1220 : case RDIV_EXPR:
838 1220 : case EXACT_DIV_EXPR:
839 1220 : {
840 1220 : value_range arg1_vr;
841 1220 : if (arg1->maybe_get_value_range (arg1_vr))
842 1123 : if (arg1_vr.zero_p ())
843 0 : return get_or_create_unknown_svalue (type);
844 1220 : }
845 1220 : break;
846 : }
847 :
848 : /* For associative ops, fold "(X op CST_A) op CST_B)" to
849 : "X op (CST_A op CST_B)". */
850 138447 : if (cst1 && associative_tree_code (op))
851 64532 : if (const binop_svalue *binop = arg0->dyn_cast_binop_svalue ())
852 16891 : if (binop->get_op () == op
853 16891 : && binop->get_arg1 ()->maybe_get_constant ())
854 2574 : return get_or_create_binop
855 2574 : (type, op, binop->get_arg0 (),
856 : get_or_create_binop (type, op,
857 2574 : binop->get_arg1 (), arg1));
858 :
859 : /* associative_tree_code is false for POINTER_PLUS_EXPR, but we
860 : can fold:
861 : "(PTR ptr+ CST_A) ptr+ CST_B)" to "PTR ptr+ (CST_A ptr+ CST_B)"
862 : e.g. in data-model-1.c: test_4c. */
863 135873 : if (cst1 && op == POINTER_PLUS_EXPR)
864 26994 : if (const binop_svalue *binop = arg0->dyn_cast_binop_svalue ())
865 1672 : if (binop->get_op () == POINTER_PLUS_EXPR)
866 1520 : if (binop->get_arg1 ()->maybe_get_constant ())
867 1179 : return get_or_create_binop
868 1179 : (type, op, binop->get_arg0 (),
869 : get_or_create_binop (size_type_node, op,
870 1179 : binop->get_arg1 (), arg1));
871 :
872 : /* Distribute multiplication by a constant through addition/subtraction:
873 : (X + Y) * CST => (X * CST) + (Y * CST). */
874 134694 : if (cst1 && op == MULT_EXPR)
875 25308 : if (const binop_svalue *binop = arg0->dyn_cast_binop_svalue ())
876 2821 : if (binop->get_op () == PLUS_EXPR
877 2821 : || binop->get_op () == MINUS_EXPR)
878 : {
879 2150 : return get_or_create_binop
880 2150 : (type, binop->get_op (),
881 : get_or_create_binop (type, op,
882 : binop->get_arg0 (), arg1),
883 : get_or_create_binop (type, op,
884 2150 : binop->get_arg1 (), arg1));
885 : }
886 :
887 :
888 : /* Typeless operations, assumed to be effectively arbitrary sized
889 : integers following normal arithmetic rules. */
890 132544 : if (!type)
891 34361 : switch (op)
892 : {
893 : default:
894 : break;
895 827 : case MINUS_EXPR:
896 827 : {
897 : /* (X - X) -> 0. */
898 827 : if (arg0 == arg1)
899 751 : return get_or_create_int_cst (type, 0);
900 :
901 : /* (X + A) - (A + B) -> (A - B). */
902 76 : if (const binop_svalue *binop0 = arg0->dyn_cast_binop_svalue ())
903 60 : if (const binop_svalue *binop1 = arg1->dyn_cast_binop_svalue ())
904 36 : if (binop0->get_op () == PLUS_EXPR
905 28 : && binop1->get_op () == PLUS_EXPR
906 56 : && binop0->get_arg0 () == binop1->get_arg0 ())
907 20 : return get_or_create_binop (NULL_TREE, op,
908 : binop0->get_arg1 (),
909 20 : binop1->get_arg1 ());
910 : }
911 : break;
912 :
913 73 : case EXACT_DIV_EXPR:
914 73 : {
915 73 : if (const unaryop_svalue *unaryop0 = arg0->dyn_cast_unaryop_svalue ())
916 : {
917 0 : if (unaryop0->get_op () == NOP_EXPR)
918 0 : if (const svalue *sval = maybe_fold_binop (NULL_TREE, op,
919 : unaryop0->get_arg (),
920 : arg1))
921 : return sval;
922 : }
923 73 : if (const binop_svalue *binop0 = arg0->dyn_cast_binop_svalue ())
924 : {
925 72 : switch (binop0->get_op ())
926 : {
927 : default:
928 : break;
929 :
930 26 : case PLUS_EXPR:
931 26 : case MINUS_EXPR:
932 : /* (A op B) / C -> (A / C) op (B / C). */
933 26 : {
934 26 : if (const svalue *op_on_a
935 26 : = maybe_fold_binop (NULL_TREE, op,
936 : binop0->get_arg0 (), arg1))
937 26 : if (const svalue *op_on_b
938 26 : = maybe_fold_binop (NULL_TREE, op,
939 : binop0->get_arg1 (), arg1))
940 26 : return get_or_create_binop (NULL_TREE,
941 : binop0->get_op (),
942 26 : op_on_a, op_on_b);
943 : }
944 : break;
945 :
946 46 : case MULT_EXPR:
947 : /* (A * B) / C -> A * (B / C) if C is a divisor of B.
948 : In particular, this should also handle the case
949 : (A * B) / B -> A. */
950 46 : if (const svalue *b_div_c
951 46 : = maybe_fold_binop (NULL_TREE, op,
952 : binop0->get_arg1 (), arg1))
953 46 : return get_or_create_binop (NULL_TREE, binop0->get_op (),
954 46 : binop0->get_arg0 (), b_div_c);
955 : }
956 : }
957 : }
958 : break;
959 : }
960 :
961 : /* etc. */
962 :
963 : return nullptr;
964 : }
965 :
966 : /* Return the svalue * for a binary operation OP on ARG0 and ARG1
967 :    with a result of type TYPE, creating it if necessary.  */
968 :
const svalue *
region_model_manager::get_or_create_binop (tree type, enum tree_code op,
					   const svalue *arg0,
					   const svalue *arg1)
{
  /* For commutative ops, put any constant on the RHS.  This canonicalizes
     the key used for consolidation below, and lets the folding code
     assume constants are in ARG1.  */
  if (arg0->maybe_get_constant () && commutative_tree_code (op))
    std::swap (arg0, arg1);

  /* Attempt to simplify before consolidating.  */
  if (const svalue *folded = maybe_fold_binop (type, op, arg0, arg1))
    return folded;

  /* Ops on "unknown"/"poisoned" are unknown (unless we were able to fold
     it via an identity in maybe_fold_binop).  */
  if (!arg0->can_have_associated_state_p ()
      || !arg1->can_have_associated_state_p ())
    return get_or_create_unknown_svalue (type);

  /* Consolidate: reuse any existing binop_svalue for this
     (type, op, arg0, arg1) tuple.  */
  binop_svalue::key_t key (type, op, arg0, arg1);
  if (binop_svalue **slot = m_binop_values_map.get (key))
    return *slot;
  binop_svalue *binop_sval
    = new binop_svalue (alloc_symbol_id (), type, op, arg0, arg1);
  /* Give up and return "unknown" rather than building an overly-complex
     symbolic value.  */
  RETURN_UNKNOWN_IF_TOO_COMPLEX (binop_sval);
  m_binop_values_map.put (key, binop_sval);
  return binop_sval;
}
996 :
997 : /* Subroutine of region_model_manager::get_or_create_sub_svalue.
998 : Return a folded svalue, or nullptr. */
999 :
const svalue *
region_model_manager::maybe_fold_sub_svalue (tree type,
					     const svalue *parent_svalue,
					     const region *subregion)
{
  /* Subvalues of "unknown"/"poisoned" are unknown.  */
  if (!parent_svalue->can_have_associated_state_p ())
    return get_or_create_unknown_svalue (type);

  /* If we have a subvalue of a zero constant, it's zero.  */
  if (tree cst = parent_svalue->maybe_get_constant ())
    if (TREE_CODE (cst) == INTEGER_CST)
      if (zerop (cst))
	return get_or_create_cast (type, parent_svalue);

  /* If we have a subregion of a zero-fill (a cast of a zero constant),
     it's zero.  */
  if (const unaryop_svalue *unary
	= parent_svalue->dyn_cast_unaryop_svalue ())
    {
      if (unary->get_op () == NOP_EXPR
	  || unary->get_op () == VIEW_CONVERT_EXPR)
	if (tree cst = unary->get_arg ()->maybe_get_constant ())
	  if (zerop (cst) && type)
	    {
	      const svalue *cst_sval
		= get_or_create_constant_svalue (cst);
	      return get_or_create_cast (type, cst_sval);
	    }
    }

  /* Handle getting individual chars from a STRING_CST or RAW_DATA_CST.  */
  if (tree cst = parent_svalue->maybe_get_constant ())
    if (TREE_CODE (cst) == STRING_CST
	|| TREE_CODE (cst) == RAW_DATA_CST)
      {
	/* If we have a concrete 1-byte access within the parent region... */
	byte_range subregion_bytes (0, 0);
	if (subregion->get_relative_concrete_byte_range (&subregion_bytes)
	    && subregion_bytes.m_size_in_bytes == 1
	    && type)
	  {
	    /* ...then attempt to get that char from the constant.  */
	    HOST_WIDE_INT hwi_start_byte
	      = subregion_bytes.m_start_byte_offset.to_shwi ();
	    tree cst_idx
	      = build_int_cst_type (size_type_node, hwi_start_byte);
	    if (const svalue *char_sval
		  = maybe_get_char_from_cst (cst, cst_idx))
	      return get_or_create_cast (type, char_sval);
	  }
      }

  if (const initial_svalue *init_sval
	= parent_svalue->dyn_cast_initial_svalue ())
    {
      /* SUB(INIT(r)).FIELD -> INIT(r.FIELD)
	 i.e.
	 Subvalue(InitialValue(R1), FieldRegion(R2, F))
	 -> InitialValue(FieldRegion(R1, F)).  */
      if (const field_region *field_reg = subregion->dyn_cast_field_region ())
	{
	  const region *field_reg_new
	    = get_field_region (init_sval->get_region (),
				field_reg->get_field ());
	  return get_or_create_initial_value (field_reg_new);
	}
      /* SUB(INIT(r)[ELEMENT] -> INIT(e[ELEMENT])
	 i.e.
	 Subvalue(InitialValue(R1), ElementRegion(R2, IDX))
	 -> InitialValue(ElementRegion(R1, IDX)).  */
      if (const element_region *element_reg = subregion->dyn_cast_element_region ())
	{
	  const region *element_reg_new
	    = get_element_region (init_sval->get_region (),
				  element_reg->get_type (),
				  element_reg->get_index ());
	  return get_or_create_initial_value (element_reg_new);
	}
    }

  /* Any subvalue of a repeated svalue is the inner svalue, cast to TYPE
     (when TYPE is known).  */
  if (const repeated_svalue *repeated_sval
	= parent_svalue->dyn_cast_repeated_svalue ())
    if (type)
      return get_or_create_cast (type, repeated_sval->get_inner_svalue ());

  return nullptr;
}
1087 :
1088 : /* Return the svalue * for extracting a subvalue of type TYPE from
1089 : PARENT_SVALUE based on SUBREGION, creating it if necessary. */
1090 :
const svalue *
region_model_manager::get_or_create_sub_svalue (tree type,
						const svalue *parent_svalue,
						const region *subregion)
{
  /* Attempt to simplify the extraction before consolidating.  */
  if (const svalue *folded
	= maybe_fold_sub_svalue (type, parent_svalue, subregion))
    return folded;

  /* Consolidate: reuse any existing sub_svalue for this key.  */
  sub_svalue::key_t key (type, parent_svalue, subregion);
  if (sub_svalue **slot = m_sub_values_map.get (key))
    return *slot;
  sub_svalue *sub_sval
    = new sub_svalue (alloc_symbol_id (), type, parent_svalue, subregion);
  /* Give up and return "unknown" rather than building an overly-complex
     symbolic value.  */
  RETURN_UNKNOWN_IF_TOO_COMPLEX (sub_sval);
  m_sub_values_map.put (key, sub_sval);
  return sub_sval;
}
1109 :
1110 : /* Subroutine of region_model_manager::get_or_create_repeated_svalue.
1111 : Return a folded svalue, or nullptr. */
1112 :
const svalue *
region_model_manager::maybe_fold_repeated_svalue (tree type,
						  const svalue *outer_size,
						  const svalue *inner_svalue)
{
  /* Repeated "unknown"/"poisoned" is unknown.  */
  if (!outer_size->can_have_associated_state_p ()
      || !inner_svalue->can_have_associated_state_p ())
    return get_or_create_unknown_svalue (type);

  /* If INNER_SVALUE is the same size as OUTER_SIZE,
     turn into simply a cast.  */
  if (inner_svalue->get_type ())
    if (tree cst_outer_num_bytes = outer_size->maybe_get_constant ())
      {
	HOST_WIDE_INT num_bytes_inner_svalue
	  = int_size_in_bytes (inner_svalue->get_type ());
	/* int_size_in_bytes returns -1 for non-constant sizes;
	   skip the fold in that case.  */
	if (num_bytes_inner_svalue != -1)
	  if (num_bytes_inner_svalue
	      == (HOST_WIDE_INT)tree_to_uhwi (cst_outer_num_bytes))
	    {
	      if (type)
		return get_or_create_cast (type, inner_svalue);
	      else
		return inner_svalue;
	    }
      }

  /* Handle zero-fill of a specific type: any number of repeated zeroes
     reads back as zero of TYPE.  */
  if (tree cst = inner_svalue->maybe_get_constant ())
    if (zerop (cst) && type)
      return get_or_create_cast (type, inner_svalue);

  return nullptr;
}
1148 :
1149 : /* Return the svalue * of type TYPE in which INNER_SVALUE is repeated
1150 : enough times to be of size OUTER_SIZE, creating it if necessary.
1151 : e.g. for filling buffers with a constant value. */
1152 :
const svalue *
region_model_manager::get_or_create_repeated_svalue (tree type,
						     const svalue *outer_size,
						     const svalue *inner_svalue)
{
  /* Attempt to simplify before consolidating.  */
  if (const svalue *folded
	= maybe_fold_repeated_svalue (type, outer_size, inner_svalue))
    return folded;

  /* Consolidate: reuse any existing repeated_svalue for this key.  */
  repeated_svalue::key_t key (type, outer_size, inner_svalue);
  if (repeated_svalue **slot = m_repeated_values_map.get (key))
    return *slot;
  repeated_svalue *repeated_sval
    = new repeated_svalue (alloc_symbol_id (), type, outer_size, inner_svalue);
  /* Give up and return "unknown" rather than building an overly-complex
     symbolic value.  */
  RETURN_UNKNOWN_IF_TOO_COMPLEX (repeated_sval);
  m_repeated_values_map.put (key, repeated_sval);
  return repeated_sval;
}
1171 :
1172 : /* Attempt to get the bit_range for FIELD within a RECORD_TYPE.
1173 : Return true and write the result to OUT if successful.
1174 : Return false otherwise. */
1175 :
1176 : static bool
1177 947 : get_bit_range_for_field (tree field, bit_range *out)
1178 : {
1179 947 : bit_size_t bit_size;
1180 947 : if (!int_size_in_bits (TREE_TYPE (field), &bit_size))
1181 : return false;
1182 947 : int field_bit_offset = int_bit_position (field);
1183 947 : *out = bit_range (field_bit_offset, bit_size);
1184 947 : return true;
1185 : }
1186 :
1187 : /* Attempt to get the byte_range for FIELD within a RECORD_TYPE.
1188 : Return true and write the result to OUT if successful.
1189 : Return false otherwise. */
1190 :
1191 : static bool
1192 947 : get_byte_range_for_field (tree field, byte_range *out)
1193 : {
1194 947 : bit_range field_bits (0, 0);
1195 947 : if (!get_bit_range_for_field (field, &field_bits))
1196 : return false;
1197 947 : return field_bits.as_byte_range (out);
1198 : }
1199 :
1200 : /* Attempt to determine if there is a specific field within RECORD_TYPE
1201 : at BYTES. If so, return it, and write the location of BYTES relative
1202 : to the field to *OUT_RANGE_WITHIN_FIELD.
1203 : Otherwise, return NULL_TREE.
1204 : For example, given:
1205 :      struct foo { uint32 a; uint32 b; };
1206 : and
1207 : bytes = {bytes 6-7} (of foo)
1208 : we have bytes 3-4 of field b. */
1209 :
static tree
get_field_at_byte_range (tree record_type, const byte_range &bytes,
			 byte_range *out_range_within_field)
{
  /* Locate the field overlapping the first byte of BYTES.  */
  bit_offset_t bit_offset = bytes.m_start_byte_offset * BITS_PER_UNIT;

  tree field = get_field_at_bit_offset (record_type, bit_offset);
  if (!field)
    return NULL_TREE;

  /* Determine the field's own extent, in bytes; give up if it is
     not byte-aligned or not of constant size.  */
  byte_range field_bytes (0,0);
  if (!get_byte_range_for_field (field, &field_bytes))
    return NULL_TREE;

  /* Is BYTES fully within field_bytes?  */
  byte_range bytes_within_field (0,0);
  if (!field_bytes.contains_p (bytes, &bytes_within_field))
    return NULL_TREE;

  /* Success: report the offset of BYTES relative to the field.  */
  *out_range_within_field = bytes_within_field;
  return field;
}
1232 :
1233 : /* Subroutine of region_model_manager::get_or_create_bits_within.
1234 : Return a folded svalue, or NULL. */
1235 :
const svalue *
region_model_manager::maybe_fold_bits_within_svalue (tree type,
						     const bit_range &bits,
						     const svalue *inner_svalue)
{
  tree inner_type = inner_svalue->get_type ();
  /* Fold:
       BITS_WITHIN ((0, sizeof (VAL), VAL))
     to:
       CAST(TYPE, VAL).  */
  if (bits.m_start_bit_offset == 0 && inner_type)
    {
      bit_size_t inner_type_size;
      if (int_size_in_bits (inner_type, &inner_type_size))
	if (inner_type_size == bits.m_size_in_bits)
	  {
	    if (type)
	      return get_or_create_cast (type, inner_svalue);
	    else
	      return inner_svalue;
	  }
    }

  /* Kind-specific folding (dispatched on INNER_SVALUE's kind).  */
  if (const svalue *sval
	= inner_svalue->maybe_fold_bits_within (type, bits, this))
    return sval;

  /* The remaining folds only apply to byte-aligned ranges of a
     typed inner svalue.  */
  byte_range bytes (0,0);
  if (bits.as_byte_range (&bytes) && inner_type)
    switch (TREE_CODE (inner_type))
      {
      default:
	break;
      case ARRAY_TYPE:
	{
	  /* Fold:
	       BITS_WITHIN (range, KIND(REG))
	     to:
	       BITS_WITHIN (range - offsetof(ELEMENT), KIND(REG.ELEMENT))
	     if range1 is a byte-range fully within one ELEMENT.  */
	  tree element_type = TREE_TYPE (inner_type);
	  HOST_WIDE_INT element_byte_size
	    = int_size_in_bytes (element_type);
	  if (element_byte_size > 0)
	    {
	      HOST_WIDE_INT start_idx
		= (bytes.get_start_byte_offset ().to_shwi ()
		   / element_byte_size);
	      HOST_WIDE_INT last_idx
		= (bytes.get_last_byte_offset ().to_shwi ()
		   / element_byte_size);
	      /* start_idx == last_idx means the range does not
		 straddle an element boundary.  */
	      if (start_idx == last_idx)
		{
		  if (const initial_svalue *initial_sval
			= inner_svalue->dyn_cast_initial_svalue ())
		    {
		      bit_offset_t start_of_element
			= start_idx * element_byte_size * BITS_PER_UNIT;
		      bit_range bits_within_element
			(bits.m_start_bit_offset - start_of_element,
			 bits.m_size_in_bits);
		      const svalue *idx_sval
			= get_or_create_int_cst (integer_type_node, start_idx);
		      const region *element_reg =
			get_element_region (initial_sval->get_region (),
					    element_type, idx_sval);
		      const svalue *element_reg_sval
			= get_or_create_initial_value (element_reg);
		      return get_or_create_bits_within (type,
							bits_within_element,
							element_reg_sval);
		    }
		}
	    }
	}
	break;
      case RECORD_TYPE:
	{
	  /* Fold:
	       BYTES_WITHIN (range, KIND(REG))
	     to:
	       BYTES_WITHIN (range - offsetof(FIELD), KIND(REG.FIELD))
	     if range1 is fully within FIELD.  */
	  byte_range bytes_within_field (0, 0);
	  if (tree field = get_field_at_byte_range (inner_type, bytes,
						    &bytes_within_field))
	    {
	      if (const initial_svalue *initial_sval
		    = inner_svalue->dyn_cast_initial_svalue ())
		{
		  const region *field_reg =
		    get_field_region (initial_sval->get_region (), field);
		  const svalue *initial_reg_sval
		    = get_or_create_initial_value (field_reg);
		  return get_or_create_bits_within
		    (type,
		     bytes_within_field.as_bit_range (),
		     initial_reg_sval);
		}
	    }
	}
	break;
      }
  return nullptr;
}
1342 :
1343 : /* Return the svalue * of type TYPE for extracting BITS from INNER_SVALUE,
1344 : creating it if necessary. */
1345 :
const svalue *
region_model_manager::get_or_create_bits_within (tree type,
						 const bit_range &bits,
						 const svalue *inner_svalue)
{
  /* Attempt to simplify before consolidating.  */
  if (const svalue *folded
	= maybe_fold_bits_within_svalue (type, bits, inner_svalue))
    return folded;

  /* Consolidate: reuse any existing bits_within_svalue for this key.  */
  bits_within_svalue::key_t key (type, bits, inner_svalue);
  if (bits_within_svalue **slot = m_bits_within_values_map.get (key))
    return *slot;
  bits_within_svalue *bits_within_sval
    = new bits_within_svalue (alloc_symbol_id (), type, bits, inner_svalue);
  /* Give up and return "unknown" rather than building an overly-complex
     symbolic value.  */
  RETURN_UNKNOWN_IF_TOO_COMPLEX (bits_within_sval);
  m_bits_within_values_map.put (key, bits_within_sval);
  return bits_within_sval;
}
1364 :
1365 : /* Return the svalue * that decorates ARG as being unmergeable,
1366 : creating it if necessary. */
1367 :
1368 : const svalue *
1369 869 : region_model_manager::get_or_create_unmergeable (const svalue *arg)
1370 : {
1371 869 : if (arg->get_kind () == SK_UNMERGEABLE)
1372 : return arg;
1373 :
1374 869 : if (unmergeable_svalue **slot = m_unmergeable_values_map.get (arg))
1375 659 : return *slot;
1376 210 : unmergeable_svalue *unmergeable_sval
1377 210 : = new unmergeable_svalue (alloc_symbol_id (), arg);
1378 210 : RETURN_UNKNOWN_IF_TOO_COMPLEX (unmergeable_sval);
1379 210 : m_unmergeable_values_map.put (arg, unmergeable_sval);
1380 210 : return unmergeable_sval;
1381 : }
1382 :
1383 : /* Return the svalue * of type TYPE for the merger of value BASE_SVAL
1384 : and ITER_SVAL at SNODE, creating it if necessary. */
1385 :
const svalue *
region_model_manager::
get_or_create_widening_svalue (tree type,
			       const supernode *snode,
			       const svalue *base_sval,
			       const svalue *iter_sval)
{
  /* Widening svalues must not be nested.  */
  gcc_assert (base_sval->get_kind () != SK_WIDENING);
  gcc_assert (iter_sval->get_kind () != SK_WIDENING);
  /* Consolidate: reuse any existing widening_svalue for this key.  */
  widening_svalue::key_t key (type, snode, base_sval, iter_sval);
  if (widening_svalue **slot = m_widening_values_map.get (key))
    return *slot;
  widening_svalue *widening_sval
    = new widening_svalue (alloc_symbol_id (), type, snode, base_sval,
			   iter_sval);
  /* Give up and return "unknown" rather than building an overly-complex
     symbolic value.  */
  RETURN_UNKNOWN_IF_TOO_COMPLEX (widening_sval);
  m_widening_values_map.put (key, widening_sval);
  return widening_sval;
}
1405 :
1406 : /* Return the svalue * of type TYPE for the compound values in MAP,
1407 : creating it if necessary. */
1408 :
const svalue *
region_model_manager::get_or_create_compound_svalue (tree type,
						     concrete_binding_map &&map)
{
  /* Consolidate: reuse any existing compound_svalue for this key.  */
  compound_svalue::key_t tmp_key (type, &map);
  if (compound_svalue **slot = m_compound_values_map.get (tmp_key))
    return *slot;
  /* This overload moves MAP into the new svalue; MAP must not be
     used by the caller afterwards.  */
  compound_svalue *compound_sval
    = new compound_svalue (alloc_symbol_id (), type, std::move (map));
  RETURN_UNKNOWN_IF_TOO_COMPLEX (compound_sval);
  /* Use make_key rather than reusing the key, so that we use a
     ptr to compound_sval's binding_map, rather than the MAP param.  */
  m_compound_values_map.put (compound_sval->make_key (), compound_sval);
  return compound_sval;
}
1424 :
1425 : /* Return the svalue * of type TYPE for the compound values in MAP,
1426 : creating it if necessary. */
1427 :
const svalue *
region_model_manager::get_or_create_compound_svalue (tree type,
						     const concrete_binding_map &map)
{
  /* Consolidate: reuse any existing compound_svalue for this key.  */
  compound_svalue::key_t tmp_key (type, &map);
  if (compound_svalue **slot = m_compound_values_map.get (tmp_key))
    return *slot;
  /* This overload copies MAP (cf. the rvalue-reference overload).  */
  compound_svalue *compound_sval
    = new compound_svalue (alloc_symbol_id (), type, map);
  RETURN_UNKNOWN_IF_TOO_COMPLEX (compound_sval);
  /* Use make_key rather than reusing the key, so that we use a
     ptr to compound_sval's binding_map, rather than the MAP param.  */
  m_compound_values_map.put (compound_sval->make_key (), compound_sval);
  return compound_sval;
}
1443 :
1444 : /* class conjured_purge. */
1445 :
1446 : /* Purge state relating to SVAL. */
1447 :
void
conjured_purge::purge (const conjured_svalue *sval) const
{
  /* Delegate to the model, using the context captured at construction.  */
  m_model->purge_state_involving (sval, m_ctxt);
}
1453 :
1454 : /* Return the svalue * of type TYPE for the value conjured for ID_REG
1455 : at STMT (using IDX for any further disambiguation),
1456 : creating it if necessary.
1457 : Use P to purge existing state from the svalue, for the case where a
1458 : conjured_svalue would be reused along an execution path. */
1459 :
const svalue *
region_model_manager::get_or_create_conjured_svalue (tree type,
						     const gimple *stmt,
						     const region *id_reg,
						     const conjured_purge &p,
						     unsigned idx)
{
  /* Consolidate on (type, stmt, id_reg, idx).  */
  conjured_svalue::key_t key (type, stmt, id_reg, idx);
  if (conjured_svalue **slot = m_conjured_values_map.get (key))
    {
      const conjured_svalue *sval = *slot;
      /* We're reusing an existing conjured_svalue, perhaps from a different
	 state within this analysis, or perhaps from an earlier state on this
	 execution path.  For the latter, purge any state involving the "new"
	 svalue from the current program_state.  */
      p.purge (sval);
      return sval;
    }
  conjured_svalue *conjured_sval
    = new conjured_svalue (alloc_symbol_id (), type, stmt, id_reg, idx);
  /* Give up and return "unknown" rather than building an overly-complex
     symbolic value.  */
  RETURN_UNKNOWN_IF_TOO_COMPLEX (conjured_sval);
  m_conjured_values_map.put (key, conjured_sval);
  return conjured_sval;
}
1484 :
1485 : /* Subroutine of region_model_manager::get_or_create_asm_output_svalue.
1486 : Return a folded svalue, or nullptr. */
1487 :
1488 : const svalue *
1489 367 : region_model_manager::
1490 : maybe_fold_asm_output_svalue (tree type,
1491 : const vec<const svalue *> &inputs)
1492 : {
1493 : /* Unknown inputs should lead to unknown results. */
1494 1653 : for (const auto &iter : inputs)
1495 609 : if (iter->get_kind () == SK_UNKNOWN)
1496 9 : return get_or_create_unknown_svalue (type);
1497 :
1498 : return nullptr;
1499 : }
1500 :
1501 : /* Return the svalue * of type TYPE for OUTPUT_IDX of the deterministic
1502 : asm stmt ASM_STMT, given INPUTS as inputs. */
1503 :
const svalue *
region_model_manager::
get_or_create_asm_output_svalue (tree type,
				 const gasm *asm_stmt,
				 unsigned output_idx,
				 const vec<const svalue *> &inputs)
{
  gcc_assert (inputs.length () <= asm_output_svalue::MAX_INPUTS);

  /* Attempt to simplify before consolidating.  */
  if (const svalue *folded
	= maybe_fold_asm_output_svalue (type, inputs))
    return folded;

  /* Extract the template string and output count from the stmt;
     these identify the asm for consolidation purposes.  */
  const char *asm_string = gimple_asm_string (asm_stmt);
  const unsigned noutputs = gimple_asm_noutputs (asm_stmt);

  asm_output_svalue::key_t key (type, asm_string, output_idx, inputs);
  if (asm_output_svalue **slot = m_asm_output_values_map.get (key))
    return *slot;
  asm_output_svalue *asm_output_sval
    = new asm_output_svalue (alloc_symbol_id (), type, asm_string, output_idx,
			     noutputs, inputs);
  /* Give up and return "unknown" rather than building an overly-complex
     symbolic value.  */
  RETURN_UNKNOWN_IF_TOO_COMPLEX (asm_output_sval);
  m_asm_output_values_map.put (key, asm_output_sval);
  return asm_output_sval;
}
1530 :
1531 : /* Return the svalue * of type TYPE for OUTPUT_IDX of a deterministic
1532 : asm stmt with string ASM_STRING with NUM_OUTPUTS outputs, given
1533 : INPUTS as inputs. */
1534 :
const svalue *
region_model_manager::
get_or_create_asm_output_svalue (tree type,
				 const char *asm_string,
				 unsigned output_idx,
				 unsigned num_outputs,
				 const vec<const svalue *> &inputs)
{
  gcc_assert (inputs.length () <= asm_output_svalue::MAX_INPUTS);

  /* Attempt to simplify before consolidating.  */
  if (const svalue *folded
	= maybe_fold_asm_output_svalue (type, inputs))
    return folded;

  /* Consolidate: reuse any existing asm_output_svalue for this key
     (cf. the gasm * overload, which extracts these from the stmt).  */
  asm_output_svalue::key_t key (type, asm_string, output_idx, inputs);
  if (asm_output_svalue **slot = m_asm_output_values_map.get (key))
    return *slot;
  asm_output_svalue *asm_output_sval
    = new asm_output_svalue (alloc_symbol_id (), type, asm_string, output_idx,
			     num_outputs, inputs);
  /* Give up and return "unknown" rather than building an overly-complex
     symbolic value.  */
  RETURN_UNKNOWN_IF_TOO_COMPLEX (asm_output_sval);
  m_asm_output_values_map.put (key, asm_output_sval);
  return asm_output_sval;
}
1559 :
1560 : /* Return the svalue * of type TYPE for the result of a call to FNDECL
1561 : with __attribute__((const)), given INPUTS as inputs. */
1562 :
const svalue *
region_model_manager::
get_or_create_const_fn_result_svalue (tree type,
				      tree fndecl,
				      const vec<const svalue *> &inputs)
{
  gcc_assert (fndecl);
  gcc_assert (DECL_P (fndecl));
  /* TREE_READONLY on a decl corresponds to __attribute__((const)):
     the result depends only on the inputs, so it can be consolidated
     purely on (type, fndecl, inputs).  */
  gcc_assert (TREE_READONLY (fndecl));
  gcc_assert (inputs.length () <= const_fn_result_svalue::MAX_INPUTS);

  const_fn_result_svalue::key_t key (type, fndecl, inputs);
  if (const_fn_result_svalue **slot = m_const_fn_result_values_map.get (key))
    return *slot;
  const_fn_result_svalue *const_fn_result_sval
    = new const_fn_result_svalue (alloc_symbol_id (), type, fndecl, inputs);
  /* Give up and return "unknown" rather than building an overly-complex
     symbolic value.  */
  RETURN_UNKNOWN_IF_TOO_COMPLEX (const_fn_result_sval);
  m_const_fn_result_values_map.put (key, const_fn_result_sval);
  return const_fn_result_sval;
}
1583 :
1584 : /* Given DATA_CST (a STRING_CST or RAW_DATA_CST) and BYTE_OFFSET_CST a constant,
1585 : attempt to get the character at that offset, returning either
1586 : the svalue for the character constant, or nullptr if unsuccessful. */
1587 :
1588 : const svalue *
1589 6117 : region_model_manager::maybe_get_char_from_cst (tree data_cst,
1590 : tree byte_offset_cst)
1591 : {
1592 6117 : switch (TREE_CODE (data_cst))
1593 : {
1594 0 : default: gcc_unreachable ();
1595 4625 : case STRING_CST:
1596 4625 : return maybe_get_char_from_string_cst (data_cst, byte_offset_cst);
1597 1492 : case RAW_DATA_CST:
1598 1492 : return maybe_get_char_from_raw_data_cst (data_cst, byte_offset_cst);
1599 : }
1600 : }
1601 :
1602 : /* Get a tree for the size of STRING_CST, or NULL_TREE.
1603 : Note that this may be larger than TREE_STRING_LENGTH (implying
1604 : a run of trailing zero bytes from TREE_STRING_LENGTH up to this
1605 : higher limit). */
1606 :
1607 : tree
1608 5498 : get_string_cst_size (const_tree string_cst)
1609 : {
1610 5498 : gcc_assert (TREE_CODE (string_cst) == STRING_CST);
1611 5498 : gcc_assert (TREE_CODE (TREE_TYPE (string_cst)) == ARRAY_TYPE);
1612 :
1613 5498 : return TYPE_SIZE_UNIT (TREE_TYPE (string_cst));
1614 : }
1615 :
1616 : /* Given STRING_CST, a STRING_CST and BYTE_OFFSET_CST a constant,
1617 : attempt to get the character at that offset, returning either
1618 : the svalue for the character constant, or nullptr if unsuccessful. */
1619 :
const svalue *
region_model_manager::maybe_get_char_from_string_cst (tree string_cst,
						      tree byte_offset_cst)
{
  gcc_assert (TREE_CODE (string_cst) == STRING_CST);

  /* Adapted from fold_read_from_constant_string.
     Only handle 1-byte element types (i.e. plain chars).  */
  scalar_int_mode char_mode;
  if (TREE_CODE (byte_offset_cst) == INTEGER_CST
      && is_int_mode (TYPE_MODE (TREE_TYPE (TREE_TYPE (string_cst))),
		      &char_mode)
      && GET_MODE_SIZE (char_mode) == 1)
    {
      /* If we're beyond the string_cst, the read is unsuccessful.  */
      if (compare_constants (byte_offset_cst,
			     GE_EXPR,
			     get_string_cst_size (string_cst)).is_true ())
	return nullptr;

      int char_val;
      if (compare_tree_int (byte_offset_cst,
			    TREE_STRING_LENGTH (string_cst)) < 0)
	/* We're within the area defined by TREE_STRING_POINTER.  */
	char_val = (TREE_STRING_POINTER (string_cst)
		    [TREE_INT_CST_LOW (byte_offset_cst)]);
      else
	/* We're in the padding area of trailing zeroes.  */
	char_val = 0;
      tree char_cst
	= build_int_cst_type (TREE_TYPE (TREE_TYPE (string_cst)), char_val);
      return get_or_create_constant_svalue (char_cst);
    }
  return nullptr;
}
1654 :
1655 : /* Given RAW_DATA_CST, a RAW_DATA_CST and BYTE_OFFSET_CST a constant,
1656 : attempt to get the character at that offset, returning either
1657 : the svalue for the character constant, or nullptr if unsuccessful. */
1658 :
1659 : const svalue *
1660 1492 : region_model_manager::maybe_get_char_from_raw_data_cst (tree raw_data_cst,
1661 : tree byte_offset_cst)
1662 : {
1663 1492 : gcc_assert (TREE_CODE (raw_data_cst) == RAW_DATA_CST);
1664 1492 : gcc_assert (TREE_CODE (byte_offset_cst) == INTEGER_CST);
1665 :
1666 1492 : offset_int o = (wi::to_offset (byte_offset_cst));
1667 1492 : if (o >= 0 && o < RAW_DATA_LENGTH (raw_data_cst))
1668 1484 : return get_or_create_int_cst
1669 1484 : (TREE_TYPE (raw_data_cst),
1670 2968 : RAW_DATA_UCHAR_ELT (raw_data_cst, o.to_uhwi ()));
1671 : return nullptr;
1672 : }
1673 :
1674 : /* region consolidation. */
1675 :
1676 : /* Return the region for FNDECL, creating it if necessary. */
1677 :
1678 : const function_region *
1679 896706 : region_model_manager::get_region_for_fndecl (tree fndecl)
1680 : {
1681 896706 : gcc_assert (TREE_CODE (fndecl) == FUNCTION_DECL);
1682 :
1683 896706 : function_region **slot = m_fndecls_map.get (fndecl);
1684 896706 : if (slot)
1685 887246 : return *slot;
1686 9460 : function_region *reg
1687 9460 : = new function_region (alloc_symbol_id (), &m_code_region, fndecl);
1688 9460 : m_fndecls_map.put (fndecl, reg);
1689 9460 : return reg;
1690 : }
1691 :
1692 : /* Return the region for LABEL, creating it if necessary. */
1693 :
1694 : const label_region *
1695 465 : region_model_manager::get_region_for_label (tree label)
1696 : {
1697 465 : gcc_assert (TREE_CODE (label) == LABEL_DECL);
1698 :
1699 465 : label_region **slot = m_labels_map.get (label);
1700 465 : if (slot)
1701 415 : return *slot;
1702 :
1703 50 : tree fndecl = DECL_CONTEXT (label);
1704 50 : gcc_assert (fndecl && TREE_CODE (fndecl) == FUNCTION_DECL);
1705 :
1706 50 : const function_region *func_reg = get_region_for_fndecl (fndecl);
1707 50 : label_region *reg
1708 50 : = new label_region (alloc_symbol_id (), func_reg, label);
1709 50 : m_labels_map.put (label, reg);
1710 50 : return reg;
1711 : }
1712 :
1713 : /* Return the region for EXPR, creating it if necessary. */
1714 :
1715 : const decl_region *
1716 52261 : region_model_manager::get_region_for_global (tree expr)
1717 : {
1718 52261 : gcc_assert (VAR_P (expr));
1719 :
1720 52261 : decl_region **slot = m_globals_map.get (expr);
1721 52261 : if (slot)
1722 45883 : return *slot;
1723 6378 : decl_region *reg
1724 6378 : = new decl_region (alloc_symbol_id (), &m_globals_region, expr);
1725 6378 : m_globals_map.put (expr, reg);
1726 6378 : return reg;
1727 : }
1728 :
1729 : /* Return the region for an unknown access of type REGION_TYPE,
1730 : creating it if necessary.
1731 : This is a symbolic_region, where the pointer is an unknown_svalue
1732 : of type &REGION_TYPE. */
1733 :
1734 : const region *
1735 12977 : region_model_manager::get_unknown_symbolic_region (tree region_type)
1736 : {
1737 12977 : tree ptr_type = region_type ? build_pointer_type (region_type) : NULL_TREE;
1738 12977 : const svalue *unknown_ptr = get_or_create_unknown_svalue (ptr_type);
1739 12977 : return get_symbolic_region (unknown_ptr);
1740 : }
1741 :
1742 : /* Return the region that describes accessing field FIELD of PARENT,
1743 : creating it if necessary. */
1744 :
1745 : const region *
1746 50181 : region_model_manager::get_field_region (const region *parent, tree field)
1747 : {
1748 50181 : gcc_assert (parent);
1749 50181 : gcc_assert (field);
1750 50181 : gcc_assert (TREE_CODE (field) == FIELD_DECL);
1751 :
1752 : /* (*UNKNOWN_PTR).field is (*UNKNOWN_PTR_OF_&FIELD_TYPE). */
1753 50181 : if (parent->symbolic_for_unknown_ptr_p ())
1754 4203 : return get_unknown_symbolic_region (TREE_TYPE (field));
1755 :
1756 45978 : field_region::key_t key (parent, field);
1757 84142 : if (field_region *reg = m_field_regions.get (key))
1758 : return reg;
1759 :
1760 7814 : field_region *field_reg
1761 7814 : = new field_region (alloc_symbol_id (), parent, field);
1762 7814 : m_field_regions.put (key, field_reg);
1763 7814 : return field_reg;
1764 : }
1765 :
1766 : /* Return the region that describes accessing the element of type
1767 : ELEMENT_TYPE at index INDEX of PARENT, creating it if necessary. */
1768 :
1769 : const region *
1770 28908 : region_model_manager::get_element_region (const region *parent,
1771 : tree element_type,
1772 : const svalue *index)
1773 : {
1774 : /* (UNKNOWN_PTR[IDX]) is (UNKNOWN_PTR). */
1775 28908 : if (parent->symbolic_for_unknown_ptr_p ())
1776 52 : return get_unknown_symbolic_region (element_type);
1777 :
1778 28856 : element_region::key_t key (parent, element_type, index);
1779 53592 : if (element_region *reg = m_element_regions.get (key))
1780 : return reg;
1781 :
1782 4120 : element_region *element_reg
1783 4120 : = new element_region (alloc_symbol_id (), parent, element_type, index);
1784 4120 : m_element_regions.put (key, element_reg);
1785 4120 : return element_reg;
1786 : }
1787 :
1788 : /* Return the region that describes accessing the subregion of type
1789 : ELEMENT_TYPE at offset BYTE_OFFSET within PARENT, creating it if
1790 : necessary. */
1791 :
const region *
region_model_manager::get_offset_region (const region *parent,
					 tree type,
					 const svalue *byte_offset)
{
  /* (UNKNOWN_PTR + OFFSET) is (UNKNOWN_PTR).  */
  if (parent->symbolic_for_unknown_ptr_p ())
    return get_unknown_symbolic_region (type);

  /* A zero offset is a no-op: return PARENT (viewed as TYPE, via a
     cast_region if the types differ).  */
  if (tree cst_offset = byte_offset->maybe_get_constant ())
    if (zerop (cst_offset))
      return get_cast_region (parent, type);

  /* Fold OFFSET_REGION(OFFSET_REGION(REG, X), Y)
     to OFFSET_REGION(REG, (X + Y)).
     The recursion terminates: each step strips one offset_region from
     the parent chain.  */
  if (const offset_region *parent_offset_reg
	= parent->dyn_cast_offset_region ())
    {
      const svalue *sval_x = parent_offset_reg->get_byte_offset ();
      const svalue *sval_sum
	= get_or_create_binop (byte_offset->get_type (),
			       POINTER_PLUS_EXPR, sval_x, byte_offset);
      return get_offset_region (parent->get_parent_region (), type, sval_sum);
    }

  /* Consolidate on (PARENT, TYPE, BYTE_OFFSET).  */
  offset_region::key_t key (parent, type, byte_offset);
  if (offset_region *reg = m_offset_regions.get (key))
    return reg;

  offset_region *offset_reg
    = new offset_region (alloc_symbol_id (), parent, type, byte_offset);
  m_offset_regions.put (key, offset_reg);
  return offset_reg;
}
1827 :
1828 : /* Return the region that describes accessing the subregion of type
1829 : TYPE of size BYTE_SIZE_SVAL within PARENT, creating it if necessary. */
1830 :
const region *
region_model_manager::get_sized_region (const region *parent,
					tree type,
					const svalue *byte_size_sval)
{
  if (parent->symbolic_for_unknown_ptr_p ())
    return get_unknown_symbolic_region (type);

  /* Normalize the size to size_type_node, so that equal sizes expressed
     in different integer types consolidate to the same key.  */
  if (byte_size_sval->get_type () != size_type_node)
    byte_size_sval = get_or_create_cast (size_type_node, byte_size_sval);

  /* If PARENT is already that size, return it.
     Note: fold_binary can return NULL_TREE, which simply fails the
     boolean_true_node comparison below.  */
  const svalue *parent_byte_size_sval = parent->get_byte_size_sval (this);
  if (tree parent_size_cst = parent_byte_size_sval->maybe_get_constant ())
    if (tree size_cst = byte_size_sval->maybe_get_constant ())
      {
	tree comparison
	  = fold_binary (EQ_EXPR, boolean_type_node, parent_size_cst, size_cst);
	if (comparison == boolean_true_node)
	  return parent;
      }

  /* Consolidate on (PARENT, TYPE, BYTE_SIZE_SVAL).  */
  sized_region::key_t key (parent, type, byte_size_sval);
  if (sized_region *reg = m_sized_regions.get (key))
    return reg;

  sized_region *sized_reg
    = new sized_region (alloc_symbol_id (), parent, type, byte_size_sval);
  m_sized_regions.put (key, sized_reg);
  return sized_reg;
}
1862 :
1863 : /* Return the region that describes accessing ORIGINAL_REGION as if
1864 : it were of type TYPE, creating it if necessary. */
1865 :
1866 : const region *
1867 70110 : region_model_manager::get_cast_region (const region *original_region,
1868 : tree type)
1869 : {
1870 : /* If types match, return ORIGINAL_REGION. */
1871 70110 : if (type == original_region->get_type ())
1872 : return original_region;
1873 :
1874 19155 : if (original_region->symbolic_for_unknown_ptr_p ())
1875 96 : return get_unknown_symbolic_region (type);
1876 :
1877 19059 : cast_region::key_t key (original_region, type);
1878 35366 : if (cast_region *reg = m_cast_regions.get (key))
1879 : return reg;
1880 :
1881 2752 : cast_region *cast_reg
1882 2752 : = new cast_region (alloc_symbol_id (), original_region, type);
1883 2752 : m_cast_regions.put (key, cast_reg);
1884 2752 : return cast_reg;
1885 : }
1886 :
1887 : /* Return the frame_region for call to FUN from CALLING_FRAME, creating it
1888 : if necessary. CALLING_FRAME may be nullptr. */
1889 :
1890 : const frame_region *
1891 38778 : region_model_manager::get_frame_region (const frame_region *calling_frame,
1892 : const function &fun)
1893 : {
1894 38778 : int index = calling_frame ? calling_frame->get_index () + 1 : 0;
1895 :
1896 38778 : frame_region::key_t key (calling_frame, fun);
1897 62406 : if (frame_region *reg = m_frame_regions.get (key))
1898 : return reg;
1899 :
1900 15150 : frame_region *frame_reg
1901 : = new frame_region (alloc_symbol_id (), &m_stack_region, calling_frame,
1902 15150 : fun, index);
1903 15150 : m_frame_regions.put (key, frame_reg);
1904 15150 : return frame_reg;
1905 : }
1906 :
1907 : /* Return the region that describes dereferencing SVAL, creating it
1908 : if necessary. */
1909 :
1910 : const region *
1911 77278 : region_model_manager::get_symbolic_region (const svalue *sval)
1912 : {
1913 77278 : symbolic_region::key_t key (&m_root_region, sval);
1914 146240 : if (symbolic_region *reg = m_symbolic_regions.get (key))
1915 : return reg;
1916 :
1917 8316 : symbolic_region *symbolic_reg
1918 8316 : = new symbolic_region (alloc_symbol_id (), &m_root_region, sval);
1919 8316 : m_symbolic_regions.put (key, symbolic_reg);
1920 8316 : return symbolic_reg;
1921 : }
1922 :
1923 : /* Return the region that describes accessing STRING_CST, creating it
1924 : if necessary. */
1925 :
1926 : const string_region *
1927 15613 : region_model_manager::get_region_for_string (tree string_cst)
1928 : {
1929 15613 : gcc_assert (TREE_CODE (string_cst) == STRING_CST);
1930 :
1931 15613 : string_region **slot = m_string_map.get (string_cst);
1932 15613 : if (slot)
1933 11830 : return *slot;
1934 3783 : string_region *reg
1935 3783 : = new string_region (alloc_symbol_id (), &m_root_region, string_cst);
1936 3783 : m_string_map.put (string_cst, reg);
1937 3783 : return reg;
1938 : }
1939 :
1940 : /* Return the region that describes accessing BITS within PARENT as TYPE,
1941 : creating it if necessary. */
1942 :
1943 : const region *
1944 247 : region_model_manager::get_bit_range (const region *parent, tree type,
1945 : const bit_range &bits)
1946 : {
1947 247 : gcc_assert (parent);
1948 :
1949 247 : if (parent->symbolic_for_unknown_ptr_p ())
1950 0 : return get_unknown_symbolic_region (type);
1951 :
1952 247 : bit_range_region::key_t key (parent, type, bits);
1953 327 : if (bit_range_region *reg = m_bit_range_regions.get (key))
1954 : return reg;
1955 :
1956 167 : bit_range_region *bit_range_reg
1957 167 : = new bit_range_region (alloc_symbol_id (), parent, type, bits);
1958 167 : m_bit_range_regions.put (key, bit_range_reg);
1959 167 : return bit_range_reg;
1960 : }
1961 :
1962 : /* Return the region that describes accessing the IDX-th variadic argument
1963 : within PARENT_FRAME, creating it if necessary. */
1964 :
1965 : const var_arg_region *
1966 1311 : region_model_manager::get_var_arg_region (const frame_region *parent_frame,
1967 : unsigned idx)
1968 : {
1969 1311 : gcc_assert (parent_frame);
1970 :
1971 1311 : var_arg_region::key_t key (parent_frame, idx);
1972 2126 : if (var_arg_region *reg = m_var_arg_regions.get (key))
1973 : return reg;
1974 :
1975 496 : var_arg_region *var_arg_reg
1976 496 : = new var_arg_region (alloc_symbol_id (), parent_frame, idx);
1977 496 : m_var_arg_regions.put (key, var_arg_reg);
1978 496 : return var_arg_reg;
1979 : }
1980 :
1981 : /* If we see a tree code we don't know how to handle, rather than
1982 : ICE or generate bogus results, create a dummy region, and notify
1983 : CTXT so that it can mark the new state as being not properly
1984 : modelled. The exploded graph can then stop exploring that path,
1985 : since any diagnostics we might issue will have questionable
1986 : validity. */
1987 :
1988 : const region *
1989 84 : region_model_manager::
1990 : get_region_for_unexpected_tree_code (region_model_context *ctxt,
1991 : tree t,
1992 : const dump_location_t &loc)
1993 : {
1994 84 : tree type = TYPE_P (t) ? t : TREE_TYPE (t);
1995 84 : region *new_reg
1996 84 : = new unknown_region (alloc_symbol_id (), &m_root_region, type);
1997 84 : if (ctxt)
1998 52 : ctxt->on_unexpected_tree_code (t, loc);
1999 84 : return new_reg;
2000 : }
2001 :
2002 : /* Return a region describing a heap-allocated block of memory.
2003 : Reuse an existing heap_allocated_region if its id is not within
2004 : BASE_REGS_IN_USE. */
2005 :
2006 : const region *
2007 20799 : region_model_manager::
2008 : get_or_create_region_for_heap_alloc (const bitmap &base_regs_in_use)
2009 : {
2010 : /* Try to reuse an existing region, if it's unreferenced in the
2011 : client state. */
2012 79298 : for (auto existing_reg : m_managed_dynamic_regions)
2013 38389 : if (!bitmap_bit_p (base_regs_in_use, existing_reg->get_id ()))
2014 19193 : if (existing_reg->get_kind () == RK_HEAP_ALLOCATED)
2015 : return existing_reg;
2016 :
2017 : /* All existing ones (if any) are in use; create a new one. */
2018 1709 : region *reg
2019 1709 : = new heap_allocated_region (alloc_symbol_id (), &m_heap_region);
2020 1709 : m_managed_dynamic_regions.safe_push (reg);
2021 1709 : return reg;
2022 : }
2023 :
2024 : /* Return a new region describing a block of memory allocated within FRAME. */
2025 :
2026 : const region *
2027 1139 : region_model_manager::create_region_for_alloca (const frame_region *frame)
2028 : {
2029 1139 : gcc_assert (frame);
2030 1139 : region *reg = new alloca_region (alloc_symbol_id (), frame);
2031 1139 : m_managed_dynamic_regions.safe_push (reg);
2032 1139 : return reg;
2033 : }
2034 :
2035 : /* Log OBJ to LOGGER. */
2036 :
2037 : template <typename T>
2038 : static void
2039 211 : log_managed_object (logger *logger, const T *obj)
2040 : {
2041 211 : logger->start_log_line ();
2042 211 : pretty_printer *pp = logger->get_printer ();
2043 211 : pp_string (pp, " ");
2044 211 : obj->dump_to_pp (pp, true);
2045 211 : logger->end_log_line ();
2046 211 : }
2047 :
2048 : /* Specialization for frame_region, which also logs the count of locals
2049 : managed by the frame_region. */
2050 :
2051 : template <>
2052 : void
2053 6 : log_managed_object (logger *logger, const frame_region *obj)
2054 : {
2055 6 : logger->start_log_line ();
2056 6 : pretty_printer *pp = logger->get_printer ();
2057 6 : pp_string (pp, " ");
2058 6 : obj->dump_to_pp (pp, true);
2059 6 : pp_printf (pp, " [with %i region(s) for locals]", obj->get_num_locals ());
2060 6 : logger->end_log_line ();
2061 6 : }
2062 :
2063 : /* Dump the number of objects that were managed by UNIQ_MAP to LOGGER.
2064 : If SHOW_OBJS is true, also dump the objects themselves. */
2065 :
2066 : template <typename K, typename T>
2067 : static void
2068 105 : log_uniq_map (logger *logger, bool show_objs, const char *title,
2069 : const hash_map<K, T*> &uniq_map)
2070 : {
2071 105 : logger->log (" # %s: %li", title, (long)uniq_map.elements ());
2072 105 : if (!show_objs)
2073 0 : return;
2074 105 : auto_vec<const T *> vec_objs (uniq_map.elements ());
2075 105 : for (typename hash_map<K, T*>::iterator iter = uniq_map.begin ();
2076 376 : iter != uniq_map.end (); ++iter)
2077 166 : vec_objs.quick_push ((*iter).second);
2078 :
2079 271 : vec_objs.qsort (T::cmp_ptr_ptr);
2080 :
2081 : unsigned i;
2082 : const T *obj;
2083 306 : FOR_EACH_VEC_ELT (vec_objs, i, obj)
2084 166 : log_managed_object<T> (logger, obj);
2085 105 : }
2086 :
2087 : /* Dump the number of objects that were managed by MAP to LOGGER.
2088 : If SHOW_OBJS is true, also dump the objects themselves. */
2089 :
2090 : template <typename T>
2091 : static void
2092 55 : log_uniq_map (logger *logger, bool show_objs, const char *title,
2093 : const consolidation_map<T> &map)
2094 : {
2095 55 : logger->log (" # %s: %li", title, (long)map.elements ());
2096 55 : if (!show_objs)
2097 0 : return;
2098 :
2099 55 : auto_vec<const T *> vec_objs (map.elements ());
2100 55 : for (typename consolidation_map<T>::iterator iter = map.begin ();
2101 158 : iter != map.end (); ++iter)
2102 48 : vec_objs.quick_push ((*iter).second);
2103 :
2104 103 : vec_objs.qsort (T::cmp_ptr_ptr);
2105 :
2106 : unsigned i;
2107 : const T *obj;
2108 125 : FOR_EACH_VEC_ELT (vec_objs, i, obj)
2109 48 : log_managed_object<T> (logger, obj);
2110 55 : }
2111 :
2112 : /* Dump the number of objects of each class that were managed by this
2113 : manager to LOGGER.
2114 : If SHOW_OBJS is true, also dump the objects themselves. */
2115 :
void
region_model_manager::log_stats (logger *logger, bool show_objs) const
{
  LOG_SCOPE (logger);
  logger->log ("call string consolidation");
  m_empty_call_string.recursive_log (logger);
  logger->log ("next symbol id: %i", m_next_symbol_id);

  /* Per-class counts (and optionally dumps) for the svalue maps.  */
  logger->log ("svalue consolidation");
  log_uniq_map (logger, show_objs, "constant_svalue", m_constants_map);
  log_uniq_map (logger, show_objs, "unknown_svalue", m_unknowns_map);
  /* The typeless unknown_svalue is consolidated separately from the map.  */
  if (m_unknown_NULL)
    log_managed_object (logger, m_unknown_NULL);
  log_uniq_map (logger, show_objs, "poisoned_svalue", m_poisoned_values_map);
  log_uniq_map (logger, show_objs, "setjmp_svalue", m_setjmp_values_map);
  log_uniq_map (logger, show_objs, "initial_svalue", m_initial_values_map);
  log_uniq_map (logger, show_objs, "region_svalue", m_pointer_values_map);
  log_uniq_map (logger, show_objs, "unaryop_svalue", m_unaryop_values_map);
  log_uniq_map (logger, show_objs, "binop_svalue", m_binop_values_map);
  log_uniq_map (logger, show_objs, "sub_svalue", m_sub_values_map);
  log_uniq_map (logger, show_objs, "repeated_svalue", m_repeated_values_map);
  log_uniq_map (logger, show_objs, "bits_within_svalue",
		m_bits_within_values_map);
  log_uniq_map (logger, show_objs, "unmergeable_svalue",
		m_unmergeable_values_map);
  log_uniq_map (logger, show_objs, "widening_svalue", m_widening_values_map);
  log_uniq_map (logger, show_objs, "compound_svalue", m_compound_values_map);
  log_uniq_map (logger, show_objs, "conjured_svalue", m_conjured_values_map);
  log_uniq_map (logger, show_objs, "asm_output_svalue",
		m_asm_output_values_map);
  log_uniq_map (logger, show_objs, "const_fn_result_svalue",
		m_const_fn_result_values_map);

  /* High-water marks for the complexity limits on accepted svalues.  */
  logger->log ("max accepted svalue num_nodes: %i",
	       m_max_complexity.m_num_nodes);
  logger->log ("max accepted svalue max_depth: %i",
	       m_max_complexity.m_max_depth);

  /* Per-class counts (and optionally dumps) for the region maps.  */
  logger->log ("region consolidation");
  log_uniq_map (logger, show_objs, "function_region", m_fndecls_map);
  log_uniq_map (logger, show_objs, "label_region", m_labels_map);
  log_uniq_map (logger, show_objs, "decl_region for globals", m_globals_map);
  log_uniq_map (logger, show_objs, "field_region", m_field_regions);
  log_uniq_map (logger, show_objs, "element_region", m_element_regions);
  log_uniq_map (logger, show_objs, "offset_region", m_offset_regions);
  log_uniq_map (logger, show_objs, "sized_region", m_sized_regions);
  log_uniq_map (logger, show_objs, "cast_region", m_cast_regions);
  log_uniq_map (logger, show_objs, "frame_region", m_frame_regions);
  log_uniq_map (logger, show_objs, "symbolic_region", m_symbolic_regions);
  log_uniq_map (logger, show_objs, "string_region", m_string_map);
  log_uniq_map (logger, show_objs, "bit_range_region", m_bit_range_regions);
  log_uniq_map (logger, show_objs, "var_arg_region", m_var_arg_regions);
  logger->log (" # managed dynamic regions: %i",
	       m_managed_dynamic_regions.length ());

  /* Delegate to the sub-managers.  */
  m_store_mgr.log_stats (logger, show_objs);
  m_range_mgr->log_stats (logger, show_objs);
}
2172 :
2173 : /* Dump the number of objects of each class that were managed by this
2174 : manager to LOGGER.
2175 : If SHOW_OBJS is true, also dump the objects themselves.
2176 : This is here so it can use log_uniq_map. */
2177 :
void
store_manager::log_stats (logger *logger, bool show_objs) const
{
  LOG_SCOPE (logger);
  /* Counts (and optional dumps) for the two binding-key consolidators.  */
  log_uniq_map (logger, show_objs, "concrete_binding",
		m_concrete_binding_key_mgr);
  log_uniq_map (logger, show_objs, "symbolic_binding",
		m_symbolic_binding_key_mgr);
}
2187 :
2188 : /* Emit a warning showing DECL_REG->tracked_p () for use in DejaGnu tests
2189 : (using -fdump-analyzer-untracked). */
2190 :
2191 : static void
2192 162 : dump_untracked_region (const decl_region *decl_reg)
2193 : {
2194 162 : tree decl = decl_reg->get_decl ();
2195 162 : if (TREE_CODE (decl) != VAR_DECL)
2196 : return;
2197 : /* For now, don't emit the status of decls in the constant pool, to avoid
2198 : differences in DejaGnu test results between targets that use these vs
2199 : those that don't.
2200 : (Eventually these decls should probably be untracked and we should test
2201 : for that, but that's not stage 4 material). */
2202 56 : if (DECL_IN_CONSTANT_POOL (decl))
2203 : return;
2204 85 : warning_at (DECL_SOURCE_LOCATION (decl), 0,
2205 : "track %qD: %s",
2206 56 : decl, (decl_reg->tracked_p () ? "yes" : "no"));
2207 : }
2208 :
2209 : /* Implementation of -fdump-analyzer-untracked. */
2210 :
2211 : void
2212 23 : region_model_manager::dump_untracked_regions () const
2213 : {
2214 74 : for (auto iter : m_globals_map)
2215 : {
2216 51 : const decl_region *decl_reg = iter.second;
2217 51 : dump_untracked_region (decl_reg);
2218 : }
2219 75 : for (auto frame_iter : m_frame_regions)
2220 : {
2221 52 : const frame_region *frame_reg = frame_iter.second;
2222 52 : frame_reg->dump_untracked_regions ();
2223 : }
2224 23 : }
2225 :
2226 : void
2227 52 : frame_region::dump_untracked_regions () const
2228 : {
2229 163 : for (auto iter : m_locals)
2230 : {
2231 111 : const decl_region *decl_reg = iter.second;
2232 111 : dump_untracked_region (decl_reg);
2233 : }
2234 52 : }
2235 :
2236 : } // namespace ana
2237 :
2238 : #endif /* #if ENABLE_ANALYZER */
|