Line data Source code
1 : /* Consolidation of svalues and regions.
2 : Copyright (C) 2020-2026 Free Software Foundation, Inc.
3 : Contributed by David Malcolm <dmalcolm@redhat.com>.
4 :
5 : This file is part of GCC.
6 :
7 : GCC is free software; you can redistribute it and/or modify it
8 : under the terms of the GNU General Public License as published by
9 : the Free Software Foundation; either version 3, or (at your option)
10 : any later version.
11 :
12 : GCC is distributed in the hope that it will be useful, but
13 : WITHOUT ANY WARRANTY; without even the implied warranty of
14 : MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
15 : General Public License for more details.
16 :
17 : You should have received a copy of the GNU General Public License
18 : along with GCC; see the file COPYING3. If not see
19 : <http://www.gnu.org/licenses/>. */
20 :
21 : #include "analyzer/common.h"
22 :
23 : #include "fold-const.h"
24 : #include "ordered-hash-map.h"
25 : #include "options.h"
26 : #include "analyzer/supergraph.h"
27 : #include "sbitmap.h"
28 : #include "target.h"
29 :
30 : #include "analyzer/analyzer-logging.h"
31 : #include "analyzer/call-string.h"
32 : #include "analyzer/program-point.h"
33 : #include "analyzer/store.h"
34 : #include "analyzer/region-model.h"
35 : #include "analyzer/constraint-manager.h"
36 :
37 : #if ENABLE_ANALYZER
38 :
39 : namespace ana {
40 :
41 : /* class region_model_manager. */
42 :
/* region_model_manager's ctor.

   Note that the member-initializer list below makes repeated calls to
   alloc_symbol_id (), which hands out consecutive ids by bumping
   m_next_symbol_id; the special regions (root, stack, heap, code,
   globals, thread-local, errno) therefore get stable, consecutive
   symbol ids, in declaration order, for each manager instance.  */

region_model_manager::region_model_manager (logger *logger)
: m_logger (logger),
  m_next_symbol_id (0),
  m_empty_call_string (),
  m_root_region (alloc_symbol_id ()),
  m_stack_region (alloc_symbol_id (), &m_root_region),
  m_heap_region (alloc_symbol_id (), &m_root_region),
  m_unknown_NULL (nullptr),
  m_checking_feasibility (false),
  m_max_complexity (0, 0),
  m_code_region (alloc_symbol_id (), &m_root_region),
  m_fndecls_map (), m_labels_map (),
  m_globals_region (alloc_symbol_id (), &m_root_region),
  m_globals_map (),
  m_thread_local_region (alloc_symbol_id (), &m_root_region),
  m_errno_region (alloc_symbol_id (), &m_thread_local_region),
  m_store_mgr (this),
  m_range_mgr (new bounded_ranges_manager ()),
  m_known_fn_mgr (logger)
{
}
66 :
67 : /* region_model_manager's dtor. Delete all of the managed svalues
68 : and regions. */
69 :
70 7912 : region_model_manager::~region_model_manager ()
71 : {
72 : /* Delete consolidated svalues. */
73 59096 : for (constants_map_t::iterator iter = m_constants_map.begin ();
74 114236 : iter != m_constants_map.end (); ++iter)
75 55140 : delete (*iter).second;
76 12477 : for (unknowns_map_t::iterator iter = m_unknowns_map.begin ();
77 20998 : iter != m_unknowns_map.end (); ++iter)
78 8521 : delete (*iter).second;
79 3956 : delete m_unknown_NULL;
80 9878 : for (poisoned_values_map_t::iterator iter = m_poisoned_values_map.begin ();
81 15800 : iter != m_poisoned_values_map.end (); ++iter)
82 5922 : delete (*iter).second;
83 3990 : for (setjmp_values_map_t::iterator iter = m_setjmp_values_map.begin ();
84 4024 : iter != m_setjmp_values_map.end (); ++iter)
85 34 : delete (*iter).second;
86 30325 : for (initial_values_map_t::iterator iter = m_initial_values_map.begin ();
87 56694 : iter != m_initial_values_map.end (); ++iter)
88 26369 : delete (*iter).second;
89 28622 : for (pointer_values_map_t::iterator iter = m_pointer_values_map.begin ();
90 53288 : iter != m_pointer_values_map.end (); ++iter)
91 24666 : delete (*iter).second;
92 12180 : for (unaryop_values_map_t::iterator iter = m_unaryop_values_map.begin ();
93 20404 : iter != m_unaryop_values_map.end (); ++iter)
94 8224 : delete (*iter).second;
95 24208 : for (binop_values_map_t::iterator iter = m_binop_values_map.begin ();
96 44460 : iter != m_binop_values_map.end (); ++iter)
97 20252 : delete (*iter).second;
98 6287 : for (sub_values_map_t::iterator iter = m_sub_values_map.begin ();
99 8618 : iter != m_sub_values_map.end (); ++iter)
100 2331 : delete (*iter).second;
101 4980 : for (auto iter : m_repeated_values_map)
102 512 : delete iter.second;
103 5500 : for (auto iter : m_bits_within_values_map)
104 772 : delete iter.second;
105 4160 : for (unmergeable_values_map_t::iterator iter
106 3956 : = m_unmergeable_values_map.begin ();
107 4364 : iter != m_unmergeable_values_map.end (); ++iter)
108 204 : delete (*iter).second;
109 6316 : for (widening_values_map_t::iterator iter = m_widening_values_map.begin ();
110 8676 : iter != m_widening_values_map.end (); ++iter)
111 2360 : delete (*iter).second;
112 4568 : for (compound_values_map_t::iterator iter = m_compound_values_map.begin ();
113 5180 : iter != m_compound_values_map.end (); ++iter)
114 612 : delete (*iter).second;
115 29789 : for (conjured_values_map_t::iterator iter = m_conjured_values_map.begin ();
116 55622 : iter != m_conjured_values_map.end (); ++iter)
117 25833 : delete (*iter).second;
118 4330 : for (auto iter : m_asm_output_values_map)
119 187 : delete iter.second;
120 4330 : for (auto iter : m_const_fn_result_values_map)
121 187 : delete iter.second;
122 :
123 : /* Delete consolidated regions. */
124 13347 : for (fndecls_map_t::iterator iter = m_fndecls_map.begin ();
125 22738 : iter != m_fndecls_map.end (); ++iter)
126 9391 : delete (*iter).second;
127 4006 : for (labels_map_t::iterator iter = m_labels_map.begin ();
128 4056 : iter != m_labels_map.end (); ++iter)
129 50 : delete (*iter).second;
130 10334 : for (globals_map_t::iterator iter = m_globals_map.begin ();
131 16712 : iter != m_globals_map.end (); ++iter)
132 6378 : delete (*iter).second;
133 7704 : for (string_map_t::iterator iter = m_string_map.begin ();
134 11452 : iter != m_string_map.end (); ++iter)
135 3748 : delete (*iter).second;
136 :
137 3956 : delete m_range_mgr;
138 7912 : }
139 :
140 : /* Return true if C exceeds the complexity limit for svalues. */
141 :
142 : bool
143 168321 : region_model_manager::too_complex_p (const complexity &c) const
144 : {
145 168321 : if (c.m_max_depth > (unsigned)param_analyzer_max_svalue_depth)
146 1989 : return true;
147 : return false;
148 : }
149 :
150 : /* If SVAL exceeds the complexity limit for svalues, delete it
151 : and return true.
152 : Otherwise update m_max_complexity and return false. */
153 :
154 : bool
155 175594 : region_model_manager::reject_if_too_complex (svalue *sval)
156 : {
157 175594 : if (m_checking_feasibility)
158 : return false;
159 :
160 168321 : const complexity &c = sval->get_complexity ();
161 168321 : if (!too_complex_p (c))
162 : {
163 166332 : if (m_max_complexity.m_num_nodes < c.m_num_nodes)
164 11601 : m_max_complexity.m_num_nodes = c.m_num_nodes;
165 166332 : if (m_max_complexity.m_max_depth < c.m_max_depth)
166 10651 : m_max_complexity.m_max_depth = c.m_max_depth;
167 166332 : return false;
168 : }
169 :
170 1989 : pretty_printer pp;
171 1989 : pp_format_decoder (&pp) = default_tree_printer;
172 1989 : sval->dump_to_pp (&pp, true);
173 1989 : if (warning_at (input_location, OPT_Wanalyzer_symbol_too_complex,
174 : "symbol too complicated: %qs",
175 : pp_formatted_text (&pp)))
176 2 : inform (input_location,
177 : "max_depth %i exceeds --param=analyzer-max-svalue-depth=%i",
178 2 : c.m_max_depth, param_analyzer_max_svalue_depth);
179 :
180 1989 : delete sval;
181 1989 : return true;
182 1989 : }
183 :
/* Macro for imposing a complexity limit on svalues, for use within
   region_model_manager member functions.

   If SVAL exceeds the complexity limit, delete it and return an UNKNOWN
   value of the same type.
   Otherwise update m_max_complexity and carry on.

   This must be a macro rather than a helper function, since on failure
   it returns from the *calling* member function.  */

#define RETURN_UNKNOWN_IF_TOO_COMPLEX(SVAL) \
  do { \
    svalue *sval_ = (SVAL); \
    tree type_ = sval_->get_type (); \
    if (reject_if_too_complex (sval_)) \
      return get_or_create_unknown_svalue (type_); \
  } while (0)
198 :
199 : /* svalue consolidation. */
200 :
201 : /* Return the svalue * for a constant_svalue for CST_EXPR,
202 : creating it if necessary.
203 : The constant_svalue instances are reused, based on pointer equality
204 : of trees */
205 :
206 : const svalue *
207 2512974 : region_model_manager::get_or_create_constant_svalue (tree type, tree cst_expr)
208 : {
209 2512974 : gcc_assert (cst_expr);
210 2512974 : gcc_assert (CONSTANT_CLASS_P (cst_expr));
211 2512974 : gcc_assert (type == TREE_TYPE (cst_expr) || type == NULL_TREE);
212 :
213 2512974 : constant_svalue::key_t key (type, cst_expr);
214 2512974 : constant_svalue **slot = m_constants_map.get (key);
215 2512974 : if (slot)
216 2457030 : return *slot;
217 55944 : constant_svalue *cst_sval
218 55944 : = new constant_svalue (alloc_symbol_id (), type, cst_expr);
219 55944 : RETURN_UNKNOWN_IF_TOO_COMPLEX (cst_sval);
220 55140 : m_constants_map.put (key, cst_sval);
221 55140 : return cst_sval;
222 : }
223 :
224 : const svalue *
225 1613427 : region_model_manager::get_or_create_constant_svalue (tree cst_expr)
226 : {
227 1613427 : tree type = TREE_TYPE (cst_expr);
228 1613427 : if (TREE_CODE (cst_expr) == RAW_DATA_CST)
229 : /* The type of a RAW_DATA_CST is the type of each element, rather than
230 : that of the constant as a whole, so use NULL_TREE for simplicity. */
231 14 : type = NULL_TREE;
232 1613427 : return get_or_create_constant_svalue (type, cst_expr);
233 : }
234 :
235 : /* Return the svalue * for a constant_svalue for the INTEGER_CST
236 : for VAL of type TYPE, creating it if necessary. */
237 :
238 : const svalue *
239 863083 : region_model_manager::get_or_create_int_cst (tree type,
240 : const poly_wide_int_ref &cst)
241 : {
242 863083 : tree effective_type = type;
243 863083 : if (!type)
244 6717 : effective_type = ptrdiff_type_node;
245 863083 : gcc_assert (INTEGRAL_TYPE_P (effective_type)
246 : || POINTER_TYPE_P (effective_type));
247 863083 : tree tree_cst = wide_int_to_tree (effective_type, cst);
248 863083 : return get_or_create_constant_svalue (type, tree_cst);
249 : }
250 :
251 : /* Return the svalue * for the constant_svalue for the NULL pointer
252 : of POINTER_TYPE, creating it if necessary. */
253 :
254 : const svalue *
255 1229 : region_model_manager::get_or_create_null_ptr (tree pointer_type)
256 : {
257 1229 : gcc_assert (pointer_type);
258 1229 : gcc_assert (POINTER_TYPE_P (pointer_type));
259 1229 : return get_or_create_int_cst (pointer_type, 0);
260 : }
261 :
262 : /* Return the svalue * for a unknown_svalue for TYPE (which can be NULL_TREE),
263 : creating it if necessary.
264 : The unknown_svalue instances are reused, based on pointer equality
265 : of the types */
266 :
267 : const svalue *
268 853134 : region_model_manager::get_or_create_unknown_svalue (tree type)
269 : {
270 : /* Don't create unknown values when doing feasibility testing;
271 : instead, create a unique svalue. */
272 853134 : if (m_checking_feasibility)
273 12369 : return create_unique_svalue (type);
274 :
275 : /* Special-case NULL, so that the hash_map can use NULL as the
276 : "empty" value. */
277 840765 : if (type == NULL_TREE)
278 : {
279 117591 : if (!m_unknown_NULL)
280 1711 : m_unknown_NULL = new unknown_svalue (alloc_symbol_id (), type);
281 117591 : return m_unknown_NULL;
282 : }
283 :
284 723174 : unknown_svalue **slot = m_unknowns_map.get (type);
285 723174 : if (slot)
286 714653 : return *slot;
287 8521 : unknown_svalue *sval = new unknown_svalue (alloc_symbol_id (), type);
288 8521 : m_unknowns_map.put (type, sval);
289 8521 : return sval;
290 : }
291 :
292 : /* Return a freshly-allocated svalue of TYPE, owned by this manager. */
293 :
294 : const svalue *
295 12369 : region_model_manager::create_unique_svalue (tree type)
296 : {
297 12369 : svalue *sval = new placeholder_svalue (alloc_symbol_id (), type, "unique");
298 12369 : m_managed_dynamic_svalues.safe_push (sval);
299 12369 : return sval;
300 : }
301 :
302 : /* Return the svalue * for the initial value of REG, creating it if
303 : necessary. */
304 :
305 : const svalue *
306 3077608 : region_model_manager::get_or_create_initial_value (const region *reg,
307 : bool check_poisoned)
308 : {
309 3077608 : if (!reg->can_have_initial_svalue_p () && check_poisoned)
310 191544 : return get_or_create_poisoned_svalue (poison_kind::uninit,
311 191544 : reg->get_type ());
312 :
313 : /* The initial value of a cast is a cast of the initial value. */
314 2886064 : if (const cast_region *cast_reg = reg->dyn_cast_cast_region ())
315 : {
316 2225 : const region *original_reg = cast_reg->get_parent_region ();
317 2225 : return get_or_create_cast (cast_reg->get_type (),
318 2225 : get_or_create_initial_value (original_reg));
319 : }
320 :
321 : /* Simplify:
322 : INIT_VAL(ELEMENT_REG(STRING_REG), CONSTANT_SVAL)
323 : to:
324 : CONSTANT_SVAL(STRING[N]). */
325 2883839 : if (const element_region *element_reg = reg->dyn_cast_element_region ())
326 7098 : if (tree cst_idx = element_reg->get_index ()->maybe_get_constant ())
327 8106 : if (const string_region *string_reg
328 4053 : = element_reg->get_parent_region ()->dyn_cast_string_region ())
329 702 : if (tree_fits_shwi_p (cst_idx))
330 : {
331 702 : HOST_WIDE_INT idx = tree_to_shwi (cst_idx);
332 702 : tree string_cst = string_reg->get_string_cst ();
333 1404 : if (idx >= 0 && idx <= TREE_STRING_LENGTH (string_cst))
334 : {
335 702 : int ch = TREE_STRING_POINTER (string_cst)[idx];
336 702 : return get_or_create_int_cst (reg->get_type (), ch);
337 : }
338 : }
339 :
340 : /* INIT_VAL (*UNKNOWN_PTR) -> UNKNOWN_VAL. */
341 2883137 : if (reg->symbolic_for_unknown_ptr_p ())
342 7450 : return get_or_create_unknown_svalue (reg->get_type ());
343 :
344 2875687 : if (initial_svalue **slot = m_initial_values_map.get (reg))
345 2848927 : return *slot;
346 26760 : initial_svalue *initial_sval
347 26760 : = new initial_svalue (alloc_symbol_id (), reg->get_type (), reg);
348 26760 : RETURN_UNKNOWN_IF_TOO_COMPLEX (initial_sval);
349 26369 : m_initial_values_map.put (reg, initial_sval);
350 26369 : return initial_sval;
351 : }
352 :
353 : /* Return the svalue * for R using type TYPE, creating it if
354 : necessary. */
355 :
356 : const svalue *
357 34 : region_model_manager::get_or_create_setjmp_svalue (const setjmp_record &r,
358 : tree type)
359 : {
360 34 : setjmp_svalue::key_t key (r, type);
361 34 : if (setjmp_svalue **slot = m_setjmp_values_map.get (key))
362 0 : return *slot;
363 34 : setjmp_svalue *setjmp_sval = new setjmp_svalue (r, alloc_symbol_id (), type);
364 34 : RETURN_UNKNOWN_IF_TOO_COMPLEX (setjmp_sval);
365 34 : m_setjmp_values_map.put (key, setjmp_sval);
366 34 : return setjmp_sval;
367 : }
368 :
369 : /* Return the svalue * for a poisoned value of KIND and TYPE, creating it if
370 : necessary. */
371 :
372 : const svalue *
373 198736 : region_model_manager::get_or_create_poisoned_svalue (enum poison_kind kind,
374 : tree type)
375 : {
376 198736 : poisoned_svalue::key_t key (kind, type);
377 198736 : if (poisoned_svalue **slot = m_poisoned_values_map.get (key))
378 192814 : return *slot;
379 5922 : poisoned_svalue *poisoned_sval
380 5922 : = new poisoned_svalue (kind, alloc_symbol_id (), type);
381 5922 : RETURN_UNKNOWN_IF_TOO_COMPLEX (poisoned_sval);
382 5922 : m_poisoned_values_map.put (key, poisoned_sval);
383 5922 : return poisoned_sval;
384 : }
385 :
386 : /* Return the svalue * for a pointer to POINTEE of type PTR_TYPE,
387 : creating it if necessary. */
388 :
389 : const svalue *
390 963169 : region_model_manager::get_ptr_svalue (tree ptr_type, const region *pointee)
391 : {
392 : /* If this is a symbolic region from dereferencing a pointer, and the types
393 : match, then return the original pointer. */
394 963169 : if (const symbolic_region *sym_reg = pointee->dyn_cast_symbolic_region ())
395 159 : if (ptr_type == sym_reg->get_pointer ()->get_type ())
396 : return sym_reg->get_pointer ();
397 :
398 963010 : region_svalue::key_t key (ptr_type, pointee);
399 963010 : if (region_svalue **slot = m_pointer_values_map.get (key))
400 937832 : return *slot;
401 25178 : region_svalue *sval
402 25178 : = new region_svalue (alloc_symbol_id (), ptr_type, pointee);
403 25178 : RETURN_UNKNOWN_IF_TOO_COMPLEX (sval);
404 24666 : m_pointer_values_map.put (key, sval);
405 24666 : return sval;
406 : }
407 :
408 : /* Subroutine of region_model_manager::maybe_fold_unaryop
409 : when the arg is a binop_svalue.
410 : Invert comparisons e.g. "!(x == y)" => "x != y".
411 : Otherwise, return nullptr. */
412 :
413 : const svalue *
414 16 : region_model_manager::
415 : maybe_invert_comparison_in_unaryop (tree result_type,
416 : const binop_svalue *binop)
417 : {
418 16 : if (TREE_CODE_CLASS (binop->get_op ()) == tcc_comparison)
419 : {
420 16 : enum tree_code inv_op
421 16 : = invert_tree_comparison (binop->get_op (),
422 16 : HONOR_NANS (binop->get_type ()));
423 16 : if (inv_op != ERROR_MARK)
424 16 : return get_or_create_cast
425 16 : (result_type,
426 : get_or_create_binop (binop->get_type (), inv_op,
427 : binop->get_arg0 (),
428 16 : binop->get_arg1 ()));
429 : }
430 : return nullptr;
431 : }
432 :
/* Subroutine of region_model_manager::get_or_create_unaryop.
   Attempt to fold the inputs and return a simpler svalue *.
   Otherwise, return nullptr.  */

const svalue *
region_model_manager::maybe_fold_unaryop (tree type, enum tree_code op,
					  const svalue *arg)
{
  /* Ops on "unknown" are also unknown.  */
  if (arg->get_kind () == SK_UNKNOWN)
    return get_or_create_unknown_svalue (type);
  /* Likewise for "poisoned": the poison kind propagates through the op.  */
  else if (const poisoned_svalue *poisoned_sval
	   = arg->dyn_cast_poisoned_svalue ())
    return get_or_create_poisoned_svalue (poisoned_sval->get_poison_kind (),
					  type);

  gcc_assert (arg->can_have_associated_state_p ());

  switch (op)
    {
    default: break;
    case VIEW_CONVERT_EXPR:
    case NOP_EXPR:
      {
	/* A typeless cast can't be simplified here.  */
	if (!type)
	  return nullptr;

	/* Handle redundant casts.  */
	if (arg->get_type ()
	    && useless_type_conversion_p (arg->get_type (), type))
	  return arg;

	/* Fold "cast<TYPE> (cast <INNER_TYPE> (innermost_arg))
	     => "cast<TYPE> (innermost_arg)",
	   unless INNER_TYPE is narrower than TYPE
	   (checked via fold_binary on TYPE_SIZE: only elide the inner
	   cast when TYPE's size <= INNER_TYPE's size, so no truncation
	   is skipped).  */
	if (const svalue *innermost_arg = arg->maybe_undo_cast ())
	  {
	    if (tree inner_type = arg->get_type ())
	      if (TYPE_SIZE (type)
		  && TYPE_SIZE (inner_type)
		  && (fold_binary (LE_EXPR, boolean_type_node,
				   TYPE_SIZE (type), TYPE_SIZE (inner_type))
		      == boolean_true_node))
		return maybe_fold_unaryop (type, op, innermost_arg);
	  }
	/* Avoid creating symbolic regions for pointer casts by
	   simplifying (T*)(&REGION) to ((T*)&REGION).  */
	if (const region_svalue *region_sval = arg->dyn_cast_region_svalue ())
	  if (POINTER_TYPE_P (type)
	      && region_sval->get_type ()
	      && POINTER_TYPE_P (region_sval->get_type ()))
	    return get_ptr_svalue (type, region_sval->get_pointee ());

	/* Casting all zeroes should give all zeroes.  */
	if (type
	    && arg->all_zeroes_p ()
	    && (INTEGRAL_TYPE_P (type) || POINTER_TYPE_P (type)))
	  return get_or_create_int_cst (type, 0);
      }
      break;
    case TRUTH_NOT_EXPR:
      {
	/* Invert comparisons e.g. "!(x == y)" => "x != y".  */
	if (const binop_svalue *binop = arg->dyn_cast_binop_svalue ())
	  if (const svalue *folded
		= maybe_invert_comparison_in_unaryop (type, binop))
	    return folded;
      }
      break;
    case NEGATE_EXPR:
      {
	/* -(-(VAL)) is VAL, for integer types.  */
	if (const unaryop_svalue *unaryop = arg->dyn_cast_unaryop_svalue ())
	  if (unaryop->get_op () == NEGATE_EXPR
	      && type == unaryop->get_type ()
	      && type
	      && INTEGRAL_TYPE_P (type))
	    return unaryop->get_arg ();
      }
      break;
    case BIT_NOT_EXPR:
      {
	/* Invert comparisons for e.g. "~(x == y)" => "x != y",
	   but only when both the result and the arg are boolean, so
	   that BIT_NOT acts as logical negation.  */
	if (type
	    && TREE_CODE (type) == BOOLEAN_TYPE
	    && arg->get_type ()
	    && TREE_CODE (arg->get_type ()) == BOOLEAN_TYPE)
	  if (const binop_svalue *binop = arg->dyn_cast_binop_svalue ())
	    if (const svalue *folded
		  = maybe_invert_comparison_in_unaryop (type, binop))
	      return folded;
      }
      break;
    }

  /* Constants: let fold_unary do the arithmetic.  */
  if (type)
    if (tree cst = arg->maybe_get_constant ())
      if (tree result = fold_unary (op, type, cst))
	{
	  if (CONSTANT_CLASS_P (result))
	    return get_or_create_constant_svalue (result);

	  /* fold_unary can return casts of constants; try to handle them.  */
	  if (op != NOP_EXPR
	      && TREE_CODE (result) == NOP_EXPR
	      && CONSTANT_CLASS_P (TREE_OPERAND (result, 0)))
	    {
	      const svalue *inner_cst
		= get_or_create_constant_svalue (TREE_OPERAND (result, 0));
	      return get_or_create_cast (type,
					 get_or_create_cast (TREE_TYPE (result),
							     inner_cst));
	    }
	}

  /* No folding was possible.  */
  return nullptr;
}
552 :
553 : /* Return the svalue * for an unary operation OP on ARG with a result of
554 : type TYPE, creating it if necessary. */
555 :
556 : const svalue *
557 314648 : region_model_manager::get_or_create_unaryop (tree type, enum tree_code op,
558 : const svalue *arg)
559 : {
560 314648 : if (const svalue *folded = maybe_fold_unaryop (type, op, arg))
561 : return folded;
562 115814 : unaryop_svalue::key_t key (type, op, arg);
563 115814 : if (unaryop_svalue **slot = m_unaryop_values_map.get (key))
564 107488 : return *slot;
565 8326 : unaryop_svalue *unaryop_sval
566 8326 : = new unaryop_svalue (alloc_symbol_id (), type, op, arg);
567 8326 : RETURN_UNKNOWN_IF_TOO_COMPLEX (unaryop_sval);
568 8224 : m_unaryop_values_map.put (key, unaryop_sval);
569 8224 : return unaryop_sval;
570 : }
571 :
572 : /* Get a tree code for a cast to DST_TYPE from SRC_TYPE.
573 : Use NOP_EXPR if possible (e.g. to help fold_unary convert casts
574 : of 0 to (T*) to simple pointer constants), but use FIX_TRUNC_EXPR
575 : and VIEW_CONVERT_EXPR for cases that fold_unary would otherwise crash
576 : on. */
577 :
578 : static enum tree_code
579 241108 : get_code_for_cast (tree dst_type, tree src_type)
580 : {
581 0 : if (!dst_type)
582 : return NOP_EXPR;
583 0 : if (!src_type)
584 : return NOP_EXPR;
585 :
586 236261 : if (SCALAR_FLOAT_TYPE_P (src_type))
587 : {
588 352 : if (TREE_CODE (dst_type) == INTEGER_TYPE)
589 : return FIX_TRUNC_EXPR;
590 : else
591 348 : return VIEW_CONVERT_EXPR;
592 : }
593 :
594 : return NOP_EXPR;
595 : }
596 :
597 : /* Return the svalue * for a cast of ARG to type TYPE, creating it
598 : if necessary. */
599 :
600 : const svalue *
601 1214188 : region_model_manager::get_or_create_cast (tree type, const svalue *arg)
602 : {
603 : /* No-op if the types are the same. */
604 1214188 : if (type == arg->get_type ())
605 : return arg;
606 :
607 : /* Don't attempt to handle casts involving vector types for now. */
608 250625 : if (type)
609 241192 : if (VECTOR_TYPE_P (type)
610 241192 : || (arg->get_type ()
611 236261 : && VECTOR_TYPE_P (arg->get_type ())))
612 84 : return get_or_create_unknown_svalue (type);
613 :
614 241108 : enum tree_code op = get_code_for_cast (type, arg->get_type ());
615 250541 : return get_or_create_unaryop (type, op, arg);
616 : }
617 :
/* Subroutine of region_model_manager::maybe_fold_binop for handling
   (TYPE)(COMPOUND_SVAL BIT_AND_EXPR CST) that may have been generated by
   optimize_bit_field_compare, where CST is from ARG1.

   Support masking out bits from a compound_svalue for comparing a bitfield
   against a value, as generated by optimize_bit_field_compare for
   BITFIELD == VALUE.

   If COMPOUND_SVAL has a value for the appropriate bits, return it,
   shifted accordingly.
   Otherwise return nullptr.  */

const svalue *
region_model_manager::
maybe_undo_optimize_bit_field_compare (tree type,
				       const compound_svalue *compound_sval,
				       tree cst,
				       const svalue *arg1)
{
  /* Only integral result types make sense for bitfield compares.  */
  if (!type)
    return nullptr;
  if (!INTEGRAL_TYPE_P (type))
    return nullptr;

  const binding_map &map = compound_sval->get_map ();
  unsigned HOST_WIDE_INT mask = TREE_INT_CST_LOW (cst);
  /* If "mask" is a contiguous range of set bits, see if the
     compound_sval has a value for those bits.  */
  bit_range bits (0, 0);
  if (!bit_range::from_mask (mask, &bits))
    return nullptr;

  /* On big-endian targets the bits are numbered from the other end of
     the byte, so mirror the range within BITS_PER_UNIT.  */
  bit_range bound_bits (bits);
  if (BYTES_BIG_ENDIAN)
    bound_bits = bit_range (BITS_PER_UNIT - bits.get_next_bit_offset (),
			    bits.m_size_in_bits);
  const concrete_binding *conc
    = get_store_manager ()->get_concrete_binding (bound_bits);
  const svalue *sval = map.get (conc);
  if (!sval)
    return nullptr;

  /* We have a value;
     shift it by the correct number of bits.  */
  const svalue *lhs = get_or_create_cast (type, sval);
  HOST_WIDE_INT bit_offset = bits.get_start_bit_offset ().to_shwi ();
  const svalue *shift_sval = get_or_create_int_cst (type, bit_offset);
  const svalue *shifted_sval = get_or_create_binop (type, LSHIFT_EXPR,
						    lhs, shift_sval);
  /* Reapply the mask (needed for negative
     signed bitfields).  */
  return get_or_create_binop (type, BIT_AND_EXPR,
			      shifted_sval, arg1);
}
672 :
673 : /* Subroutine of region_model_manager::get_or_create_binop.
674 : Attempt to fold the inputs and return a simpler svalue *.
675 : Otherwise, return nullptr. */
676 :
677 : const svalue *
678 227332 : region_model_manager::maybe_fold_binop (tree type, enum tree_code op,
679 : const svalue *arg0,
680 : const svalue *arg1)
681 : {
682 227332 : tree cst0 = arg0->maybe_get_constant ();
683 227332 : tree cst1 = arg1->maybe_get_constant ();
684 : /* (CST OP CST). */
685 227332 : if (cst0 && cst1)
686 : {
687 76228 : if (type)
688 : {
689 39764 : if (tree result = fold_binary (op, type, cst0, cst1))
690 39730 : if (CONSTANT_CLASS_P (result))
691 39728 : return get_or_create_constant_svalue (result);
692 : }
693 : else
694 : {
695 36464 : if (tree result = int_const_binop (op, cst0, cst1, -1))
696 36464 : return get_or_create_constant_svalue (NULL_TREE, result);
697 : }
698 : }
699 :
700 104306 : if ((type && FLOAT_TYPE_P (type))
701 150689 : || (arg0->get_type () && FLOAT_TYPE_P (arg0->get_type ()))
702 301777 : || (arg1->get_type () && FLOAT_TYPE_P (arg1->get_type ())))
703 : return nullptr;
704 :
705 150627 : switch (op)
706 : {
707 : default:
708 : break;
709 95367 : case POINTER_PLUS_EXPR:
710 95367 : case PLUS_EXPR:
711 : /* (VAL + 0) -> VAL. */
712 95367 : if (cst1 && zerop (cst1))
713 15003 : return get_or_create_cast (type, arg0);
714 : /* X + (-X) -> 0. */
715 80364 : if (const unaryop_svalue *unary_op = arg1->dyn_cast_unaryop_svalue ())
716 1958 : if (unary_op->get_op () == NEGATE_EXPR
717 780 : && unary_op->get_arg () == arg0
718 2080 : && type && (INTEGRAL_TYPE_P (type) || POINTER_TYPE_P (type)))
719 120 : return get_or_create_int_cst (type, 0);
720 : /* X + (Y - X) -> Y. */
721 80244 : if (const binop_svalue *bin_op = arg1->dyn_cast_binop_svalue ())
722 8773 : if (bin_op->get_op () == MINUS_EXPR)
723 203 : if (bin_op->get_arg1 () == arg0)
724 8 : return get_or_create_cast (type, bin_op->get_arg0 ());
725 : break;
726 3879 : case MINUS_EXPR:
727 : /* (VAL - 0) -> VAL. */
728 3879 : if (cst1 && zerop (cst1))
729 47 : return get_or_create_cast (type, arg0);
730 : /* (0 - VAL) -> -VAL. */
731 3832 : if (cst0 && zerop (cst0))
732 17 : return get_or_create_unaryop (type, NEGATE_EXPR, arg1);
733 : /* (X + Y) - X -> Y. */
734 3815 : if (const binop_svalue *binop = arg0->dyn_cast_binop_svalue ())
735 860 : if (binop->get_op () == PLUS_EXPR)
736 125 : if (binop->get_arg0 () == arg1)
737 15 : return get_or_create_cast (type, binop->get_arg1 ());
738 : break;
739 25734 : case MULT_EXPR:
740 : /* (VAL * 0). */
741 25734 : if (cst1
742 25134 : && zerop (cst1)
743 25798 : && (type == NULL_TREE || INTEGRAL_TYPE_P (type)))
744 56 : return get_or_create_int_cst (type, 0);
745 : /* (VAL * 1) -> VAL. */
746 25678 : if (cst1 && integer_onep (cst1))
747 304 : return get_or_create_cast (type, arg0);
748 : break;
749 3080 : case BIT_AND_EXPR:
750 3080 : if (cst1)
751 : {
752 2557 : if (zerop (cst1)
753 2557 : && (type == NULL_TREE || INTEGRAL_TYPE_P (type)))
754 : /* "(ARG0 & 0)" -> "0". */
755 58 : return get_or_create_int_cst (type, 0);
756 :
757 4998 : if (const compound_svalue *compound_sval
758 2499 : = arg0->dyn_cast_compound_svalue ())
759 116 : if (const svalue *sval
760 58 : = maybe_undo_optimize_bit_field_compare (type,
761 : compound_sval,
762 : cst1, arg1))
763 : return sval;
764 : }
765 2966 : if (arg0->get_type () == boolean_type_node
766 2966 : && arg1->get_type () == boolean_type_node)
767 : {
768 : /* If the LHS are both _Bool, then... */
769 : /* ..."(1 & x) -> x". */
770 447 : if (cst0 && !zerop (cst0))
771 0 : return get_or_create_cast (type, arg1);
772 : /* ..."(x & 1) -> x". */
773 447 : if (cst1 && !zerop (cst1))
774 159 : return get_or_create_cast (type, arg0);
775 : /* ..."(0 & x) -> 0". */
776 288 : if (cst0 && zerop (cst0))
777 0 : return get_or_create_int_cst (type, 0);
778 : /* ..."(x & 0) -> 0". */
779 288 : if (cst1 && zerop (cst1))
780 0 : return get_or_create_int_cst (type, 0);
781 : }
782 : break;
783 3168 : case BIT_IOR_EXPR:
784 3168 : if (arg0->get_type () == boolean_type_node
785 3168 : && arg1->get_type () == boolean_type_node)
786 : {
787 : /* If the LHS are both _Bool, then... */
788 : /* ..."(1 | x) -> 1". */
789 117 : if (cst0 && !zerop (cst0))
790 0 : return get_or_create_int_cst (type, 1);
791 : /* ..."(x | 1) -> 1". */
792 117 : if (cst1 && !zerop (cst1))
793 38 : return get_or_create_int_cst (type, 1);
794 : /* ..."(0 | x) -> x". */
795 79 : if (cst0 && zerop (cst0))
796 0 : return get_or_create_cast (type, arg1);
797 : /* ..."(x | 0) -> x". */
798 79 : if (cst1 && zerop (cst1))
799 20 : return get_or_create_cast (type, arg0);
800 : }
801 : break;
802 12 : case TRUTH_ANDIF_EXPR:
803 12 : case TRUTH_AND_EXPR:
804 12 : if (cst1)
805 : {
806 12 : if (zerop (cst1) && INTEGRAL_TYPE_P (type))
807 : /* "(ARG0 && 0)" -> "0". */
808 4 : return get_or_create_constant_svalue (build_int_cst (type, 0));
809 : else
810 : /* "(ARG0 && nonzero-cst)" -> "ARG0". */
811 8 : return get_or_create_cast (type, arg0);
812 : }
813 : break;
814 12 : case TRUTH_ORIF_EXPR:
815 12 : case TRUTH_OR_EXPR:
816 12 : if (cst1)
817 : {
818 12 : if (zerop (cst1))
819 : /* "(ARG0 || 0)" -> "ARG0". */
820 8 : return get_or_create_cast (type, arg0);
821 : else
822 : /* "(ARG0 && nonzero-cst)" -> "nonzero-cst". */
823 4 : return get_or_create_cast (type, arg1);
824 : }
825 : break;
826 :
827 1162 : case TRUNC_DIV_EXPR:
828 1162 : case CEIL_DIV_EXPR:
829 1162 : case FLOOR_DIV_EXPR:
830 1162 : case ROUND_DIV_EXPR:
831 1162 : case TRUNC_MOD_EXPR:
832 1162 : case CEIL_MOD_EXPR:
833 1162 : case FLOOR_MOD_EXPR:
834 1162 : case ROUND_MOD_EXPR:
835 1162 : case RDIV_EXPR:
836 1162 : case EXACT_DIV_EXPR:
837 1162 : {
838 1162 : value_range arg1_vr;
839 1162 : if (arg1->maybe_get_value_range (arg1_vr))
840 1069 : if (arg1_vr.zero_p ())
841 0 : return get_or_create_unknown_svalue (type);
842 1162 : }
843 1162 : break;
844 : }
845 :
846 : /* For associative ops, fold "(X op CST_A) op CST_B)" to
847 : "X op (CST_A op CST_B)". */
848 134702 : if (cst1 && associative_tree_code (op))
849 63053 : if (const binop_svalue *binop = arg0->dyn_cast_binop_svalue ())
850 16650 : if (binop->get_op () == op
851 16650 : && binop->get_arg1 ()->maybe_get_constant ())
852 2556 : return get_or_create_binop
853 2556 : (type, op, binop->get_arg0 (),
854 : get_or_create_binop (type, op,
855 2556 : binop->get_arg1 (), arg1));
856 :
857 : /* associative_tree_code is false for POINTER_PLUS_EXPR, but we
858 : can fold:
859 : "(PTR ptr+ CST_A) ptr+ CST_B)" to "PTR ptr+ (CST_A ptr+ CST_B)"
860 : e.g. in data-model-1.c: test_4c. */
861 132146 : if (cst1 && op == POINTER_PLUS_EXPR)
862 26232 : if (const binop_svalue *binop = arg0->dyn_cast_binop_svalue ())
863 1573 : if (binop->get_op () == POINTER_PLUS_EXPR)
864 1425 : if (binop->get_arg1 ()->maybe_get_constant ())
865 1110 : return get_or_create_binop
866 1110 : (type, op, binop->get_arg0 (),
867 : get_or_create_binop (size_type_node, op,
868 1110 : binop->get_arg1 (), arg1));
869 :
870 : /* Distribute multiplication by a constant through addition/subtraction:
871 : (X + Y) * CST => (X * CST) + (Y * CST). */
872 131036 : if (cst1 && op == MULT_EXPR)
873 24611 : if (const binop_svalue *binop = arg0->dyn_cast_binop_svalue ())
874 2729 : if (binop->get_op () == PLUS_EXPR
875 2729 : || binop->get_op () == MINUS_EXPR)
876 : {
877 2062 : return get_or_create_binop
878 2062 : (type, binop->get_op (),
879 : get_or_create_binop (type, op,
880 : binop->get_arg0 (), arg1),
881 : get_or_create_binop (type, op,
882 2062 : binop->get_arg1 (), arg1));
883 : }
884 :
885 :
886 : /* Typeless operations, assumed to be effectively arbitrary sized
887 : integers following normal arithmetic rules. */
888 128974 : if (!type)
889 34361 : switch (op)
890 : {
891 : default:
892 : break;
893 827 : case MINUS_EXPR:
894 827 : {
895 : /* (X - X) -> 0. */
896 827 : if (arg0 == arg1)
897 751 : return get_or_create_int_cst (type, 0);
898 :
899 : /* (X + A) - (A + B) -> (A - B). */
900 76 : if (const binop_svalue *binop0 = arg0->dyn_cast_binop_svalue ())
901 60 : if (const binop_svalue *binop1 = arg1->dyn_cast_binop_svalue ())
902 36 : if (binop0->get_op () == PLUS_EXPR
903 28 : && binop1->get_op () == PLUS_EXPR
904 56 : && binop0->get_arg0 () == binop1->get_arg0 ())
905 20 : return get_or_create_binop (NULL_TREE, op,
906 : binop0->get_arg1 (),
907 20 : binop1->get_arg1 ());
908 : }
909 : break;
910 :
911 73 : case EXACT_DIV_EXPR:
912 73 : {
913 73 : if (const unaryop_svalue *unaryop0 = arg0->dyn_cast_unaryop_svalue ())
914 : {
915 0 : if (unaryop0->get_op () == NOP_EXPR)
916 0 : if (const svalue *sval = maybe_fold_binop (NULL_TREE, op,
917 : unaryop0->get_arg (),
918 : arg1))
919 : return sval;
920 : }
921 73 : if (const binop_svalue *binop0 = arg0->dyn_cast_binop_svalue ())
922 : {
923 72 : switch (binop0->get_op ())
924 : {
925 : default:
926 : break;
927 :
928 26 : case PLUS_EXPR:
929 26 : case MINUS_EXPR:
930 : /* (A op B) / C -> (A / C) op (B / C). */
931 26 : {
932 26 : if (const svalue *op_on_a
933 26 : = maybe_fold_binop (NULL_TREE, op,
934 : binop0->get_arg0 (), arg1))
935 26 : if (const svalue *op_on_b
936 26 : = maybe_fold_binop (NULL_TREE, op,
937 : binop0->get_arg1 (), arg1))
938 26 : return get_or_create_binop (NULL_TREE,
939 : binop0->get_op (),
940 26 : op_on_a, op_on_b);
941 : }
942 : break;
943 :
944 46 : case MULT_EXPR:
945 : /* (A * B) / C -> A * (B / C) if C is a divisor of B.
946 : In particular, this should also handle the case
947 : (A * B) / B -> A. */
948 46 : if (const svalue *b_div_c
949 46 : = maybe_fold_binop (NULL_TREE, op,
950 : binop0->get_arg1 (), arg1))
951 46 : return get_or_create_binop (NULL_TREE, binop0->get_op (),
952 46 : binop0->get_arg0 (), b_div_c);
953 : }
954 : }
955 : }
956 : break;
957 : }
958 :
959 : /* etc. */
960 :
961 : return nullptr;
962 : }
963 :
964 : /* Return the svalue * for an binary operation OP on ARG0 and ARG1
965 : with a result of type TYPE, creating it if necessary. */
966 :
967 : const svalue *
968 227053 : region_model_manager::get_or_create_binop (tree type, enum tree_code op,
969 : const svalue *arg0,
970 : const svalue *arg1)
971 : {
972 : /* For commutative ops, put any constant on the RHS. */
973 227053 : if (arg0->maybe_get_constant () && commutative_tree_code (op))
974 : std::swap (arg0, arg1);
975 :
976 227053 : if (const svalue *folded = maybe_fold_binop (type, op, arg0, arg1))
977 : return folded;
978 :
979 : /* Ops on "unknown"/"poisoned" are unknown (unless we were able to fold
980 : it via an identity in maybe_fold_binop). */
981 128643 : if (!arg0->can_have_associated_state_p ()
982 128643 : || !arg1->can_have_associated_state_p ())
983 24690 : return get_or_create_unknown_svalue (type);
984 :
985 103953 : binop_svalue::key_t key (type, op, arg0, arg1);
986 103953 : if (binop_svalue **slot = m_binop_values_map.get (key))
987 83596 : return *slot;
988 20357 : binop_svalue *binop_sval
989 20357 : = new binop_svalue (alloc_symbol_id (), type, op, arg0, arg1);
990 20357 : RETURN_UNKNOWN_IF_TOO_COMPLEX (binop_sval);
991 20252 : m_binop_values_map.put (key, binop_sval);
992 20252 : return binop_sval;
993 : }
994 :
995 : /* Subroutine of region_model_manager::get_or_create_sub_svalue.
996 : Return a folded svalue, or nullptr. */
997 :
const svalue *
region_model_manager::maybe_fold_sub_svalue (tree type,
					     const svalue *parent_svalue,
					     const region *subregion)
{
  /* Subvalues of "unknown"/"poisoned" are unknown.  */
  if (!parent_svalue->can_have_associated_state_p ())
    return get_or_create_unknown_svalue (type);

  /* If we have a subvalue of a zero constant, it's zero.  */
  if (tree cst = parent_svalue->maybe_get_constant ())
    if (TREE_CODE (cst) == INTEGER_CST)
      if (zerop (cst))
	return get_or_create_cast (type, parent_svalue);

  /* If we have a subregion of a zero-fill, it's zero.
     (e.g. reading part of a value that was cast from a zero constant.)  */
  if (const unaryop_svalue *unary
      = parent_svalue->dyn_cast_unaryop_svalue ())
    {
      if (unary->get_op () == NOP_EXPR
	  || unary->get_op () == VIEW_CONVERT_EXPR)
	if (tree cst = unary->get_arg ()->maybe_get_constant ())
	  if (zerop (cst) && type)
	    {
	      const svalue *cst_sval
		= get_or_create_constant_svalue (cst);
	      return get_or_create_cast (type, cst_sval);
	    }
    }

  /* Handle getting individual chars from a STRING_CST or RAW_DATA_CST.  */
  if (tree cst = parent_svalue->maybe_get_constant ())
    if (TREE_CODE (cst) == STRING_CST
	|| TREE_CODE (cst) == RAW_DATA_CST)
      {
	/* If we have a concrete 1-byte access within the parent region... */
	byte_range subregion_bytes (0, 0);
	if (subregion->get_relative_concrete_byte_range (&subregion_bytes)
	    && subregion_bytes.m_size_in_bytes == 1
	    && type)
	  {
	    /* ...then attempt to get that char from the constant.  */
	    HOST_WIDE_INT hwi_start_byte
	      = subregion_bytes.m_start_byte_offset.to_shwi ();
	    tree cst_idx
	      = build_int_cst_type (size_type_node, hwi_start_byte);
	    if (const svalue *char_sval
		= maybe_get_char_from_cst (cst, cst_idx))
	      return get_or_create_cast (type, char_sval);
	  }
      }

  /* Push extraction through "initial value" svalues, turning a
     subvalue of an initial value into the initial value of a
     subregion.  */
  if (const initial_svalue *init_sval
      = parent_svalue->dyn_cast_initial_svalue ())
    {
      /* SUB(INIT(r)).FIELD -> INIT(r.FIELD)
	 i.e.
	 Subvalue(InitialValue(R1), FieldRegion(R2, F))
	 -> InitialValue(FieldRegion(R1, F)).  */
      if (const field_region *field_reg = subregion->dyn_cast_field_region ())
	{
	  const region *field_reg_new
	    = get_field_region (init_sval->get_region (),
				field_reg->get_field ());
	  return get_or_create_initial_value (field_reg_new);
	}
      /* SUB(INIT(r)[ELEMENT] -> INIT(e[ELEMENT])
	 i.e.
	 Subvalue(InitialValue(R1), ElementRegion(R2, IDX))
	 -> InitialValue(ElementRegion(R1, IDX)).  */
      if (const element_region *element_reg = subregion->dyn_cast_element_region ())
	{
	  const region *element_reg_new
	    = get_element_region (init_sval->get_region (),
				  element_reg->get_type (),
				  element_reg->get_index ());
	  return get_or_create_initial_value (element_reg_new);
	}
    }

  /* Extracting from a repeated value gives the inner value, cast to
     TYPE (no check of SUBREGION's position is needed here, since every
     part of the repeated value is the same).  */
  if (const repeated_svalue *repeated_sval
      = parent_svalue->dyn_cast_repeated_svalue ())
    if (type)
      return get_or_create_cast (type, repeated_sval->get_inner_svalue ());

  /* No folding possible; caller will consolidate a sub_svalue.  */
  return nullptr;
}
1085 :
1086 : /* Return the svalue * for extracting a subvalue of type TYPE from
1087 : PARENT_SVALUE based on SUBREGION, creating it if necessary. */
1088 :
1089 : const svalue *
1090 62853 : region_model_manager::get_or_create_sub_svalue (tree type,
1091 : const svalue *parent_svalue,
1092 : const region *subregion)
1093 : {
1094 125706 : if (const svalue *folded
1095 62853 : = maybe_fold_sub_svalue (type, parent_svalue, subregion))
1096 : return folded;
1097 :
1098 15898 : sub_svalue::key_t key (type, parent_svalue, subregion);
1099 15898 : if (sub_svalue **slot = m_sub_values_map.get (key))
1100 13567 : return *slot;
1101 2331 : sub_svalue *sub_sval
1102 2331 : = new sub_svalue (alloc_symbol_id (), type, parent_svalue, subregion);
1103 2331 : RETURN_UNKNOWN_IF_TOO_COMPLEX (sub_sval);
1104 2331 : m_sub_values_map.put (key, sub_sval);
1105 2331 : return sub_sval;
1106 : }
1107 :
1108 : /* Subroutine of region_model_manager::get_or_create_repeated_svalue.
1109 : Return a folded svalue, or nullptr. */
1110 :
const svalue *
region_model_manager::maybe_fold_repeated_svalue (tree type,
						  const svalue *outer_size,
						  const svalue *inner_svalue)
{
  /* Repeated "unknown"/"poisoned" is unknown.  */
  if (!outer_size->can_have_associated_state_p ()
      || !inner_svalue->can_have_associated_state_p ())
    return get_or_create_unknown_svalue (type);

  /* If INNER_SVALUE is the same size as OUTER_SIZE,
     turn into simply a cast.  */
  if (inner_svalue->get_type ())
    if (tree cst_outer_num_bytes = outer_size->maybe_get_constant ())
      {
	HOST_WIDE_INT num_bytes_inner_svalue
	  = int_size_in_bytes (inner_svalue->get_type ());
	/* int_size_in_bytes uses -1 for "not representable as a
	   HOST_WIDE_INT"; skip the optimization in that case.  */
	if (num_bytes_inner_svalue != -1)
	  if (num_bytes_inner_svalue
	      == (HOST_WIDE_INT)tree_to_uhwi (cst_outer_num_bytes))
	    {
	      if (type)
		return get_or_create_cast (type, inner_svalue);
	      else
		return inner_svalue;
	    }
      }

  /* Handle zero-fill of a specific type: repeating a zero constant
     of any size is just a zero of TYPE.  */
  if (tree cst = inner_svalue->maybe_get_constant ())
    if (zerop (cst) && type)
      return get_or_create_cast (type, inner_svalue);

  /* No folding possible; caller will consolidate a repeated_svalue.  */
  return nullptr;
}
1146 :
1147 : /* Return the svalue * of type TYPE in which INNER_SVALUE is repeated
1148 : enough times to be of size OUTER_SIZE, creating it if necessary.
1149 : e.g. for filling buffers with a constant value. */
1150 :
1151 : const svalue *
1152 2539 : region_model_manager::get_or_create_repeated_svalue (tree type,
1153 : const svalue *outer_size,
1154 : const svalue *inner_svalue)
1155 : {
1156 5078 : if (const svalue *folded
1157 2539 : = maybe_fold_repeated_svalue (type, outer_size, inner_svalue))
1158 : return folded;
1159 :
1160 1713 : repeated_svalue::key_t key (type, outer_size, inner_svalue);
1161 1713 : if (repeated_svalue **slot = m_repeated_values_map.get (key))
1162 1201 : return *slot;
1163 512 : repeated_svalue *repeated_sval
1164 512 : = new repeated_svalue (alloc_symbol_id (), type, outer_size, inner_svalue);
1165 512 : RETURN_UNKNOWN_IF_TOO_COMPLEX (repeated_sval);
1166 512 : m_repeated_values_map.put (key, repeated_sval);
1167 512 : return repeated_sval;
1168 : }
1169 :
1170 : /* Attempt to get the bit_range for FIELD within a RECORD_TYPE.
1171 : Return true and write the result to OUT if successful.
1172 : Return false otherwise. */
1173 :
1174 : static bool
1175 935 : get_bit_range_for_field (tree field, bit_range *out)
1176 : {
1177 935 : bit_size_t bit_size;
1178 935 : if (!int_size_in_bits (TREE_TYPE (field), &bit_size))
1179 : return false;
1180 935 : int field_bit_offset = int_bit_position (field);
1181 935 : *out = bit_range (field_bit_offset, bit_size);
1182 935 : return true;
1183 : }
1184 :
1185 : /* Attempt to get the byte_range for FIELD within a RECORD_TYPE.
1186 : Return true and write the result to OUT if successful.
1187 : Return false otherwise. */
1188 :
1189 : static bool
1190 935 : get_byte_range_for_field (tree field, byte_range *out)
1191 : {
1192 935 : bit_range field_bits (0, 0);
1193 935 : if (!get_bit_range_for_field (field, &field_bits))
1194 : return false;
1195 935 : return field_bits.as_byte_range (out);
1196 : }
1197 :
1198 : /* Attempt to determine if there is a specific field within RECORD_TYPE
1199 : at BYTES. If so, return it, and write the location of BYTES relative
1200 : to the field to *OUT_RANGE_WITHIN_FIELD.
1201 : Otherwise, return NULL_TREE.
1202 : For example, given:
1203 : struct foo { uint32 a; uint32; b};
1204 : and
1205 : bytes = {bytes 6-7} (of foo)
1206 : we have bytes 3-4 of field b. */
1207 :
1208 : static tree
1209 935 : get_field_at_byte_range (tree record_type, const byte_range &bytes,
1210 : byte_range *out_range_within_field)
1211 : {
1212 935 : bit_offset_t bit_offset = bytes.m_start_byte_offset * BITS_PER_UNIT;
1213 :
1214 935 : tree field = get_field_at_bit_offset (record_type, bit_offset);
1215 935 : if (!field)
1216 : return NULL_TREE;
1217 :
1218 935 : byte_range field_bytes (0,0);
1219 935 : if (!get_byte_range_for_field (field, &field_bytes))
1220 : return NULL_TREE;
1221 :
1222 : /* Is BYTES fully within field_bytes? */
1223 929 : byte_range bytes_within_field (0,0);
1224 929 : if (!field_bytes.contains_p (bytes, &bytes_within_field))
1225 : return NULL_TREE;
1226 :
1227 394 : *out_range_within_field = bytes_within_field;
1228 394 : return field;
1229 : }
1230 :
1231 : /* Subroutine of region_model_manager::get_or_create_bits_within.
1232 : Return a folded svalue, or NULL. */
1233 :
const svalue *
region_model_manager::maybe_fold_bits_within_svalue (tree type,
						     const bit_range &bits,
						     const svalue *inner_svalue)
{
  tree inner_type = inner_svalue->get_type ();
  /* Fold:
       BITS_WITHIN ((0, sizeof (VAL), VAL))
     to:
       CAST(TYPE, VAL).
     i.e. extracting all the bits of a value is just the value,
     possibly cast.  */
  if (bits.m_start_bit_offset == 0 && inner_type)
    {
      bit_size_t inner_type_size;
      if (int_size_in_bits (inner_type, &inner_type_size))
	if (inner_type_size == bits.m_size_in_bits)
	  {
	    if (type)
	      return get_or_create_cast (type, inner_svalue);
	    else
	      return inner_svalue;
	  }
    }

  /* Kind-specific folding, delegated to the svalue subclass.  */
  if (const svalue *sval
      = inner_svalue->maybe_fold_bits_within (type, bits, this))
    return sval;

  /* The remaining folds only apply to byte-aligned ranges within
     values of known type.  */
  byte_range bytes (0,0);
  if (bits.as_byte_range (&bytes) && inner_type)
    switch (TREE_CODE (inner_type))
      {
      default:
	break;
      case ARRAY_TYPE:
	{
	  /* Fold:
	       BITS_WITHIN (range, KIND(REG))
	     to:
	       BITS_WITHIN (range - offsetof(ELEMENT), KIND(REG.ELEMENT))
	     if range1 is a byte-range fully within one ELEMENT.  */
	  tree element_type = TREE_TYPE (inner_type);
	  HOST_WIDE_INT element_byte_size
	    = int_size_in_bytes (element_type);
	  if (element_byte_size > 0)
	    {
	      /* Determine which element(s) the byte range touches;
		 fold only when start and end fall in the same one.  */
	      HOST_WIDE_INT start_idx
		= (bytes.get_start_byte_offset ().to_shwi ()
		   / element_byte_size);
	      HOST_WIDE_INT last_idx
		= (bytes.get_last_byte_offset ().to_shwi ()
		   / element_byte_size);
	      if (start_idx == last_idx)
		{
		  if (const initial_svalue *initial_sval
		      = inner_svalue->dyn_cast_initial_svalue ())
		    {
		      /* Re-express BITS relative to the start of the
			 element, and recurse on the element's initial
			 value.  */
		      bit_offset_t start_of_element
			= start_idx * element_byte_size * BITS_PER_UNIT;
		      bit_range bits_within_element
			(bits.m_start_bit_offset - start_of_element,
			 bits.m_size_in_bits);
		      const svalue *idx_sval
			= get_or_create_int_cst (integer_type_node, start_idx);
		      const region *element_reg =
			get_element_region (initial_sval->get_region (),
					    element_type, idx_sval);
		      const svalue *element_reg_sval
			= get_or_create_initial_value (element_reg);
		      return get_or_create_bits_within (type,
							bits_within_element,
							element_reg_sval);
		    }
		}
	    }
	}
	break;
      case RECORD_TYPE:
	{
	  /* Fold:
	       BYTES_WITHIN (range, KIND(REG))
	     to:
	       BYTES_WITHIN (range - offsetof(FIELD), KIND(REG.FIELD))
	     if range1 is fully within FIELD.  */
	  byte_range bytes_within_field (0, 0);
	  if (tree field = get_field_at_byte_range (inner_type, bytes,
						    &bytes_within_field))
	    {
	      if (const initial_svalue *initial_sval
		  = inner_svalue->dyn_cast_initial_svalue ())
		{
		  /* Recurse on the initial value of the field's region,
		     with the range re-expressed relative to the field.  */
		  const region *field_reg =
		    get_field_region (initial_sval->get_region (), field);
		  const svalue *initial_reg_sval
		    = get_or_create_initial_value (field_reg);
		  return get_or_create_bits_within
		    (type,
		     bytes_within_field.as_bit_range (),
		     initial_reg_sval);
		}
	    }
	}
	break;
      }
  /* No folding possible; caller will consolidate a bits_within_svalue.  */
  return nullptr;
}
1340 :
1341 : /* Return the svalue * of type TYPE for extracting BITS from INNER_SVALUE,
1342 : creating it if necessary. */
1343 :
1344 : const svalue *
1345 20976 : region_model_manager::get_or_create_bits_within (tree type,
1346 : const bit_range &bits,
1347 : const svalue *inner_svalue)
1348 : {
1349 41952 : if (const svalue *folded
1350 20976 : = maybe_fold_bits_within_svalue (type, bits, inner_svalue))
1351 : return folded;
1352 :
1353 1435 : bits_within_svalue::key_t key (type, bits, inner_svalue);
1354 1435 : if (bits_within_svalue **slot = m_bits_within_values_map.get (key))
1355 663 : return *slot;
1356 772 : bits_within_svalue *bits_within_sval
1357 772 : = new bits_within_svalue (alloc_symbol_id (), type, bits, inner_svalue);
1358 772 : RETURN_UNKNOWN_IF_TOO_COMPLEX (bits_within_sval);
1359 772 : m_bits_within_values_map.put (key, bits_within_sval);
1360 772 : return bits_within_sval;
1361 : }
1362 :
1363 : /* Return the svalue * that decorates ARG as being unmergeable,
1364 : creating it if necessary. */
1365 :
1366 : const svalue *
1367 832 : region_model_manager::get_or_create_unmergeable (const svalue *arg)
1368 : {
1369 832 : if (arg->get_kind () == SK_UNMERGEABLE)
1370 : return arg;
1371 :
1372 832 : if (unmergeable_svalue **slot = m_unmergeable_values_map.get (arg))
1373 628 : return *slot;
1374 204 : unmergeable_svalue *unmergeable_sval
1375 204 : = new unmergeable_svalue (alloc_symbol_id (), arg);
1376 204 : RETURN_UNKNOWN_IF_TOO_COMPLEX (unmergeable_sval);
1377 204 : m_unmergeable_values_map.put (arg, unmergeable_sval);
1378 204 : return unmergeable_sval;
1379 : }
1380 :
1381 : /* Return the svalue * of type TYPE for the merger of value BASE_SVAL
1382 : and ITER_SVAL at SNODE, creating it if necessary. */
1383 :
1384 : const svalue *
1385 5797 : region_model_manager::
1386 : get_or_create_widening_svalue (tree type,
1387 : const supernode *snode,
1388 : const svalue *base_sval,
1389 : const svalue *iter_sval)
1390 : {
1391 5797 : gcc_assert (base_sval->get_kind () != SK_WIDENING);
1392 5797 : gcc_assert (iter_sval->get_kind () != SK_WIDENING);
1393 5797 : widening_svalue::key_t key (type, snode, base_sval, iter_sval);
1394 5797 : if (widening_svalue **slot = m_widening_values_map.get (key))
1395 3437 : return *slot;
1396 2360 : widening_svalue *widening_sval
1397 : = new widening_svalue (alloc_symbol_id (), type, snode, base_sval,
1398 2360 : iter_sval);
1399 2360 : RETURN_UNKNOWN_IF_TOO_COMPLEX (widening_sval);
1400 2360 : m_widening_values_map.put (key, widening_sval);
1401 2360 : return widening_sval;
1402 : }
1403 :
1404 : /* Return the svalue * of type TYPE for the compound values in MAP,
1405 : creating it if necessary. */
1406 :
1407 : const svalue *
1408 3749 : region_model_manager::get_or_create_compound_svalue (tree type,
1409 : const binding_map &map)
1410 : {
1411 3749 : compound_svalue::key_t tmp_key (type, &map);
1412 3749 : if (compound_svalue **slot = m_compound_values_map.get (tmp_key))
1413 3137 : return *slot;
1414 612 : compound_svalue *compound_sval
1415 612 : = new compound_svalue (alloc_symbol_id (), type, map);
1416 612 : RETURN_UNKNOWN_IF_TOO_COMPLEX (compound_sval);
1417 : /* Use make_key rather than reusing the key, so that we use a
1418 : ptr to compound_sval's binding_map, rather than the MAP param. */
1419 612 : m_compound_values_map.put (compound_sval->make_key (), compound_sval);
1420 612 : return compound_sval;
1421 : }
1422 :
1423 : /* class conjured_purge. */
1424 :
1425 : /* Purge state relating to SVAL. */
1426 :
void
conjured_purge::purge (const conjured_svalue *sval) const
{
  /* Delegate to the captured model, passing along the captured
     context.  */
  m_model->purge_state_involving (sval, m_ctxt);
}
1432 :
1433 : /* Return the svalue * of type TYPE for the value conjured for ID_REG
1434 : at STMT (using IDX for any further disambiguation),
1435 : creating it if necessary.
1436 : Use P to purge existing state from the svalue, for the case where a
1437 : conjured_svalue would be reused along an execution path. */
1438 :
1439 : const svalue *
1440 52708 : region_model_manager::get_or_create_conjured_svalue (tree type,
1441 : const gimple *stmt,
1442 : const region *id_reg,
1443 : const conjured_purge &p,
1444 : unsigned idx)
1445 : {
1446 52708 : conjured_svalue::key_t key (type, stmt, id_reg, idx);
1447 52708 : if (conjured_svalue **slot = m_conjured_values_map.get (key))
1448 : {
1449 26805 : const conjured_svalue *sval = *slot;
1450 : /* We're reusing an existing conjured_svalue, perhaps from a different
1451 : state within this analysis, or perhaps from an earlier state on this
1452 : execution path. For the latter, purge any state involving the "new"
1453 : svalue from the current program_state. */
1454 26805 : p.purge (sval);
1455 26805 : return sval;
1456 : }
1457 25903 : conjured_svalue *conjured_sval
1458 25903 : = new conjured_svalue (alloc_symbol_id (), type, stmt, id_reg, idx);
1459 25903 : RETURN_UNKNOWN_IF_TOO_COMPLEX (conjured_sval);
1460 25833 : m_conjured_values_map.put (key, conjured_sval);
1461 25833 : return conjured_sval;
1462 : }
1463 :
1464 : /* Subroutine of region_model_manager::get_or_create_asm_output_svalue.
1465 : Return a folded svalue, or nullptr. */
1466 :
1467 : const svalue *
1468 367 : region_model_manager::
1469 : maybe_fold_asm_output_svalue (tree type,
1470 : const vec<const svalue *> &inputs)
1471 : {
1472 : /* Unknown inputs should lead to unknown results. */
1473 1653 : for (const auto &iter : inputs)
1474 609 : if (iter->get_kind () == SK_UNKNOWN)
1475 9 : return get_or_create_unknown_svalue (type);
1476 :
1477 : return nullptr;
1478 : }
1479 :
1480 : /* Return the svalue * of type TYPE for OUTPUT_IDX of the deterministic
1481 : asm stmt ASM_STMT, given INPUTS as inputs. */
1482 :
1483 : const svalue *
1484 289 : region_model_manager::
1485 : get_or_create_asm_output_svalue (tree type,
1486 : const gasm *asm_stmt,
1487 : unsigned output_idx,
1488 : const vec<const svalue *> &inputs)
1489 : {
1490 289 : gcc_assert (inputs.length () <= asm_output_svalue::MAX_INPUTS);
1491 :
1492 578 : if (const svalue *folded
1493 289 : = maybe_fold_asm_output_svalue (type, inputs))
1494 : return folded;
1495 :
1496 280 : const char *asm_string = gimple_asm_string (asm_stmt);
1497 280 : const unsigned noutputs = gimple_asm_noutputs (asm_stmt);
1498 :
1499 280 : asm_output_svalue::key_t key (type, asm_string, output_idx, inputs);
1500 280 : if (asm_output_svalue **slot = m_asm_output_values_map.get (key))
1501 108 : return *slot;
1502 172 : asm_output_svalue *asm_output_sval
1503 : = new asm_output_svalue (alloc_symbol_id (), type, asm_string, output_idx,
1504 172 : noutputs, inputs);
1505 172 : RETURN_UNKNOWN_IF_TOO_COMPLEX (asm_output_sval);
1506 167 : m_asm_output_values_map.put (key, asm_output_sval);
1507 167 : return asm_output_sval;
1508 : }
1509 :
1510 : /* Return the svalue * of type TYPE for OUTPUT_IDX of a deterministic
1511 : asm stmt with string ASM_STRING with NUM_OUTPUTS outputs, given
1512 : INPUTS as inputs. */
1513 :
1514 : const svalue *
1515 78 : region_model_manager::
1516 : get_or_create_asm_output_svalue (tree type,
1517 : const char *asm_string,
1518 : unsigned output_idx,
1519 : unsigned num_outputs,
1520 : const vec<const svalue *> &inputs)
1521 : {
1522 78 : gcc_assert (inputs.length () <= asm_output_svalue::MAX_INPUTS);
1523 :
1524 156 : if (const svalue *folded
1525 78 : = maybe_fold_asm_output_svalue (type, inputs))
1526 : return folded;
1527 :
1528 78 : asm_output_svalue::key_t key (type, asm_string, output_idx, inputs);
1529 78 : if (asm_output_svalue **slot = m_asm_output_values_map.get (key))
1530 58 : return *slot;
1531 20 : asm_output_svalue *asm_output_sval
1532 : = new asm_output_svalue (alloc_symbol_id (), type, asm_string, output_idx,
1533 20 : num_outputs, inputs);
1534 20 : RETURN_UNKNOWN_IF_TOO_COMPLEX (asm_output_sval);
1535 20 : m_asm_output_values_map.put (key, asm_output_sval);
1536 20 : return asm_output_sval;
1537 : }
1538 :
1539 : /* Return the svalue * of type TYPE for the result of a call to FNDECL
1540 : with __attribute__((const)), given INPUTS as inputs. */
1541 :
1542 : const svalue *
1543 891 : region_model_manager::
1544 : get_or_create_const_fn_result_svalue (tree type,
1545 : tree fndecl,
1546 : const vec<const svalue *> &inputs)
1547 : {
1548 891 : gcc_assert (fndecl);
1549 891 : gcc_assert (DECL_P (fndecl));
1550 891 : gcc_assert (TREE_READONLY (fndecl));
1551 891 : gcc_assert (inputs.length () <= const_fn_result_svalue::MAX_INPUTS);
1552 :
1553 891 : const_fn_result_svalue::key_t key (type, fndecl, inputs);
1554 891 : if (const_fn_result_svalue **slot = m_const_fn_result_values_map.get (key))
1555 704 : return *slot;
1556 187 : const_fn_result_svalue *const_fn_result_sval
1557 187 : = new const_fn_result_svalue (alloc_symbol_id (), type, fndecl, inputs);
1558 187 : RETURN_UNKNOWN_IF_TOO_COMPLEX (const_fn_result_sval);
1559 187 : m_const_fn_result_values_map.put (key, const_fn_result_sval);
1560 187 : return const_fn_result_sval;
1561 : }
1562 :
1563 : /* Given DATA_CST (a STRING_CST or RAW_DATA_CST) and BYTE_OFFSET_CST a constant,
1564 : attempt to get the character at that offset, returning either
1565 : the svalue for the character constant, or nullptr if unsuccessful. */
1566 :
1567 : const svalue *
1568 6045 : region_model_manager::maybe_get_char_from_cst (tree data_cst,
1569 : tree byte_offset_cst)
1570 : {
1571 6045 : switch (TREE_CODE (data_cst))
1572 : {
1573 0 : default: gcc_unreachable ();
1574 4570 : case STRING_CST:
1575 4570 : return maybe_get_char_from_string_cst (data_cst, byte_offset_cst);
1576 1475 : case RAW_DATA_CST:
1577 1475 : return maybe_get_char_from_raw_data_cst (data_cst, byte_offset_cst);
1578 : }
1579 : }
1580 :
1581 : /* Get a tree for the size of STRING_CST, or NULL_TREE.
1582 : Note that this may be larger than TREE_STRING_LENGTH (implying
1583 : a run of trailing zero bytes from TREE_STRING_LENGTH up to this
1584 : higher limit). */
1585 :
1586 : tree
1587 5234 : get_string_cst_size (const_tree string_cst)
1588 : {
1589 5234 : gcc_assert (TREE_CODE (string_cst) == STRING_CST);
1590 5234 : gcc_assert (TREE_CODE (TREE_TYPE (string_cst)) == ARRAY_TYPE);
1591 :
1592 5234 : return TYPE_SIZE_UNIT (TREE_TYPE (string_cst));
1593 : }
1594 :
1595 : /* Given STRING_CST, a STRING_CST and BYTE_OFFSET_CST a constant,
1596 : attempt to get the character at that offset, returning either
1597 : the svalue for the character constant, or nullptr if unsuccessful. */
1598 :
const svalue *
region_model_manager::maybe_get_char_from_string_cst (tree string_cst,
						      tree byte_offset_cst)
{
  gcc_assert (TREE_CODE (string_cst) == STRING_CST);

  /* Adapted from fold_read_from_constant_string.
     Only handle a constant integer offset into a string whose element
     type is a 1-byte integer mode.  */
  scalar_int_mode char_mode;
  if (TREE_CODE (byte_offset_cst) == INTEGER_CST
      && is_int_mode (TYPE_MODE (TREE_TYPE (TREE_TYPE (string_cst))),
		      &char_mode)
      && GET_MODE_SIZE (char_mode) == 1)
    {
      /* If we're beyond the string_cst, the read is unsuccessful.  */
      if (compare_constants (byte_offset_cst,
			     GE_EXPR,
			     get_string_cst_size (string_cst)).is_true ())
	return nullptr;

      int char_val;
      if (compare_tree_int (byte_offset_cst,
			    TREE_STRING_LENGTH (string_cst)) < 0)
	/* We're within the area defined by TREE_STRING_POINTER.  */
	char_val = (TREE_STRING_POINTER (string_cst)
		    [TREE_INT_CST_LOW (byte_offset_cst)]);
      else
	/* We're in the padding area of trailing zeroes
	   (between TREE_STRING_LENGTH and the array type's size).  */
	char_val = 0;
      /* Build a constant of the string's element type for the char.  */
      tree char_cst
	= build_int_cst_type (TREE_TYPE (TREE_TYPE (string_cst)), char_val);
      return get_or_create_constant_svalue (char_cst);
    }
  return nullptr;
}
1633 :
1634 : /* Given RAW_DATA_CST, a RAW_DATA_CST and BYTE_OFFSET_CST a constant,
1635 : attempt to get the character at that offset, returning either
1636 : the svalue for the character constant, or nullptr if unsuccessful. */
1637 :
1638 : const svalue *
1639 1475 : region_model_manager::maybe_get_char_from_raw_data_cst (tree raw_data_cst,
1640 : tree byte_offset_cst)
1641 : {
1642 1475 : gcc_assert (TREE_CODE (raw_data_cst) == RAW_DATA_CST);
1643 1475 : gcc_assert (TREE_CODE (byte_offset_cst) == INTEGER_CST);
1644 :
1645 1475 : offset_int o = (wi::to_offset (byte_offset_cst));
1646 1475 : if (o >= 0 && o < RAW_DATA_LENGTH (raw_data_cst))
1647 1468 : return get_or_create_int_cst
1648 1468 : (TREE_TYPE (raw_data_cst),
1649 2936 : RAW_DATA_UCHAR_ELT (raw_data_cst, o.to_uhwi ()));
1650 : return nullptr;
1651 : }
1652 :
1653 : /* region consolidation. */
1654 :
1655 : /* Return the region for FNDECL, creating it if necessary. */
1656 :
1657 : const function_region *
1658 877020 : region_model_manager::get_region_for_fndecl (tree fndecl)
1659 : {
1660 877020 : gcc_assert (TREE_CODE (fndecl) == FUNCTION_DECL);
1661 :
1662 877020 : function_region **slot = m_fndecls_map.get (fndecl);
1663 877020 : if (slot)
1664 867629 : return *slot;
1665 9391 : function_region *reg
1666 9391 : = new function_region (alloc_symbol_id (), &m_code_region, fndecl);
1667 9391 : m_fndecls_map.put (fndecl, reg);
1668 9391 : return reg;
1669 : }
1670 :
1671 : /* Return the region for LABEL, creating it if necessary. */
1672 :
1673 : const label_region *
1674 358 : region_model_manager::get_region_for_label (tree label)
1675 : {
1676 358 : gcc_assert (TREE_CODE (label) == LABEL_DECL);
1677 :
1678 358 : label_region **slot = m_labels_map.get (label);
1679 358 : if (slot)
1680 308 : return *slot;
1681 :
1682 50 : tree fndecl = DECL_CONTEXT (label);
1683 50 : gcc_assert (fndecl && TREE_CODE (fndecl) == FUNCTION_DECL);
1684 :
1685 50 : const function_region *func_reg = get_region_for_fndecl (fndecl);
1686 50 : label_region *reg
1687 50 : = new label_region (alloc_symbol_id (), func_reg, label);
1688 50 : m_labels_map.put (label, reg);
1689 50 : return reg;
1690 : }
1691 :
1692 : /* Return the region for EXPR, creating it if necessary. */
1693 :
1694 : const decl_region *
1695 51245 : region_model_manager::get_region_for_global (tree expr)
1696 : {
1697 51245 : gcc_assert (VAR_P (expr));
1698 :
1699 51245 : decl_region **slot = m_globals_map.get (expr);
1700 51245 : if (slot)
1701 44867 : return *slot;
1702 6378 : decl_region *reg
1703 6378 : = new decl_region (alloc_symbol_id (), &m_globals_region, expr);
1704 6378 : m_globals_map.put (expr, reg);
1705 6378 : return reg;
1706 : }
1707 :
1708 : /* Return the region for an unknown access of type REGION_TYPE,
1709 : creating it if necessary.
1710 : This is a symbolic_region, where the pointer is an unknown_svalue
1711 : of type &REGION_TYPE. */
1712 :
1713 : const region *
1714 12777 : region_model_manager::get_unknown_symbolic_region (tree region_type)
1715 : {
1716 12777 : tree ptr_type = region_type ? build_pointer_type (region_type) : NULL_TREE;
1717 12777 : const svalue *unknown_ptr = get_or_create_unknown_svalue (ptr_type);
1718 12777 : return get_symbolic_region (unknown_ptr);
1719 : }
1720 :
1721 : /* Return the region that describes accessing field FIELD of PARENT,
1722 : creating it if necessary. */
1723 :
1724 : const region *
1725 47077 : region_model_manager::get_field_region (const region *parent, tree field)
1726 : {
1727 47077 : gcc_assert (parent);
1728 47077 : gcc_assert (field);
1729 47077 : gcc_assert (TREE_CODE (field) == FIELD_DECL);
1730 :
1731 : /* (*UNKNOWN_PTR).field is (*UNKNOWN_PTR_OF_&FIELD_TYPE). */
1732 47077 : if (parent->symbolic_for_unknown_ptr_p ())
1733 4179 : return get_unknown_symbolic_region (TREE_TYPE (field));
1734 :
1735 42898 : field_region::key_t key (parent, field);
1736 78036 : if (field_region *reg = m_field_regions.get (key))
1737 : return reg;
1738 :
1739 7760 : field_region *field_reg
1740 7760 : = new field_region (alloc_symbol_id (), parent, field);
1741 7760 : m_field_regions.put (key, field_reg);
1742 7760 : return field_reg;
1743 : }
1744 :
1745 : /* Return the region that describes accessing the element of type
1746 : ELEMENT_TYPE at index INDEX of PARENT, creating it if necessary. */
1747 :
1748 : const region *
1749 28241 : region_model_manager::get_element_region (const region *parent,
1750 : tree element_type,
1751 : const svalue *index)
1752 : {
1753 : /* (UNKNOWN_PTR[IDX]) is (UNKNOWN_PTR). */
1754 28241 : if (parent->symbolic_for_unknown_ptr_p ())
1755 52 : return get_unknown_symbolic_region (element_type);
1756 :
1757 28189 : element_region::key_t key (parent, element_type, index);
1758 52265 : if (element_region *reg = m_element_regions.get (key))
1759 : return reg;
1760 :
1761 4113 : element_region *element_reg
1762 4113 : = new element_region (alloc_symbol_id (), parent, element_type, index);
1763 4113 : m_element_regions.put (key, element_reg);
1764 4113 : return element_reg;
1765 : }
1766 :
1767 : /* Return the region that describes accessing the subregion of type
1768 : ELEMENT_TYPE at offset BYTE_OFFSET within PARENT, creating it if
1769 : necessary. */
1770 :
const region *
region_model_manager::get_offset_region (const region *parent,
					 tree type,
					 const svalue *byte_offset)
{
  /* (UNKNOWN_PTR + OFFSET) is (UNKNOWN_PTR). */
  if (parent->symbolic_for_unknown_ptr_p ())
    return get_unknown_symbolic_region (type);

  /* If BYTE_OFFSET is zero, return PARENT. */
  /* (More precisely, return PARENT viewed as TYPE via a cast_region,
     so that the result carries the requested type.)  */
  if (tree cst_offset = byte_offset->maybe_get_constant ())
    if (zerop (cst_offset))
      return get_cast_region (parent, type);

  /* Fold OFFSET_REGION(OFFSET_REGION(REG, X), Y)
     to OFFSET_REGION(REG, (X + Y)). */
  /* This recurses via the call to get_offset_region below, so a chain
     of nested offsets collapses to a single offset_region whose offset
     is the symbolic sum.  */
  if (const offset_region *parent_offset_reg
	= parent->dyn_cast_offset_region ())
    {
      const svalue *sval_x = parent_offset_reg->get_byte_offset ();
      const svalue *sval_sum
	= get_or_create_binop (byte_offset->get_type (),
			       POINTER_PLUS_EXPR, sval_x, byte_offset);
      return get_offset_region (parent->get_parent_region (), type, sval_sum);
    }

  /* Consolidate on (PARENT, TYPE, BYTE_OFFSET).  */
  offset_region::key_t key (parent, type, byte_offset);
  if (offset_region *reg = m_offset_regions.get (key))
    return reg;

  offset_region *offset_reg
    = new offset_region (alloc_symbol_id (), parent, type, byte_offset);
  m_offset_regions.put (key, offset_reg);
  return offset_reg;
}
1806 :
1807 : /* Return the region that describes accessing the subregion of type
1808 : TYPE of size BYTE_SIZE_SVAL within PARENT, creating it if necessary. */
1809 :
const region *
region_model_manager::get_sized_region (const region *parent,
					tree type,
					const svalue *byte_size_sval)
{
  /* A sized view of *UNKNOWN_PTR is still unknown.  */
  if (parent->symbolic_for_unknown_ptr_p ())
    return get_unknown_symbolic_region (type);

  /* Normalize the size to size_t, so that equal sizes expressed with
     different integer types consolidate to the same region.  */
  if (byte_size_sval->get_type () != size_type_node)
    byte_size_sval = get_or_create_cast (size_type_node, byte_size_sval);

  /* If PARENT is already that size, return it. */
  /* (Only provable when both sizes are compile-time constants.)  */
  const svalue *parent_byte_size_sval = parent->get_byte_size_sval (this);
  if (tree parent_size_cst = parent_byte_size_sval->maybe_get_constant ())
    if (tree size_cst = byte_size_sval->maybe_get_constant ())
      {
	tree comparison
	  = fold_binary (EQ_EXPR, boolean_type_node, parent_size_cst, size_cst);
	if (comparison == boolean_true_node)
	  return parent;
      }

  /* Consolidate on (PARENT, TYPE, BYTE_SIZE_SVAL).  */
  sized_region::key_t key (parent, type, byte_size_sval);
  if (sized_region *reg = m_sized_regions.get (key))
    return reg;

  sized_region *sized_reg
    = new sized_region (alloc_symbol_id (), parent, type, byte_size_sval);
  m_sized_regions.put (key, sized_reg);
  return sized_reg;
}
1841 :
1842 : /* Return the region that describes accessing ORIGINAL_REGION as if
1843 : it were of type TYPE, creating it if necessary. */
1844 :
1845 : const region *
1846 65825 : region_model_manager::get_cast_region (const region *original_region,
1847 : tree type)
1848 : {
1849 : /* If types match, return ORIGINAL_REGION. */
1850 65825 : if (type == original_region->get_type ())
1851 : return original_region;
1852 :
1853 17061 : if (original_region->symbolic_for_unknown_ptr_p ())
1854 96 : return get_unknown_symbolic_region (type);
1855 :
1856 16965 : cast_region::key_t key (original_region, type);
1857 31293 : if (cast_region *reg = m_cast_regions.get (key))
1858 : return reg;
1859 :
1860 2637 : cast_region *cast_reg
1861 2637 : = new cast_region (alloc_symbol_id (), original_region, type);
1862 2637 : m_cast_regions.put (key, cast_reg);
1863 2637 : return cast_reg;
1864 : }
1865 :
1866 : /* Return the frame_region for call to FUN from CALLING_FRAME, creating it
1867 : if necessary. CALLING_FRAME may be nullptr. */
1868 :
1869 : const frame_region *
1870 32673 : region_model_manager::get_frame_region (const frame_region *calling_frame,
1871 : const function &fun)
1872 : {
1873 32673 : int index = calling_frame ? calling_frame->get_index () + 1 : 0;
1874 :
1875 32673 : frame_region::key_t key (calling_frame, fun);
1876 50413 : if (frame_region *reg = m_frame_regions.get (key))
1877 : return reg;
1878 :
1879 14933 : frame_region *frame_reg
1880 : = new frame_region (alloc_symbol_id (), &m_stack_region, calling_frame,
1881 14933 : fun, index);
1882 14933 : m_frame_regions.put (key, frame_reg);
1883 14933 : return frame_reg;
1884 : }
1885 :
1886 : /* Return the region that describes dereferencing SVAL, creating it
1887 : if necessary. */
1888 :
1889 : const region *
1890 73121 : region_model_manager::get_symbolic_region (const svalue *sval)
1891 : {
1892 73121 : symbolic_region::key_t key (&m_root_region, sval);
1893 138027 : if (symbolic_region *reg = m_symbolic_regions.get (key))
1894 : return reg;
1895 :
1896 8215 : symbolic_region *symbolic_reg
1897 8215 : = new symbolic_region (alloc_symbol_id (), &m_root_region, sval);
1898 8215 : m_symbolic_regions.put (key, symbolic_reg);
1899 8215 : return symbolic_reg;
1900 : }
1901 :
1902 : /* Return the region that describes accessing STRING_CST, creating it
1903 : if necessary. */
1904 :
1905 : const string_region *
1906 15185 : region_model_manager::get_region_for_string (tree string_cst)
1907 : {
1908 15185 : gcc_assert (TREE_CODE (string_cst) == STRING_CST);
1909 :
1910 15185 : string_region **slot = m_string_map.get (string_cst);
1911 15185 : if (slot)
1912 11437 : return *slot;
1913 3748 : string_region *reg
1914 3748 : = new string_region (alloc_symbol_id (), &m_root_region, string_cst);
1915 3748 : m_string_map.put (string_cst, reg);
1916 3748 : return reg;
1917 : }
1918 :
1919 : /* Return the region that describes accessing BITS within PARENT as TYPE,
1920 : creating it if necessary. */
1921 :
1922 : const region *
1923 247 : region_model_manager::get_bit_range (const region *parent, tree type,
1924 : const bit_range &bits)
1925 : {
1926 247 : gcc_assert (parent);
1927 :
1928 247 : if (parent->symbolic_for_unknown_ptr_p ())
1929 0 : return get_unknown_symbolic_region (type);
1930 :
1931 247 : bit_range_region::key_t key (parent, type, bits);
1932 327 : if (bit_range_region *reg = m_bit_range_regions.get (key))
1933 : return reg;
1934 :
1935 167 : bit_range_region *bit_range_reg
1936 167 : = new bit_range_region (alloc_symbol_id (), parent, type, bits);
1937 167 : m_bit_range_regions.put (key, bit_range_reg);
1938 167 : return bit_range_reg;
1939 : }
1940 :
1941 : /* Return the region that describes accessing the IDX-th variadic argument
1942 : within PARENT_FRAME, creating it if necessary. */
1943 :
1944 : const var_arg_region *
1945 1199 : region_model_manager::get_var_arg_region (const frame_region *parent_frame,
1946 : unsigned idx)
1947 : {
1948 1199 : gcc_assert (parent_frame);
1949 :
1950 1199 : var_arg_region::key_t key (parent_frame, idx);
1951 1901 : if (var_arg_region *reg = m_var_arg_regions.get (key))
1952 : return reg;
1953 :
1954 497 : var_arg_region *var_arg_reg
1955 497 : = new var_arg_region (alloc_symbol_id (), parent_frame, idx);
1956 497 : m_var_arg_regions.put (key, var_arg_reg);
1957 497 : return var_arg_reg;
1958 : }
1959 :
1960 : /* If we see a tree code we don't know how to handle, rather than
1961 : ICE or generate bogus results, create a dummy region, and notify
1962 : CTXT so that it can mark the new state as being not properly
1963 : modelled. The exploded graph can then stop exploring that path,
1964 : since any diagnostics we might issue will have questionable
1965 : validity. */
1966 :
1967 : const region *
1968 56 : region_model_manager::
1969 : get_region_for_unexpected_tree_code (region_model_context *ctxt,
1970 : tree t,
1971 : const dump_location_t &loc)
1972 : {
1973 56 : tree type = TYPE_P (t) ? t : TREE_TYPE (t);
1974 56 : region *new_reg
1975 56 : = new unknown_region (alloc_symbol_id (), &m_root_region, type);
1976 56 : if (ctxt)
1977 52 : ctxt->on_unexpected_tree_code (t, loc);
1978 56 : return new_reg;
1979 : }
1980 :
1981 : /* Return a region describing a heap-allocated block of memory.
1982 : Reuse an existing heap_allocated_region if its id is not within
1983 : BASE_REGS_IN_USE. */
1984 :
const region *
region_model_manager::
get_or_create_region_for_heap_alloc (const bitmap &base_regs_in_use)
{
  /* Try to reuse an existing region, if it's unreferenced in the
     client state.  */
  /* Note: m_managed_dynamic_regions also holds alloca_regions
     (see create_region_for_alloca), hence the kind check below
     before reusing an entry.  */
  for (auto existing_reg : m_managed_dynamic_regions)
    if (!bitmap_bit_p (base_regs_in_use, existing_reg->get_id ()))
      if (existing_reg->get_kind () == RK_HEAP_ALLOCATED)
	return existing_reg;

  /* All existing ones (if any) are in use; create a new one.  */
  region *reg
    = new heap_allocated_region (alloc_symbol_id (), &m_heap_region);
  m_managed_dynamic_regions.safe_push (reg);
  return reg;
}
2002 :
2003 : /* Return a new region describing a block of memory allocated within FRAME. */
2004 :
2005 : const region *
2006 956 : region_model_manager::create_region_for_alloca (const frame_region *frame)
2007 : {
2008 956 : gcc_assert (frame);
2009 956 : region *reg = new alloca_region (alloc_symbol_id (), frame);
2010 956 : m_managed_dynamic_regions.safe_push (reg);
2011 956 : return reg;
2012 : }
2013 :
2014 : /* Log OBJ to LOGGER. */
2015 :
template <typename T>
static void
log_managed_object (logger *logger, const T *obj)
{
  /* Emit a single log line: an indent followed by OBJ's dump.  */
  logger->start_log_line ();
  pretty_printer *pp = logger->get_printer ();
  pp_string (pp, "  ");
  obj->dump_to_pp (pp, true);
  logger->end_log_line ();
}
2026 :
2027 : /* Specialization for frame_region, which also logs the count of locals
2028 : managed by the frame_region. */
2029 :
2030 : template <>
2031 : void
2032 6 : log_managed_object (logger *logger, const frame_region *obj)
2033 : {
2034 6 : logger->start_log_line ();
2035 6 : pretty_printer *pp = logger->get_printer ();
2036 6 : pp_string (pp, " ");
2037 6 : obj->dump_to_pp (pp, true);
2038 6 : pp_printf (pp, " [with %i region(s) for locals]", obj->get_num_locals ());
2039 6 : logger->end_log_line ();
2040 6 : }
2041 :
2042 : /* Dump the number of objects that were managed by UNIQ_MAP to LOGGER.
2043 : If SHOW_OBJS is true, also dump the objects themselves. */
2044 :
template <typename K, typename T>
static void
log_uniq_map (logger *logger, bool show_objs, const char *title,
	      const hash_map<K, T*> &uniq_map)
{
  /* Always log the element count.  */
  logger->log (" # %s: %li", title, (long)uniq_map.elements ());
  if (!show_objs)
    return;
  /* Gather the objects into a vec and sort them with the type's
     comparator, so that the per-object dump has a stable order
     (hash_map iteration order is not).  */
  auto_vec<const T *> vec_objs (uniq_map.elements ());
  for (typename hash_map<K, T*>::iterator iter = uniq_map.begin ();
       iter != uniq_map.end (); ++iter)
    vec_objs.quick_push ((*iter).second);

  vec_objs.qsort (T::cmp_ptr_ptr);

  unsigned i;
  const T *obj;
  FOR_EACH_VEC_ELT (vec_objs, i, obj)
    log_managed_object<T> (logger, obj);
}
2065 :
2066 : /* Dump the number of objects that were managed by MAP to LOGGER.
2067 : If SHOW_OBJS is true, also dump the objects themselves. */
2068 :
2069 : template <typename T>
2070 : static void
2071 55 : log_uniq_map (logger *logger, bool show_objs, const char *title,
2072 : const consolidation_map<T> &map)
2073 : {
2074 55 : logger->log (" # %s: %li", title, (long)map.elements ());
2075 55 : if (!show_objs)
2076 0 : return;
2077 :
2078 55 : auto_vec<const T *> vec_objs (map.elements ());
2079 55 : for (typename consolidation_map<T>::iterator iter = map.begin ();
2080 158 : iter != map.end (); ++iter)
2081 48 : vec_objs.quick_push ((*iter).second);
2082 :
2083 103 : vec_objs.qsort (T::cmp_ptr_ptr);
2084 :
2085 : unsigned i;
2086 : const T *obj;
2087 125 : FOR_EACH_VEC_ELT (vec_objs, i, obj)
2088 48 : log_managed_object<T> (logger, obj);
2089 55 : }
2090 :
2091 : /* Dump the number of objects of each class that were managed by this
2092 : manager to LOGGER.
2093 : If SHOW_OBJS is true, also dump the objects themselves. */
2094 :
void
region_model_manager::log_stats (logger *logger, bool show_objs) const
{
  LOG_SCOPE (logger);
  logger->log ("call string consolidation");
  m_empty_call_string.recursive_log (logger);
  logger->log ("next symbol id: %i", m_next_symbol_id);
  /* Per-class counts (and optionally dumps) for the svalue
     consolidation maps.  */
  logger->log ("svalue consolidation");
  log_uniq_map (logger, show_objs, "constant_svalue", m_constants_map);
  log_uniq_map (logger, show_objs, "unknown_svalue", m_unknowns_map);
  /* The typeless unknown svalue is a singleton, not in a map.  */
  if (m_unknown_NULL)
    log_managed_object (logger, m_unknown_NULL);
  log_uniq_map (logger, show_objs, "poisoned_svalue", m_poisoned_values_map);
  log_uniq_map (logger, show_objs, "setjmp_svalue", m_setjmp_values_map);
  log_uniq_map (logger, show_objs, "initial_svalue", m_initial_values_map);
  log_uniq_map (logger, show_objs, "region_svalue", m_pointer_values_map);
  log_uniq_map (logger, show_objs, "unaryop_svalue", m_unaryop_values_map);
  log_uniq_map (logger, show_objs, "binop_svalue", m_binop_values_map);
  log_uniq_map (logger, show_objs, "sub_svalue", m_sub_values_map);
  log_uniq_map (logger, show_objs, "repeated_svalue", m_repeated_values_map);
  log_uniq_map (logger, show_objs, "bits_within_svalue",
		m_bits_within_values_map);
  log_uniq_map (logger, show_objs, "unmergeable_svalue",
		m_unmergeable_values_map);
  log_uniq_map (logger, show_objs, "widening_svalue", m_widening_values_map);
  log_uniq_map (logger, show_objs, "compound_svalue", m_compound_values_map);
  log_uniq_map (logger, show_objs, "conjured_svalue", m_conjured_values_map);
  log_uniq_map (logger, show_objs, "asm_output_svalue",
		m_asm_output_values_map);
  log_uniq_map (logger, show_objs, "const_fn_result_svalue",
		m_const_fn_result_values_map);

  /* High-water marks for accepted svalue complexity.  */
  logger->log ("max accepted svalue num_nodes: %i",
	       m_max_complexity.m_num_nodes);
  logger->log ("max accepted svalue max_depth: %i",
	       m_max_complexity.m_max_depth);

  /* Per-class counts for the region consolidation maps.  */
  logger->log ("region consolidation");
  log_uniq_map (logger, show_objs, "function_region", m_fndecls_map);
  log_uniq_map (logger, show_objs, "label_region", m_labels_map);
  log_uniq_map (logger, show_objs, "decl_region for globals", m_globals_map);
  log_uniq_map (logger, show_objs, "field_region", m_field_regions);
  log_uniq_map (logger, show_objs, "element_region", m_element_regions);
  log_uniq_map (logger, show_objs, "offset_region", m_offset_regions);
  log_uniq_map (logger, show_objs, "sized_region", m_sized_regions);
  log_uniq_map (logger, show_objs, "cast_region", m_cast_regions);
  log_uniq_map (logger, show_objs, "frame_region", m_frame_regions);
  log_uniq_map (logger, show_objs, "symbolic_region", m_symbolic_regions);
  log_uniq_map (logger, show_objs, "string_region", m_string_map);
  log_uniq_map (logger, show_objs, "bit_range_region", m_bit_range_regions);
  log_uniq_map (logger, show_objs, "var_arg_region", m_var_arg_regions);
  logger->log (" # managed dynamic regions: %i",
	       m_managed_dynamic_regions.length ());
  /* Delegate to the sub-managers for their own stats.  */
  m_store_mgr.log_stats (logger, show_objs);
  m_range_mgr->log_stats (logger, show_objs);
}
2151 :
2152 : /* Dump the number of objects of each class that were managed by this
2153 : manager to LOGGER.
2154 : If SHOW_OBJS is true, also dump the objects themselves.
2155 : This is here so it can use log_uniq_map. */
2156 :
void
store_manager::log_stats (logger *logger, bool show_objs) const
{
  LOG_SCOPE (logger);
  /* Binding keys are consolidated too; log both kinds.  */
  log_uniq_map (logger, show_objs, "concrete_binding",
		m_concrete_binding_key_mgr);
  log_uniq_map (logger, show_objs, "symbolic_binding",
		m_symbolic_binding_key_mgr);
}
2166 :
2167 : /* Emit a warning showing DECL_REG->tracked_p () for use in DejaGnu tests
2168 : (using -fdump-analyzer-untracked). */
2169 :
2170 : static void
2171 162 : dump_untracked_region (const decl_region *decl_reg)
2172 : {
2173 162 : tree decl = decl_reg->get_decl ();
2174 162 : if (TREE_CODE (decl) != VAR_DECL)
2175 : return;
2176 : /* For now, don't emit the status of decls in the constant pool, to avoid
2177 : differences in DejaGnu test results between targets that use these vs
2178 : those that don't.
2179 : (Eventually these decls should probably be untracked and we should test
2180 : for that, but that's not stage 4 material). */
2181 56 : if (DECL_IN_CONSTANT_POOL (decl))
2182 : return;
2183 85 : warning_at (DECL_SOURCE_LOCATION (decl), 0,
2184 : "track %qD: %s",
2185 56 : decl, (decl_reg->tracked_p () ? "yes" : "no"));
2186 : }
2187 :
2188 : /* Implementation of -fdump-analyzer-untracked. */
2189 :
2190 : void
2191 23 : region_model_manager::dump_untracked_regions () const
2192 : {
2193 74 : for (auto iter : m_globals_map)
2194 : {
2195 51 : const decl_region *decl_reg = iter.second;
2196 51 : dump_untracked_region (decl_reg);
2197 : }
2198 75 : for (auto frame_iter : m_frame_regions)
2199 : {
2200 52 : const frame_region *frame_reg = frame_iter.second;
2201 52 : frame_reg->dump_untracked_regions ();
2202 : }
2203 23 : }
2204 :
2205 : void
2206 52 : frame_region::dump_untracked_regions () const
2207 : {
2208 163 : for (auto iter : m_locals)
2209 : {
2210 111 : const decl_region *decl_reg = iter.second;
2211 111 : dump_untracked_region (decl_reg);
2212 : }
2213 52 : }
2214 :
2215 : } // namespace ana
2216 :
2217 : #endif /* #if ENABLE_ANALYZER */
|