Line data Source code
1 : /* Consolidation of svalues and regions.
2 : Copyright (C) 2020-2026 Free Software Foundation, Inc.
3 : Contributed by David Malcolm <dmalcolm@redhat.com>.
4 :
5 : This file is part of GCC.
6 :
7 : GCC is free software; you can redistribute it and/or modify it
8 : under the terms of the GNU General Public License as published by
9 : the Free Software Foundation; either version 3, or (at your option)
10 : any later version.
11 :
12 : GCC is distributed in the hope that it will be useful, but
13 : WITHOUT ANY WARRANTY; without even the implied warranty of
14 : MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
15 : General Public License for more details.
16 :
17 : You should have received a copy of the GNU General Public License
18 : along with GCC; see the file COPYING3. If not see
19 : <http://www.gnu.org/licenses/>. */
20 :
21 : #include "analyzer/common.h"
22 :
23 : #include "fold-const.h"
24 : #include "ordered-hash-map.h"
25 : #include "options.h"
26 : #include "analyzer/supergraph.h"
27 : #include "sbitmap.h"
28 : #include "target.h"
29 :
30 : #include "analyzer/analyzer-logging.h"
31 : #include "analyzer/call-string.h"
32 : #include "analyzer/program-point.h"
33 : #include "analyzer/store.h"
34 : #include "analyzer/region-model.h"
35 : #include "analyzer/constraint-manager.h"
36 :
37 : #if ENABLE_ANALYZER
38 :
39 : namespace ana {
40 :
41 : /* class region_model_manager. */
42 :
/* region_model_manager's ctor.
   Note: members are initialized in declaration order, not the order
   written below; alloc_symbol_id () hands out sequential ids, so each
   built-in region gets its own unique symbol id.  */

region_model_manager::region_model_manager (logger *logger)
: m_logger (logger),
  m_next_symbol_id (0),
  m_empty_call_string (),
  m_root_region (alloc_symbol_id ()),
  m_stack_region (alloc_symbol_id (), &m_root_region),
  m_heap_region (alloc_symbol_id (), &m_root_region),
  m_unknown_NULL (nullptr),
  m_checking_feasibility (false),
  m_max_complexity (0, 0),
  m_code_region (alloc_symbol_id (), &m_root_region),
  m_fndecls_map (), m_labels_map (),
  m_globals_region (alloc_symbol_id (), &m_root_region),
  m_globals_map (),
  m_thread_local_region (alloc_symbol_id (), &m_root_region),
  /* errno is modelled as living within the thread-local region.  */
  m_errno_region (alloc_symbol_id (), &m_thread_local_region),
  m_store_mgr (this),
  m_range_mgr (new bounded_ranges_manager ()),
  m_known_fn_mgr (logger)
{
}
66 :
67 : /* region_model_manager's dtor. Delete all of the managed svalues
68 : and regions. */
69 :
70 7874 : region_model_manager::~region_model_manager ()
71 : {
72 : /* Delete consolidated svalues. */
73 59024 : for (constants_map_t::iterator iter = m_constants_map.begin ();
74 114111 : iter != m_constants_map.end (); ++iter)
75 55087 : delete (*iter).second;
76 12443 : for (unknowns_map_t::iterator iter = m_unknowns_map.begin ();
77 20949 : iter != m_unknowns_map.end (); ++iter)
78 8506 : delete (*iter).second;
79 3937 : delete m_unknown_NULL;
80 9850 : for (poisoned_values_map_t::iterator iter = m_poisoned_values_map.begin ();
81 15763 : iter != m_poisoned_values_map.end (); ++iter)
82 5913 : delete (*iter).second;
83 3971 : for (setjmp_values_map_t::iterator iter = m_setjmp_values_map.begin ();
84 4005 : iter != m_setjmp_values_map.end (); ++iter)
85 34 : delete (*iter).second;
86 30298 : for (initial_values_map_t::iterator iter = m_initial_values_map.begin ();
87 56659 : iter != m_initial_values_map.end (); ++iter)
88 26361 : delete (*iter).second;
89 28593 : for (pointer_values_map_t::iterator iter = m_pointer_values_map.begin ();
90 53249 : iter != m_pointer_values_map.end (); ++iter)
91 24656 : delete (*iter).second;
92 12157 : for (unaryop_values_map_t::iterator iter = m_unaryop_values_map.begin ();
93 20377 : iter != m_unaryop_values_map.end (); ++iter)
94 8220 : delete (*iter).second;
95 24179 : for (binop_values_map_t::iterator iter = m_binop_values_map.begin ();
96 44421 : iter != m_binop_values_map.end (); ++iter)
97 20242 : delete (*iter).second;
98 6268 : for (sub_values_map_t::iterator iter = m_sub_values_map.begin ();
99 8599 : iter != m_sub_values_map.end (); ++iter)
100 2331 : delete (*iter).second;
101 4961 : for (auto iter : m_repeated_values_map)
102 512 : delete iter.second;
103 5481 : for (auto iter : m_bits_within_values_map)
104 772 : delete iter.second;
105 4141 : for (unmergeable_values_map_t::iterator iter
106 3937 : = m_unmergeable_values_map.begin ();
107 4345 : iter != m_unmergeable_values_map.end (); ++iter)
108 204 : delete (*iter).second;
109 6298 : for (widening_values_map_t::iterator iter = m_widening_values_map.begin ();
110 8659 : iter != m_widening_values_map.end (); ++iter)
111 2361 : delete (*iter).second;
112 4548 : for (compound_values_map_t::iterator iter = m_compound_values_map.begin ();
113 5159 : iter != m_compound_values_map.end (); ++iter)
114 611 : delete (*iter).second;
115 29767 : for (conjured_values_map_t::iterator iter = m_conjured_values_map.begin ();
116 55597 : iter != m_conjured_values_map.end (); ++iter)
117 25830 : delete (*iter).second;
118 4311 : for (auto iter : m_asm_output_values_map)
119 187 : delete iter.second;
120 4311 : for (auto iter : m_const_fn_result_values_map)
121 187 : delete iter.second;
122 :
123 : /* Delete consolidated regions. */
124 13322 : for (fndecls_map_t::iterator iter = m_fndecls_map.begin ();
125 22707 : iter != m_fndecls_map.end (); ++iter)
126 9385 : delete (*iter).second;
127 3983 : for (labels_map_t::iterator iter = m_labels_map.begin ();
128 4029 : iter != m_labels_map.end (); ++iter)
129 46 : delete (*iter).second;
130 10307 : for (globals_map_t::iterator iter = m_globals_map.begin ();
131 16677 : iter != m_globals_map.end (); ++iter)
132 6370 : delete (*iter).second;
133 7685 : for (string_map_t::iterator iter = m_string_map.begin ();
134 11433 : iter != m_string_map.end (); ++iter)
135 3748 : delete (*iter).second;
136 :
137 3937 : delete m_range_mgr;
138 7874 : }
139 :
140 : /* Return true if C exceeds the complexity limit for svalues. */
141 :
142 : bool
143 168228 : region_model_manager::too_complex_p (const complexity &c) const
144 : {
145 168228 : if (c.m_max_depth > (unsigned)param_analyzer_max_svalue_depth)
146 1987 : return true;
147 : return false;
148 : }
149 :
150 : /* If SVAL exceeds the complexity limit for svalues, delete it
151 : and return true.
152 : Otherwise update m_max_complexity and return false. */
153 :
154 : bool
155 175495 : region_model_manager::reject_if_too_complex (svalue *sval)
156 : {
157 175495 : if (m_checking_feasibility)
158 : return false;
159 :
160 168228 : const complexity &c = sval->get_complexity ();
161 168228 : if (!too_complex_p (c))
162 : {
163 166241 : if (m_max_complexity.m_num_nodes < c.m_num_nodes)
164 11584 : m_max_complexity.m_num_nodes = c.m_num_nodes;
165 166241 : if (m_max_complexity.m_max_depth < c.m_max_depth)
166 10634 : m_max_complexity.m_max_depth = c.m_max_depth;
167 166241 : return false;
168 : }
169 :
170 1987 : pretty_printer pp;
171 1987 : pp_format_decoder (&pp) = default_tree_printer;
172 1987 : sval->dump_to_pp (&pp, true);
173 1987 : if (warning_at (input_location, OPT_Wanalyzer_symbol_too_complex,
174 : "symbol too complicated: %qs",
175 : pp_formatted_text (&pp)))
176 2 : inform (input_location,
177 : "max_depth %i exceeds --param=analyzer-max-svalue-depth=%i",
178 2 : c.m_max_depth, param_analyzer_max_svalue_depth);
179 :
180 1987 : delete sval;
181 1987 : return true;
182 1987 : }
183 :
184 : /* Macro for imposing a complexity limit on svalues, for use within
185 : region_model_manager member functions.
186 :
187 : If SVAL exceeds the complexity limit, delete it and return an UNKNOWN
188 : value of the same type.
189 : Otherwise update m_max_complexity and carry on. */
190 :
/* Note: SVAL is evaluated exactly once (into sval_), so it is safe to
   pass an expression with side effects such as a "new" call.  */
#define RETURN_UNKNOWN_IF_TOO_COMPLEX(SVAL) \
  do {							\
    svalue *sval_ = (SVAL);				\
    tree type_ = sval_->get_type ();			\
    if (reject_if_too_complex (sval_))			\
      return get_or_create_unknown_svalue (type_);	\
  } while (0)
198 :
199 : /* svalue consolidation. */
200 :
201 : /* Return the svalue * for a constant_svalue for CST_EXPR,
202 : creating it if necessary.
203 : The constant_svalue instances are reused, based on pointer equality
204 : of trees */
205 :
206 : const svalue *
207 2510142 : region_model_manager::get_or_create_constant_svalue (tree type, tree cst_expr)
208 : {
209 2510142 : gcc_assert (cst_expr);
210 2510142 : gcc_assert (CONSTANT_CLASS_P (cst_expr));
211 2510142 : gcc_assert (type == TREE_TYPE (cst_expr) || type == NULL_TREE);
212 :
213 2510142 : constant_svalue::key_t key (type, cst_expr);
214 2510142 : constant_svalue **slot = m_constants_map.get (key);
215 2510142 : if (slot)
216 2454251 : return *slot;
217 55891 : constant_svalue *cst_sval
218 55891 : = new constant_svalue (alloc_symbol_id (), type, cst_expr);
219 55891 : RETURN_UNKNOWN_IF_TOO_COMPLEX (cst_sval);
220 55087 : m_constants_map.put (key, cst_sval);
221 55087 : return cst_sval;
222 : }
223 :
224 : const svalue *
225 1610912 : region_model_manager::get_or_create_constant_svalue (tree cst_expr)
226 : {
227 1610912 : tree type = TREE_TYPE (cst_expr);
228 1610912 : if (TREE_CODE (cst_expr) == RAW_DATA_CST)
229 : /* The type of a RAW_DATA_CST is the type of each element, rather than
230 : that of the constant as a whole, so use NULL_TREE for simplicity. */
231 14 : type = NULL_TREE;
232 1610912 : return get_or_create_constant_svalue (type, cst_expr);
233 : }
234 :
235 : /* Return the svalue * for a constant_svalue for the INTEGER_CST
236 : for VAL of type TYPE, creating it if necessary. */
237 :
238 : const svalue *
239 862766 : region_model_manager::get_or_create_int_cst (tree type,
240 : const poly_wide_int_ref &cst)
241 : {
242 862766 : tree effective_type = type;
243 862766 : if (!type)
244 6717 : effective_type = ptrdiff_type_node;
245 862766 : gcc_assert (INTEGRAL_TYPE_P (effective_type)
246 : || POINTER_TYPE_P (effective_type));
247 862766 : tree tree_cst = wide_int_to_tree (effective_type, cst);
248 862766 : return get_or_create_constant_svalue (type, tree_cst);
249 : }
250 :
/* Return the svalue * for the constant_svalue for the NULL pointer
   of POINTER_TYPE, creating it if necessary.  */

const svalue *
region_model_manager::get_or_create_null_ptr (tree pointer_type)
{
  gcc_assert (pointer_type);
  gcc_assert (POINTER_TYPE_P (pointer_type));
  /* A NULL pointer is simply the zero constant of the pointer type.  */
  return get_or_create_int_cst (pointer_type, 0);
}
261 :
262 : /* Return the svalue * for a unknown_svalue for TYPE (which can be NULL_TREE),
263 : creating it if necessary.
264 : The unknown_svalue instances are reused, based on pointer equality
265 : of the types */
266 :
267 : const svalue *
268 837359 : region_model_manager::get_or_create_unknown_svalue (tree type)
269 : {
270 : /* Don't create unknown values when doing feasibility testing;
271 : instead, create a unique svalue. */
272 837359 : if (m_checking_feasibility)
273 12367 : return create_unique_svalue (type);
274 :
275 : /* Special-case NULL, so that the hash_map can use NULL as the
276 : "empty" value. */
277 824992 : if (type == NULL_TREE)
278 : {
279 117556 : if (!m_unknown_NULL)
280 1711 : m_unknown_NULL = new unknown_svalue (alloc_symbol_id (), type);
281 117556 : return m_unknown_NULL;
282 : }
283 :
284 707436 : unknown_svalue **slot = m_unknowns_map.get (type);
285 707436 : if (slot)
286 698930 : return *slot;
287 8506 : unknown_svalue *sval = new unknown_svalue (alloc_symbol_id (), type);
288 8506 : m_unknowns_map.put (type, sval);
289 8506 : return sval;
290 : }
291 :
292 : /* Return a freshly-allocated svalue of TYPE, owned by this manager. */
293 :
294 : const svalue *
295 12367 : region_model_manager::create_unique_svalue (tree type)
296 : {
297 12367 : svalue *sval = new placeholder_svalue (alloc_symbol_id (), type, "unique");
298 12367 : m_managed_dynamic_svalues.safe_push (sval);
299 12367 : return sval;
300 : }
301 :
302 : /* Return the svalue * for the initial value of REG, creating it if
303 : necessary. */
304 :
305 : const svalue *
306 3077152 : region_model_manager::get_or_create_initial_value (const region *reg,
307 : bool check_poisoned)
308 : {
309 3077152 : if (!reg->can_have_initial_svalue_p () && check_poisoned)
310 191333 : return get_or_create_poisoned_svalue (poison_kind::uninit,
311 191333 : reg->get_type ());
312 :
313 : /* The initial value of a cast is a cast of the initial value. */
314 2885819 : if (const cast_region *cast_reg = reg->dyn_cast_cast_region ())
315 : {
316 2225 : const region *original_reg = cast_reg->get_parent_region ();
317 2225 : return get_or_create_cast (cast_reg->get_type (),
318 2225 : get_or_create_initial_value (original_reg));
319 : }
320 :
321 : /* Simplify:
322 : INIT_VAL(ELEMENT_REG(STRING_REG), CONSTANT_SVAL)
323 : to:
324 : CONSTANT_SVAL(STRING[N]). */
325 2883594 : if (const element_region *element_reg = reg->dyn_cast_element_region ())
326 7193 : if (tree cst_idx = element_reg->get_index ()->maybe_get_constant ())
327 8296 : if (const string_region *string_reg
328 4148 : = element_reg->get_parent_region ()->dyn_cast_string_region ())
329 702 : if (tree_fits_shwi_p (cst_idx))
330 : {
331 702 : HOST_WIDE_INT idx = tree_to_shwi (cst_idx);
332 702 : tree string_cst = string_reg->get_string_cst ();
333 1404 : if (idx >= 0 && idx <= TREE_STRING_LENGTH (string_cst))
334 : {
335 702 : int ch = TREE_STRING_POINTER (string_cst)[idx];
336 702 : return get_or_create_int_cst (reg->get_type (), ch);
337 : }
338 : }
339 :
340 : /* INIT_VAL (*UNKNOWN_PTR) -> UNKNOWN_VAL. */
341 2882892 : if (reg->symbolic_for_unknown_ptr_p ())
342 7450 : return get_or_create_unknown_svalue (reg->get_type ());
343 :
344 2875442 : if (initial_svalue **slot = m_initial_values_map.get (reg))
345 2848692 : return *slot;
346 26750 : initial_svalue *initial_sval
347 26750 : = new initial_svalue (alloc_symbol_id (), reg->get_type (), reg);
348 26750 : RETURN_UNKNOWN_IF_TOO_COMPLEX (initial_sval);
349 26361 : m_initial_values_map.put (reg, initial_sval);
350 26361 : return initial_sval;
351 : }
352 :
353 : /* Return the svalue * for R using type TYPE, creating it if
354 : necessary. */
355 :
356 : const svalue *
357 34 : region_model_manager::get_or_create_setjmp_svalue (const setjmp_record &r,
358 : tree type)
359 : {
360 34 : setjmp_svalue::key_t key (r, type);
361 34 : if (setjmp_svalue **slot = m_setjmp_values_map.get (key))
362 0 : return *slot;
363 34 : setjmp_svalue *setjmp_sval = new setjmp_svalue (r, alloc_symbol_id (), type);
364 34 : RETURN_UNKNOWN_IF_TOO_COMPLEX (setjmp_sval);
365 34 : m_setjmp_values_map.put (key, setjmp_sval);
366 34 : return setjmp_sval;
367 : }
368 :
369 : /* Return the svalue * for a poisoned value of KIND and TYPE, creating it if
370 : necessary. */
371 :
372 : const svalue *
373 198516 : region_model_manager::get_or_create_poisoned_svalue (enum poison_kind kind,
374 : tree type)
375 : {
376 198516 : poisoned_svalue::key_t key (kind, type);
377 198516 : if (poisoned_svalue **slot = m_poisoned_values_map.get (key))
378 192603 : return *slot;
379 5913 : poisoned_svalue *poisoned_sval
380 5913 : = new poisoned_svalue (kind, alloc_symbol_id (), type);
381 5913 : RETURN_UNKNOWN_IF_TOO_COMPLEX (poisoned_sval);
382 5913 : m_poisoned_values_map.put (key, poisoned_sval);
383 5913 : return poisoned_sval;
384 : }
385 :
386 : /* Return the svalue * for a pointer to POINTEE of type PTR_TYPE,
387 : creating it if necessary. */
388 :
389 : const svalue *
390 962857 : region_model_manager::get_ptr_svalue (tree ptr_type, const region *pointee)
391 : {
392 : /* If this is a symbolic region from dereferencing a pointer, and the types
393 : match, then return the original pointer. */
394 962857 : if (const symbolic_region *sym_reg = pointee->dyn_cast_symbolic_region ())
395 159 : if (ptr_type == sym_reg->get_pointer ()->get_type ())
396 : return sym_reg->get_pointer ();
397 :
398 962698 : region_svalue::key_t key (ptr_type, pointee);
399 962698 : if (region_svalue **slot = m_pointer_values_map.get (key))
400 937530 : return *slot;
401 25168 : region_svalue *sval
402 25168 : = new region_svalue (alloc_symbol_id (), ptr_type, pointee);
403 25168 : RETURN_UNKNOWN_IF_TOO_COMPLEX (sval);
404 24656 : m_pointer_values_map.put (key, sval);
405 24656 : return sval;
406 : }
407 :
/* Subroutine of region_model_manager::get_or_create_unaryop.
   Attempt to fold the inputs and return a simpler svalue *.
   Otherwise, return nullptr.  */

const svalue *
region_model_manager::maybe_fold_unaryop (tree type, enum tree_code op,
					  const svalue *arg)
{
  /* Ops on "unknown" are also unknown.  */
  if (arg->get_kind () == SK_UNKNOWN)
    return get_or_create_unknown_svalue (type);
  /* Likewise for "poisoned".  */
  else if (const poisoned_svalue *poisoned_sval
	     = arg->dyn_cast_poisoned_svalue ())
    return get_or_create_poisoned_svalue (poisoned_sval->get_poison_kind (),
					  type);

  gcc_assert (arg->can_have_associated_state_p ());

  switch (op)
    {
    default: break;
    case VIEW_CONVERT_EXPR:
    case NOP_EXPR:
      {
	/* A typeless cast cannot be simplified here.  */
	if (!type)
	  return nullptr;

	/* Handle redundant casts.  */
	if (arg->get_type ()
	    && useless_type_conversion_p (arg->get_type (), type))
	  return arg;

	/* Fold "cast<TYPE> (cast <INNER_TYPE> (innermost_arg))
	     => "cast<TYPE> (innermost_arg)",
	   unless INNER_TYPE is narrower than TYPE.  */
	if (const svalue *innermost_arg = arg->maybe_undo_cast ())
	  {
	    if (tree inner_type = arg->get_type ())
	      if (TYPE_SIZE (type)
		  && TYPE_SIZE (inner_type)
		  && (fold_binary (LE_EXPR, boolean_type_node,
				   TYPE_SIZE (type), TYPE_SIZE (inner_type))
		      == boolean_true_node))
		/* Recurse, in case innermost_arg is itself a cast.  */
		return maybe_fold_unaryop (type, op, innermost_arg);
	  }
	/* Avoid creating symbolic regions for pointer casts by
	   simplifying (T*)(&REGION) to ((T*)&REGION).  */
	if (const region_svalue *region_sval = arg->dyn_cast_region_svalue ())
	  if (POINTER_TYPE_P (type)
	      && region_sval->get_type ()
	      && POINTER_TYPE_P (region_sval->get_type ()))
	    return get_ptr_svalue (type, region_sval->get_pointee ());

	/* Casting all zeroes should give all zeroes.  */
	if (type
	    && arg->all_zeroes_p ()
	    && (INTEGRAL_TYPE_P (type) || POINTER_TYPE_P (type)))
	  return get_or_create_int_cst (type, 0);
      }
      break;
    case TRUTH_NOT_EXPR:
      {
	/* Invert comparisons e.g. "!(x == y)" => "x != y".  */
	if (const binop_svalue *binop = arg->dyn_cast_binop_svalue ())
	  if (TREE_CODE_CLASS (binop->get_op ()) == tcc_comparison)
	    {
	      enum tree_code inv_op
		= invert_tree_comparison (binop->get_op (),
					  HONOR_NANS (binop->get_type ()));
	      if (inv_op != ERROR_MARK)
		return get_or_create_binop (binop->get_type (), inv_op,
					    binop->get_arg0 (),
					    binop->get_arg1 ());
	    }
      }
      break;
    case NEGATE_EXPR:
      {
	/* -(-(VAL)) is VAL, for integer types.  */
	if (const unaryop_svalue *unaryop = arg->dyn_cast_unaryop_svalue ())
	  if (unaryop->get_op () == NEGATE_EXPR
	      && type == unaryop->get_type ()
	      && type
	      && INTEGRAL_TYPE_P (type))
	    return unaryop->get_arg ();
      }
      break;
    }

  /* Constants.  */
  if (type)
    if (tree cst = arg->maybe_get_constant ())
      if (tree result = fold_unary (op, type, cst))
	{
	  if (CONSTANT_CLASS_P (result))
	    return get_or_create_constant_svalue (result);

	  /* fold_unary can return casts of constants; try to handle them.  */
	  if (op != NOP_EXPR
	      && TREE_CODE (result) == NOP_EXPR
	      && CONSTANT_CLASS_P (TREE_OPERAND (result, 0)))
	    {
	      const svalue *inner_cst
		= get_or_create_constant_svalue (TREE_OPERAND (result, 0));
	      return get_or_create_cast (type,
					 get_or_create_cast (TREE_TYPE (result),
							     inner_cst));
	    }
	}

  /* No fold was possible; the caller will make a unaryop_svalue.  */
  return nullptr;
}
521 :
522 : /* Return the svalue * for an unary operation OP on ARG with a result of
523 : type TYPE, creating it if necessary. */
524 :
525 : const svalue *
526 314473 : region_model_manager::get_or_create_unaryop (tree type, enum tree_code op,
527 : const svalue *arg)
528 : {
529 314473 : if (const svalue *folded = maybe_fold_unaryop (type, op, arg))
530 : return folded;
531 115746 : unaryop_svalue::key_t key (type, op, arg);
532 115746 : if (unaryop_svalue **slot = m_unaryop_values_map.get (key))
533 107424 : return *slot;
534 8322 : unaryop_svalue *unaryop_sval
535 8322 : = new unaryop_svalue (alloc_symbol_id (), type, op, arg);
536 8322 : RETURN_UNKNOWN_IF_TOO_COMPLEX (unaryop_sval);
537 8220 : m_unaryop_values_map.put (key, unaryop_sval);
538 8220 : return unaryop_sval;
539 : }
540 :
541 : /* Get a tree code for a cast to DST_TYPE from SRC_TYPE.
542 : Use NOP_EXPR if possible (e.g. to help fold_unary convert casts
543 : of 0 to (T*) to simple pointer constants), but use FIX_TRUNC_EXPR
544 : and VIEW_CONVERT_EXPR for cases that fold_unary would otherwise crash
545 : on. */
546 :
547 : static enum tree_code
548 240975 : get_code_for_cast (tree dst_type, tree src_type)
549 : {
550 0 : if (!dst_type)
551 : return NOP_EXPR;
552 0 : if (!src_type)
553 : return NOP_EXPR;
554 :
555 236128 : if (SCALAR_FLOAT_TYPE_P (src_type))
556 : {
557 352 : if (TREE_CODE (dst_type) == INTEGER_TYPE)
558 : return FIX_TRUNC_EXPR;
559 : else
560 348 : return VIEW_CONVERT_EXPR;
561 : }
562 :
563 : return NOP_EXPR;
564 : }
565 :
566 : /* Return the svalue * for a cast of ARG to type TYPE, creating it
567 : if necessary. */
568 :
569 : const svalue *
570 1211041 : region_model_manager::get_or_create_cast (tree type, const svalue *arg)
571 : {
572 : /* No-op if the types are the same. */
573 1211041 : if (type == arg->get_type ())
574 : return arg;
575 :
576 : /* Don't attempt to handle casts involving vector types for now. */
577 250496 : if (type)
578 241063 : if (VECTOR_TYPE_P (type)
579 241063 : || (arg->get_type ()
580 236128 : && VECTOR_TYPE_P (arg->get_type ())))
581 88 : return get_or_create_unknown_svalue (type);
582 :
583 240975 : enum tree_code op = get_code_for_cast (type, arg->get_type ());
584 250408 : return get_or_create_unaryop (type, op, arg);
585 : }
586 :
/* Subroutine of region_model_manager::maybe_fold_binop for handling
   (TYPE)(COMPOUND_SVAL BIT_AND_EXPR CST) that may have been generated by
   optimize_bit_field_compare, where CST is from ARG1.

   Support masking out bits from a compound_svalue for comparing a bitfield
   against a value, as generated by optimize_bit_field_compare for
   BITFIELD == VALUE.

   If COMPOUND_SVAL has a value for the appropriate bits, return it,
   shifted accordingly.
   Otherwise return nullptr.  */

const svalue *
region_model_manager::
maybe_undo_optimize_bit_field_compare (tree type,
				       const compound_svalue *compound_sval,
				       tree cst,
				       const svalue *arg1)
{
  /* This simplification only makes sense for integral results.  */
  if (!type)
    return nullptr;
  if (!INTEGRAL_TYPE_P (type))
    return nullptr;

  const binding_map &map = compound_sval->get_map ();
  unsigned HOST_WIDE_INT mask = TREE_INT_CST_LOW (cst);
  /* If "mask" is a contiguous range of set bits, see if the
     compound_sval has a value for those bits.  */
  bit_range bits (0, 0);
  if (!bit_range::from_mask (mask, &bits))
    return nullptr;

  /* On big-endian targets the stored binding's bit range is mirrored
     within the byte relative to the mask's range.  */
  bit_range bound_bits (bits);
  if (BYTES_BIG_ENDIAN)
    bound_bits = bit_range (BITS_PER_UNIT - bits.get_next_bit_offset (),
			    bits.m_size_in_bits);
  const concrete_binding *conc
    = get_store_manager ()->get_concrete_binding (bound_bits);
  const svalue *sval = map.get (conc);
  if (!sval)
    return nullptr;

  /* We have a value;
     shift it by the correct number of bits.  */
  const svalue *lhs = get_or_create_cast (type, sval);
  HOST_WIDE_INT bit_offset = bits.get_start_bit_offset ().to_shwi ();
  const svalue *shift_sval = get_or_create_int_cst (type, bit_offset);
  const svalue *shifted_sval = get_or_create_binop (type, LSHIFT_EXPR,
						    lhs, shift_sval);
  /* Reapply the mask (needed for negative
     signed bitfields).  */
  return get_or_create_binop (type, BIT_AND_EXPR,
			      shifted_sval, arg1);
}
641 :
642 : /* Subroutine of region_model_manager::get_or_create_binop.
643 : Attempt to fold the inputs and return a simpler svalue *.
644 : Otherwise, return nullptr. */
645 :
646 : const svalue *
647 227288 : region_model_manager::maybe_fold_binop (tree type, enum tree_code op,
648 : const svalue *arg0,
649 : const svalue *arg1)
650 : {
651 227288 : tree cst0 = arg0->maybe_get_constant ();
652 227288 : tree cst1 = arg1->maybe_get_constant ();
653 : /* (CST OP CST). */
654 227288 : if (cst0 && cst1)
655 : {
656 76228 : if (type)
657 : {
658 39764 : if (tree result = fold_binary (op, type, cst0, cst1))
659 39730 : if (CONSTANT_CLASS_P (result))
660 39728 : return get_or_create_constant_svalue (result);
661 : }
662 : else
663 : {
664 36464 : if (tree result = int_const_binop (op, cst0, cst1, -1))
665 36464 : return get_or_create_constant_svalue (NULL_TREE, result);
666 : }
667 : }
668 :
669 104262 : if ((type && FLOAT_TYPE_P (type))
670 150645 : || (arg0->get_type () && FLOAT_TYPE_P (arg0->get_type ()))
671 301689 : || (arg1->get_type () && FLOAT_TYPE_P (arg1->get_type ())))
672 : return nullptr;
673 :
674 150583 : switch (op)
675 : {
676 : default:
677 : break;
678 95352 : case POINTER_PLUS_EXPR:
679 95352 : case PLUS_EXPR:
680 : /* (VAL + 0) -> VAL. */
681 95352 : if (cst1 && zerop (cst1))
682 15000 : return get_or_create_cast (type, arg0);
683 : /* X + (-X) -> 0. */
684 80352 : if (const unaryop_svalue *unary_op = arg1->dyn_cast_unaryop_svalue ())
685 1952 : if (unary_op->get_op () == NEGATE_EXPR
686 780 : && unary_op->get_arg () == arg0
687 2074 : && type && (INTEGRAL_TYPE_P (type) || POINTER_TYPE_P (type)))
688 120 : return get_or_create_int_cst (type, 0);
689 : /* X + (Y - X) -> Y. */
690 80232 : if (const binop_svalue *bin_op = arg1->dyn_cast_binop_svalue ())
691 8773 : if (bin_op->get_op () == MINUS_EXPR)
692 203 : if (bin_op->get_arg1 () == arg0)
693 8 : return get_or_create_cast (type, bin_op->get_arg0 ());
694 : break;
695 3879 : case MINUS_EXPR:
696 : /* (VAL - 0) -> VAL. */
697 3879 : if (cst1 && zerop (cst1))
698 47 : return get_or_create_cast (type, arg0);
699 : /* (0 - VAL) -> -VAL. */
700 3832 : if (cst0 && zerop (cst0))
701 17 : return get_or_create_unaryop (type, NEGATE_EXPR, arg1);
702 : /* (X + Y) - X -> Y. */
703 3815 : if (const binop_svalue *binop = arg0->dyn_cast_binop_svalue ())
704 860 : if (binop->get_op () == PLUS_EXPR)
705 125 : if (binop->get_arg0 () == arg1)
706 15 : return get_or_create_cast (type, binop->get_arg1 ());
707 : break;
708 25734 : case MULT_EXPR:
709 : /* (VAL * 0). */
710 25734 : if (cst1
711 25134 : && zerop (cst1)
712 25798 : && (type == NULL_TREE || INTEGRAL_TYPE_P (type)))
713 56 : return get_or_create_int_cst (type, 0);
714 : /* (VAL * 1) -> VAL. */
715 25678 : if (cst1 && integer_onep (cst1))
716 304 : return get_or_create_cast (type, arg0);
717 : break;
718 3075 : case BIT_AND_EXPR:
719 3075 : if (cst1)
720 : {
721 2552 : if (zerop (cst1)
722 2552 : && (type == NULL_TREE || INTEGRAL_TYPE_P (type)))
723 : /* "(ARG0 & 0)" -> "0". */
724 58 : return get_or_create_int_cst (type, 0);
725 :
726 4988 : if (const compound_svalue *compound_sval
727 2494 : = arg0->dyn_cast_compound_svalue ())
728 116 : if (const svalue *sval
729 58 : = maybe_undo_optimize_bit_field_compare (type,
730 : compound_sval,
731 : cst1, arg1))
732 : return sval;
733 : }
734 2961 : if (arg0->get_type () == boolean_type_node
735 2961 : && arg1->get_type () == boolean_type_node)
736 : {
737 : /* If the LHS are both _Bool, then... */
738 : /* ..."(1 & x) -> x". */
739 447 : if (cst0 && !zerop (cst0))
740 0 : return get_or_create_cast (type, arg1);
741 : /* ..."(x & 1) -> x". */
742 447 : if (cst1 && !zerop (cst1))
743 159 : return get_or_create_cast (type, arg0);
744 : /* ..."(0 & x) -> 0". */
745 288 : if (cst0 && zerop (cst0))
746 0 : return get_or_create_int_cst (type, 0);
747 : /* ..."(x & 0) -> 0". */
748 288 : if (cst1 && zerop (cst1))
749 0 : return get_or_create_int_cst (type, 0);
750 : }
751 : break;
752 3168 : case BIT_IOR_EXPR:
753 3168 : if (arg0->get_type () == boolean_type_node
754 3168 : && arg1->get_type () == boolean_type_node)
755 : {
756 : /* If the LHS are both _Bool, then... */
757 : /* ..."(1 | x) -> 1". */
758 117 : if (cst0 && !zerop (cst0))
759 0 : return get_or_create_int_cst (type, 1);
760 : /* ..."(x | 1) -> 1". */
761 117 : if (cst1 && !zerop (cst1))
762 38 : return get_or_create_int_cst (type, 1);
763 : /* ..."(0 | x) -> x". */
764 79 : if (cst0 && zerop (cst0))
765 0 : return get_or_create_cast (type, arg1);
766 : /* ..."(x | 0) -> x". */
767 79 : if (cst1 && zerop (cst1))
768 20 : return get_or_create_cast (type, arg0);
769 : }
770 : break;
771 12 : case TRUTH_ANDIF_EXPR:
772 12 : case TRUTH_AND_EXPR:
773 12 : if (cst1)
774 : {
775 12 : if (zerop (cst1) && INTEGRAL_TYPE_P (type))
776 : /* "(ARG0 && 0)" -> "0". */
777 4 : return get_or_create_constant_svalue (build_int_cst (type, 0));
778 : else
779 : /* "(ARG0 && nonzero-cst)" -> "ARG0". */
780 8 : return get_or_create_cast (type, arg0);
781 : }
782 : break;
783 12 : case TRUTH_ORIF_EXPR:
784 12 : case TRUTH_OR_EXPR:
785 12 : if (cst1)
786 : {
787 12 : if (zerop (cst1))
788 : /* "(ARG0 || 0)" -> "ARG0". */
789 8 : return get_or_create_cast (type, arg0);
790 : else
791 : /* "(ARG0 && nonzero-cst)" -> "nonzero-cst". */
792 4 : return get_or_create_cast (type, arg1);
793 : }
794 : break;
795 :
796 1162 : case TRUNC_DIV_EXPR:
797 1162 : case CEIL_DIV_EXPR:
798 1162 : case FLOOR_DIV_EXPR:
799 1162 : case ROUND_DIV_EXPR:
800 1162 : case TRUNC_MOD_EXPR:
801 1162 : case CEIL_MOD_EXPR:
802 1162 : case FLOOR_MOD_EXPR:
803 1162 : case ROUND_MOD_EXPR:
804 1162 : case RDIV_EXPR:
805 1162 : case EXACT_DIV_EXPR:
806 1162 : if (cst1 && zerop (cst1))
807 0 : return get_or_create_unknown_svalue (type);
808 : break;
809 : }
810 :
811 : /* For associative ops, fold "(X op CST_A) op CST_B)" to
812 : "X op (CST_A op CST_B)". */
813 134661 : if (cst1 && associative_tree_code (op))
814 63048 : if (const binop_svalue *binop = arg0->dyn_cast_binop_svalue ())
815 16650 : if (binop->get_op () == op
816 16650 : && binop->get_arg1 ()->maybe_get_constant ())
817 2556 : return get_or_create_binop
818 2556 : (type, op, binop->get_arg0 (),
819 : get_or_create_binop (type, op,
820 2556 : binop->get_arg1 (), arg1));
821 :
822 : /* associative_tree_code is false for POINTER_PLUS_EXPR, but we
823 : can fold:
824 : "(PTR ptr+ CST_A) ptr+ CST_B)" to "PTR ptr+ (CST_A ptr+ CST_B)"
825 : e.g. in data-model-1.c: test_4c. */
826 132105 : if (cst1 && op == POINTER_PLUS_EXPR)
827 26231 : if (const binop_svalue *binop = arg0->dyn_cast_binop_svalue ())
828 1573 : if (binop->get_op () == POINTER_PLUS_EXPR)
829 1425 : if (binop->get_arg1 ()->maybe_get_constant ())
830 1110 : return get_or_create_binop
831 1110 : (type, op, binop->get_arg0 (),
832 : get_or_create_binop (size_type_node, op,
833 1110 : binop->get_arg1 (), arg1));
834 :
835 : /* Distribute multiplication by a constant through addition/subtraction:
836 : (X + Y) * CST => (X * CST) + (Y * CST). */
837 130995 : if (cst1 && op == MULT_EXPR)
838 24611 : if (const binop_svalue *binop = arg0->dyn_cast_binop_svalue ())
839 2729 : if (binop->get_op () == PLUS_EXPR
840 2729 : || binop->get_op () == MINUS_EXPR)
841 : {
842 2062 : return get_or_create_binop
843 2062 : (type, binop->get_op (),
844 : get_or_create_binop (type, op,
845 : binop->get_arg0 (), arg1),
846 : get_or_create_binop (type, op,
847 2062 : binop->get_arg1 (), arg1));
848 : }
849 :
850 :
851 : /* Typeless operations, assumed to be effectively arbitrary sized
852 : integers following normal arithmetic rules. */
853 128933 : if (!type)
854 34361 : switch (op)
855 : {
856 : default:
857 : break;
858 827 : case MINUS_EXPR:
859 827 : {
860 : /* (X - X) -> 0. */
861 827 : if (arg0 == arg1)
862 751 : return get_or_create_int_cst (type, 0);
863 :
864 : /* (X + A) - (A + B) -> (A - B). */
865 76 : if (const binop_svalue *binop0 = arg0->dyn_cast_binop_svalue ())
866 60 : if (const binop_svalue *binop1 = arg1->dyn_cast_binop_svalue ())
867 36 : if (binop0->get_op () == PLUS_EXPR
868 28 : && binop1->get_op () == PLUS_EXPR
869 56 : && binop0->get_arg0 () == binop1->get_arg0 ())
870 20 : return get_or_create_binop (NULL_TREE, op,
871 : binop0->get_arg1 (),
872 20 : binop1->get_arg1 ());
873 : }
874 : break;
875 :
876 73 : case EXACT_DIV_EXPR:
877 73 : {
878 73 : if (const unaryop_svalue *unaryop0 = arg0->dyn_cast_unaryop_svalue ())
879 : {
880 0 : if (unaryop0->get_op () == NOP_EXPR)
881 0 : if (const svalue *sval = maybe_fold_binop (NULL_TREE, op,
882 : unaryop0->get_arg (),
883 : arg1))
884 : return sval;
885 : }
886 73 : if (const binop_svalue *binop0 = arg0->dyn_cast_binop_svalue ())
887 : {
888 72 : switch (binop0->get_op ())
889 : {
890 : default:
891 : break;
892 :
893 26 : case PLUS_EXPR:
894 26 : case MINUS_EXPR:
895 : /* (A op B) / C -> (A / C) op (B / C). */
896 26 : {
897 26 : if (const svalue *op_on_a
898 26 : = maybe_fold_binop (NULL_TREE, op,
899 : binop0->get_arg0 (), arg1))
900 26 : if (const svalue *op_on_b
901 26 : = maybe_fold_binop (NULL_TREE, op,
902 : binop0->get_arg1 (), arg1))
903 26 : return get_or_create_binop (NULL_TREE,
904 : binop0->get_op (),
905 26 : op_on_a, op_on_b);
906 : }
907 : break;
908 :
909 46 : case MULT_EXPR:
910 : /* (A * B) / C -> A * (B / C) if C is a divisor of B.
911 : In particular, this should also handle the case
912 : (A * B) / B -> A. */
913 46 : if (const svalue *b_div_c
914 46 : = maybe_fold_binop (NULL_TREE, op,
915 : binop0->get_arg1 (), arg1))
916 46 : return get_or_create_binop (NULL_TREE, binop0->get_op (),
917 46 : binop0->get_arg0 (), b_div_c);
918 : }
919 : }
920 : }
921 : break;
922 : }
923 :
924 : /* etc. */
925 :
926 : return nullptr;
927 : }
928 :
/* Return the svalue * for a binary operation OP on ARG0 and ARG1
   with a result of type TYPE, creating it if necessary.  */
931 :
const svalue *
region_model_manager::get_or_create_binop (tree type, enum tree_code op,
					    const svalue *arg0,
					    const svalue *arg1)
{
  /* For commutative ops, put any constant on the RHS.
     This canonicalization increases consolidation hits and lets
     maybe_fold_binop assume constants are in arg1.  */
  if (arg0->maybe_get_constant () && commutative_tree_code (op))
    std::swap (arg0, arg1);

  /* Try to simplify the binop away entirely before consolidating.  */
  if (const svalue *folded = maybe_fold_binop (type, op, arg0, arg1))
    return folded;

  /* Ops on "unknown"/"poisoned" are unknown (unless we were able to fold
     it via an identity in maybe_fold_binop).  */
  if (!arg0->can_have_associated_state_p ()
      || !arg1->can_have_associated_state_p ())
    return get_or_create_unknown_svalue (type);

  /* Consolidate: reuse any existing binop_svalue for this
     (type, op, arg0, arg1) key; otherwise create and record one,
     bailing out to "unknown" if it would be too complex.  */
  binop_svalue::key_t key (type, op, arg0, arg1);
  if (binop_svalue **slot = m_binop_values_map.get (key))
    return *slot;
  binop_svalue *binop_sval
    = new binop_svalue (alloc_symbol_id (), type, op, arg0, arg1);
  RETURN_UNKNOWN_IF_TOO_COMPLEX (binop_sval);
  m_binop_values_map.put (key, binop_sval);
  return binop_sval;
}
959 :
960 : /* Subroutine of region_model_manager::get_or_create_sub_svalue.
961 : Return a folded svalue, or nullptr. */
962 :
const svalue *
region_model_manager::maybe_fold_sub_svalue (tree type,
					     const svalue *parent_svalue,
					     const region *subregion)
{
  /* Subvalues of "unknown"/"poisoned" are unknown.  */
  if (!parent_svalue->can_have_associated_state_p ())
    return get_or_create_unknown_svalue (type);

  /* If we have a subvalue of a zero constant, it's zero.  */
  if (tree cst = parent_svalue->maybe_get_constant ())
    if (TREE_CODE (cst) == INTEGER_CST)
      if (zerop (cst))
	return get_or_create_cast (type, parent_svalue);

  /* If we have a subregion of a zero-fill, it's zero.
     The zero-fill is expressed as a cast (NOP/VIEW_CONVERT) of a
     zero constant.  */
  if (const unaryop_svalue *unary
	= parent_svalue->dyn_cast_unaryop_svalue ())
    {
      if (unary->get_op () == NOP_EXPR
	  || unary->get_op () == VIEW_CONVERT_EXPR)
	if (tree cst = unary->get_arg ()->maybe_get_constant ())
	  if (zerop (cst) && type)
	    {
	      const svalue *cst_sval
		= get_or_create_constant_svalue (cst);
	      return get_or_create_cast (type, cst_sval);
	    }
    }

  /* Handle getting individual chars from a STRING_CST or RAW_DATA_CST.  */
  if (tree cst = parent_svalue->maybe_get_constant ())
    if (TREE_CODE (cst) == STRING_CST
	|| TREE_CODE (cst) == RAW_DATA_CST)
      {
	/* If we have a concrete 1-byte access within the parent region... */
	byte_range subregion_bytes (0, 0);
	if (subregion->get_relative_concrete_byte_range (&subregion_bytes)
	    && subregion_bytes.m_size_in_bytes == 1
	    && type)
	  {
	    /* ...then attempt to get that char from the constant.  */
	    HOST_WIDE_INT hwi_start_byte
	      = subregion_bytes.m_start_byte_offset.to_shwi ();
	    tree cst_idx
	      = build_int_cst_type (size_type_node, hwi_start_byte);
	    if (const svalue *char_sval
		  = maybe_get_char_from_cst (cst, cst_idx))
	      return get_or_create_cast (type, char_sval);
	  }
      }

  if (const initial_svalue *init_sval
	= parent_svalue->dyn_cast_initial_svalue ())
    {
      /* SUB(INIT(r)).FIELD -> INIT(r.FIELD)
	 i.e.
	 Subvalue(InitialValue(R1), FieldRegion(R2, F))
	 -> InitialValue(FieldRegion(R1, F)).  */
      if (const field_region *field_reg = subregion->dyn_cast_field_region ())
	{
	  const region *field_reg_new
	    = get_field_region (init_sval->get_region (),
				field_reg->get_field ());
	  return get_or_create_initial_value (field_reg_new);
	}
      /* SUB(INIT(r)[ELEMENT] -> INIT(e[ELEMENT])
	 i.e.
	 Subvalue(InitialValue(R1), ElementRegion(R2, IDX))
	 -> InitialValue(ElementRegion(R1, IDX)).  */
      if (const element_region *element_reg = subregion->dyn_cast_element_region ())
	{
	  const region *element_reg_new
	    = get_element_region (init_sval->get_region (),
				  element_reg->get_type (),
				  element_reg->get_index ());
	  return get_or_create_initial_value (element_reg_new);
	}
    }

  /* A subvalue of a repeated svalue is (a cast of) the repeated
     inner svalue.  */
  if (const repeated_svalue *repeated_sval
	= parent_svalue->dyn_cast_repeated_svalue ())
    if (type)
      return get_or_create_cast (type, repeated_sval->get_inner_svalue ());

  /* No folding opportunity found.  */
  return nullptr;
}
1050 :
1051 : /* Return the svalue * for extracting a subvalue of type TYPE from
1052 : PARENT_SVALUE based on SUBREGION, creating it if necessary. */
1053 :
1054 : const svalue *
1055 62853 : region_model_manager::get_or_create_sub_svalue (tree type,
1056 : const svalue *parent_svalue,
1057 : const region *subregion)
1058 : {
1059 125706 : if (const svalue *folded
1060 62853 : = maybe_fold_sub_svalue (type, parent_svalue, subregion))
1061 : return folded;
1062 :
1063 15898 : sub_svalue::key_t key (type, parent_svalue, subregion);
1064 15898 : if (sub_svalue **slot = m_sub_values_map.get (key))
1065 13567 : return *slot;
1066 2331 : sub_svalue *sub_sval
1067 2331 : = new sub_svalue (alloc_symbol_id (), type, parent_svalue, subregion);
1068 2331 : RETURN_UNKNOWN_IF_TOO_COMPLEX (sub_sval);
1069 2331 : m_sub_values_map.put (key, sub_sval);
1070 2331 : return sub_sval;
1071 : }
1072 :
1073 : /* Subroutine of region_model_manager::get_or_create_repeated_svalue.
1074 : Return a folded svalue, or nullptr. */
1075 :
const svalue *
region_model_manager::maybe_fold_repeated_svalue (tree type,
						  const svalue *outer_size,
						  const svalue *inner_svalue)
{
  /* Repeated "unknown"/"poisoned" is unknown.  */
  if (!outer_size->can_have_associated_state_p ()
      || !inner_svalue->can_have_associated_state_p ())
    return get_or_create_unknown_svalue (type);

  /* If INNER_SVALUE is the same size as OUTER_SIZE,
     turn into simply a cast.  */
  if (inner_svalue->get_type ())
    if (tree cst_outer_num_bytes = outer_size->maybe_get_constant ())
      {
	/* int_size_in_bytes returns -1 for non-constant sizes.  */
	HOST_WIDE_INT num_bytes_inner_svalue
	  = int_size_in_bytes (inner_svalue->get_type ());
	if (num_bytes_inner_svalue != -1)
	  if (num_bytes_inner_svalue
	      == (HOST_WIDE_INT)tree_to_uhwi (cst_outer_num_bytes))
	    {
	      if (type)
		return get_or_create_cast (type, inner_svalue);
	      else
		return inner_svalue;
	    }
      }

  /* Handle zero-fill of a specific type: a repeated zero constant
     viewed as TYPE is just a zero of that type.  */
  if (tree cst = inner_svalue->maybe_get_constant ())
    if (zerop (cst) && type)
      return get_or_create_cast (type, inner_svalue);

  /* No folding opportunity found.  */
  return nullptr;
}
1111 :
1112 : /* Return the svalue * of type TYPE in which INNER_SVALUE is repeated
1113 : enough times to be of size OUTER_SIZE, creating it if necessary.
1114 : e.g. for filling buffers with a constant value. */
1115 :
1116 : const svalue *
1117 2539 : region_model_manager::get_or_create_repeated_svalue (tree type,
1118 : const svalue *outer_size,
1119 : const svalue *inner_svalue)
1120 : {
1121 5078 : if (const svalue *folded
1122 2539 : = maybe_fold_repeated_svalue (type, outer_size, inner_svalue))
1123 : return folded;
1124 :
1125 1713 : repeated_svalue::key_t key (type, outer_size, inner_svalue);
1126 1713 : if (repeated_svalue **slot = m_repeated_values_map.get (key))
1127 1201 : return *slot;
1128 512 : repeated_svalue *repeated_sval
1129 512 : = new repeated_svalue (alloc_symbol_id (), type, outer_size, inner_svalue);
1130 512 : RETURN_UNKNOWN_IF_TOO_COMPLEX (repeated_sval);
1131 512 : m_repeated_values_map.put (key, repeated_sval);
1132 512 : return repeated_sval;
1133 : }
1134 :
1135 : /* Attempt to get the bit_range for FIELD within a RECORD_TYPE.
1136 : Return true and write the result to OUT if successful.
1137 : Return false otherwise. */
1138 :
static bool
get_bit_range_for_field (tree field, bit_range *out)
{
  bit_size_t bit_size;
  /* Fail if the field's type doesn't have a knowable size in bits.  */
  if (!int_size_in_bits (TREE_TYPE (field), &bit_size))
    return false;
  /* Offset of the field within its containing record, in bits.  */
  int field_bit_offset = int_bit_position (field);
  *out = bit_range (field_bit_offset, bit_size);
  return true;
}
1149 :
1150 : /* Attempt to get the byte_range for FIELD within a RECORD_TYPE.
1151 : Return true and write the result to OUT if successful.
1152 : Return false otherwise. */
1153 :
1154 : static bool
1155 931 : get_byte_range_for_field (tree field, byte_range *out)
1156 : {
1157 931 : bit_range field_bits (0, 0);
1158 931 : if (!get_bit_range_for_field (field, &field_bits))
1159 : return false;
1160 931 : return field_bits.as_byte_range (out);
1161 : }
1162 :
/* Attempt to determine if there is a specific field within RECORD_TYPE
   at BYTES.  If so, return it, and write the location of BYTES relative
   to the field to *OUT_RANGE_WITHIN_FIELD.
   Otherwise, return NULL_TREE.
   For example, given:
     struct foo { uint32 a; uint32 b; };
   and
     bytes = {bytes 6-7} (of foo)
   we have bytes 2-3 of field b.  */
1172 :
static tree
get_field_at_byte_range (tree record_type, const byte_range &bytes,
			 byte_range *out_range_within_field)
{
  /* Convert the start of BYTES to a bit offset for the field lookup.  */
  bit_offset_t bit_offset = bytes.m_start_byte_offset * BITS_PER_UNIT;

  tree field = get_field_at_bit_offset (record_type, bit_offset);
  if (!field)
    return NULL_TREE;

  /* Determine the field's own byte extent within the record.  */
  byte_range field_bytes (0,0);
  if (!get_byte_range_for_field (field, &field_bytes))
    return NULL_TREE;

  /* Is BYTES fully within field_bytes?  */
  byte_range bytes_within_field (0,0);
  if (!field_bytes.contains_p (bytes, &bytes_within_field))
    return NULL_TREE;

  *out_range_within_field = bytes_within_field;
  return field;
}
1195 :
1196 : /* Subroutine of region_model_manager::get_or_create_bits_within.
1197 : Return a folded svalue, or NULL. */
1198 :
const svalue *
region_model_manager::maybe_fold_bits_within_svalue (tree type,
						     const bit_range &bits,
						     const svalue *inner_svalue)
{
  tree inner_type = inner_svalue->get_type ();
  /* Fold:
       BITS_WITHIN ((0, sizeof (VAL), VAL))
     to:
       CAST(TYPE, VAL).  */
  if (bits.m_start_bit_offset == 0 && inner_type)
    {
      bit_size_t inner_type_size;
      if (int_size_in_bits (inner_type, &inner_type_size))
	if (inner_type_size == bits.m_size_in_bits)
	  {
	    if (type)
	      return get_or_create_cast (type, inner_svalue);
	    else
	      return inner_svalue;
	  }
    }

  /* Kind-specific folding.  */
  if (const svalue *sval
	= inner_svalue->maybe_fold_bits_within (type, bits, this))
    return sval;

  /* The remaining folds need BITS to be expressible as whole bytes,
     and need the inner svalue to have a type.  */
  byte_range bytes (0,0);
  if (bits.as_byte_range (&bytes) && inner_type)
    switch (TREE_CODE (inner_type))
      {
      default:
	break;
      case ARRAY_TYPE:
	{
	  /* Fold:
	       BITS_WITHIN (range, KIND(REG))
	     to:
	       BITS_WITHIN (range - offsetof(ELEMENT), KIND(REG.ELEMENT))
	     if range1 is a byte-range fully within one ELEMENT.  */
	  tree element_type = TREE_TYPE (inner_type);
	  HOST_WIDE_INT element_byte_size
	    = int_size_in_bytes (element_type);
	  if (element_byte_size > 0)
	    {
	      /* Indices of the elements containing the first and last
		 byte of the range; equal iff the range is within a
		 single element.  */
	      HOST_WIDE_INT start_idx
		= (bytes.get_start_byte_offset ().to_shwi ()
		   / element_byte_size);
	      HOST_WIDE_INT last_idx
		= (bytes.get_last_byte_offset ().to_shwi ()
		   / element_byte_size);
	      if (start_idx == last_idx)
		{
		  if (const initial_svalue *initial_sval
			= inner_svalue->dyn_cast_initial_svalue ())
		    {
		      /* Re-express the range relative to the start of
			 that one element.  */
		      bit_offset_t start_of_element
			= start_idx * element_byte_size * BITS_PER_UNIT;
		      bit_range bits_within_element
			(bits.m_start_bit_offset - start_of_element,
			 bits.m_size_in_bits);
		      const svalue *idx_sval
			= get_or_create_int_cst (integer_type_node, start_idx);
		      const region *element_reg =
			get_element_region (initial_sval->get_region (),
					    element_type, idx_sval);
		      const svalue *element_reg_sval
			= get_or_create_initial_value (element_reg);
		      return get_or_create_bits_within (type,
							bits_within_element,
							element_reg_sval);
		    }
		}
	    }
	}
	break;
      case RECORD_TYPE:
	{
	  /* Fold:
	       BYTES_WITHIN (range, KIND(REG))
	     to:
	       BYTES_WITHIN (range - offsetof(FIELD), KIND(REG.FIELD))
	     if range1 is fully within FIELD.  */
	  byte_range bytes_within_field (0, 0);
	  if (tree field = get_field_at_byte_range (inner_type, bytes,
						    &bytes_within_field))
	    {
	      if (const initial_svalue *initial_sval
		    = inner_svalue->dyn_cast_initial_svalue ())
		{
		  const region *field_reg =
		    get_field_region (initial_sval->get_region (), field);
		  const svalue *initial_reg_sval
		    = get_or_create_initial_value (field_reg);
		  return get_or_create_bits_within
		    (type,
		     bytes_within_field.as_bit_range (),
		     initial_reg_sval);
		}
	    }
	}
	break;
      }
  /* No folding opportunity found.  */
  return nullptr;
}
1305 :
1306 : /* Return the svalue * of type TYPE for extracting BITS from INNER_SVALUE,
1307 : creating it if necessary. */
1308 :
1309 : const svalue *
1310 20965 : region_model_manager::get_or_create_bits_within (tree type,
1311 : const bit_range &bits,
1312 : const svalue *inner_svalue)
1313 : {
1314 41930 : if (const svalue *folded
1315 20965 : = maybe_fold_bits_within_svalue (type, bits, inner_svalue))
1316 : return folded;
1317 :
1318 1434 : bits_within_svalue::key_t key (type, bits, inner_svalue);
1319 1434 : if (bits_within_svalue **slot = m_bits_within_values_map.get (key))
1320 662 : return *slot;
1321 772 : bits_within_svalue *bits_within_sval
1322 772 : = new bits_within_svalue (alloc_symbol_id (), type, bits, inner_svalue);
1323 772 : RETURN_UNKNOWN_IF_TOO_COMPLEX (bits_within_sval);
1324 772 : m_bits_within_values_map.put (key, bits_within_sval);
1325 772 : return bits_within_sval;
1326 : }
1327 :
1328 : /* Return the svalue * that decorates ARG as being unmergeable,
1329 : creating it if necessary. */
1330 :
1331 : const svalue *
1332 832 : region_model_manager::get_or_create_unmergeable (const svalue *arg)
1333 : {
1334 832 : if (arg->get_kind () == SK_UNMERGEABLE)
1335 : return arg;
1336 :
1337 832 : if (unmergeable_svalue **slot = m_unmergeable_values_map.get (arg))
1338 628 : return *slot;
1339 204 : unmergeable_svalue *unmergeable_sval
1340 204 : = new unmergeable_svalue (alloc_symbol_id (), arg);
1341 204 : RETURN_UNKNOWN_IF_TOO_COMPLEX (unmergeable_sval);
1342 204 : m_unmergeable_values_map.put (arg, unmergeable_sval);
1343 204 : return unmergeable_sval;
1344 : }
1345 :
1346 : /* Return the svalue * of type TYPE for the merger of value BASE_SVAL
1347 : and ITER_SVAL at SNODE, creating it if necessary. */
1348 :
const svalue *
region_model_manager::
get_or_create_widening_svalue (tree type,
			       const supernode *snode,
			       const svalue *base_sval,
			       const svalue *iter_sval)
{
  /* Widening svalues are built from non-widening operands; callers
     must not nest them.  */
  gcc_assert (base_sval->get_kind () != SK_WIDENING);
  gcc_assert (iter_sval->get_kind () != SK_WIDENING);
  /* Consolidate: reuse an existing widening_svalue for this
     (type, snode, base, iter) combination, creating and recording
     a new one if not found.  */
  widening_svalue::key_t key (type, snode, base_sval, iter_sval);
  if (widening_svalue **slot = m_widening_values_map.get (key))
    return *slot;
  widening_svalue *widening_sval
    = new widening_svalue (alloc_symbol_id (), type, snode, base_sval,
			   iter_sval);
  RETURN_UNKNOWN_IF_TOO_COMPLEX (widening_sval);
  m_widening_values_map.put (key, widening_sval);
  return widening_sval;
}
1368 :
1369 : /* Return the svalue * of type TYPE for the compound values in MAP,
1370 : creating it if necessary. */
1371 :
const svalue *
region_model_manager::get_or_create_compound_svalue (tree type,
						     const binding_map &map)
{
  /* Look up an existing compound_svalue keyed on (type, MAP); the key
     here is temporary since it points at the caller's MAP.  */
  compound_svalue::key_t tmp_key (type, &map);
  if (compound_svalue **slot = m_compound_values_map.get (tmp_key))
    return *slot;
  compound_svalue *compound_sval
    = new compound_svalue (alloc_symbol_id (), type, map);
  RETURN_UNKNOWN_IF_TOO_COMPLEX (compound_sval);
  /* Use make_key rather than reusing the key, so that we use a
     ptr to compound_sval's binding_map, rather than the MAP param.  */
  m_compound_values_map.put (compound_sval->make_key (), compound_sval);
  return compound_sval;
}
1387 :
1388 : /* class conjured_purge. */
1389 :
1390 : /* Purge state relating to SVAL. */
1391 :
void
conjured_purge::purge (const conjured_svalue *sval) const
{
  /* Delegate to the stored region_model, purging any program state
     involving SVAL, using the stored context.  */
  m_model->purge_state_involving (sval, m_ctxt);
}
1397 :
1398 : /* Return the svalue * of type TYPE for the value conjured for ID_REG
1399 : at STMT (using IDX for any further disambiguation),
1400 : creating it if necessary.
1401 : Use P to purge existing state from the svalue, for the case where a
1402 : conjured_svalue would be reused along an execution path. */
1403 :
const svalue *
region_model_manager::get_or_create_conjured_svalue (tree type,
						     const gimple *stmt,
						     const region *id_reg,
						     const conjured_purge &p,
						     unsigned idx)
{
  /* Consolidate on (type, stmt, id_reg, idx).  */
  conjured_svalue::key_t key (type, stmt, id_reg, idx);
  if (conjured_svalue **slot = m_conjured_values_map.get (key))
    {
      const conjured_svalue *sval = *slot;
      /* We're reusing an existing conjured_svalue, perhaps from a different
	 state within this analysis, or perhaps from an earlier state on this
	 execution path.  For the latter, purge any state involving the "new"
	 svalue from the current program_state.  */
      p.purge (sval);
      return sval;
    }
  /* Not seen before: create, complexity-check, and record a new one.  */
  conjured_svalue *conjured_sval
    = new conjured_svalue (alloc_symbol_id (), type, stmt, id_reg, idx);
  RETURN_UNKNOWN_IF_TOO_COMPLEX (conjured_sval);
  m_conjured_values_map.put (key, conjured_sval);
  return conjured_sval;
}
1428 :
1429 : /* Subroutine of region_model_manager::get_or_create_asm_output_svalue.
1430 : Return a folded svalue, or nullptr. */
1431 :
1432 : const svalue *
1433 367 : region_model_manager::
1434 : maybe_fold_asm_output_svalue (tree type,
1435 : const vec<const svalue *> &inputs)
1436 : {
1437 : /* Unknown inputs should lead to unknown results. */
1438 1653 : for (const auto &iter : inputs)
1439 609 : if (iter->get_kind () == SK_UNKNOWN)
1440 9 : return get_or_create_unknown_svalue (type);
1441 :
1442 : return nullptr;
1443 : }
1444 :
1445 : /* Return the svalue * of type TYPE for OUTPUT_IDX of the deterministic
1446 : asm stmt ASM_STMT, given INPUTS as inputs. */
1447 :
const svalue *
region_model_manager::
get_or_create_asm_output_svalue (tree type,
				 const gasm *asm_stmt,
				 unsigned output_idx,
				 const vec<const svalue *> &inputs)
{
  gcc_assert (inputs.length () <= asm_output_svalue::MAX_INPUTS);

  /* Try simplification first (e.g. unknown inputs -> unknown output).  */
  if (const svalue *folded
	= maybe_fold_asm_output_svalue (type, inputs))
    return folded;

  /* Extract the asm template and output count from the stmt; the
     template string is part of the consolidation key.  */
  const char *asm_string = gimple_asm_string (asm_stmt);
  const unsigned noutputs = gimple_asm_noutputs (asm_stmt);

  /* Consolidate: reuse an existing instance for this key, creating
     and recording a new one if not found.  */
  asm_output_svalue::key_t key (type, asm_string, output_idx, inputs);
  if (asm_output_svalue **slot = m_asm_output_values_map.get (key))
    return *slot;
  asm_output_svalue *asm_output_sval
    = new asm_output_svalue (alloc_symbol_id (), type, asm_string, output_idx,
			     noutputs, inputs);
  RETURN_UNKNOWN_IF_TOO_COMPLEX (asm_output_sval);
  m_asm_output_values_map.put (key, asm_output_sval);
  return asm_output_sval;
}
1474 :
1475 : /* Return the svalue * of type TYPE for OUTPUT_IDX of a deterministic
1476 : asm stmt with string ASM_STRING with NUM_OUTPUTS outputs, given
1477 : INPUTS as inputs. */
1478 :
const svalue *
region_model_manager::
get_or_create_asm_output_svalue (tree type,
				 const char *asm_string,
				 unsigned output_idx,
				 unsigned num_outputs,
				 const vec<const svalue *> &inputs)
{
  gcc_assert (inputs.length () <= asm_output_svalue::MAX_INPUTS);

  /* Try simplification first (e.g. unknown inputs -> unknown output).  */
  if (const svalue *folded
	= maybe_fold_asm_output_svalue (type, inputs))
    return folded;

  /* Consolidate: reuse an existing instance for this key, creating
     and recording a new one if not found.  */
  asm_output_svalue::key_t key (type, asm_string, output_idx, inputs);
  if (asm_output_svalue **slot = m_asm_output_values_map.get (key))
    return *slot;
  asm_output_svalue *asm_output_sval
    = new asm_output_svalue (alloc_symbol_id (), type, asm_string, output_idx,
			     num_outputs, inputs);
  RETURN_UNKNOWN_IF_TOO_COMPLEX (asm_output_sval);
  m_asm_output_values_map.put (key, asm_output_sval);
  return asm_output_sval;
}
1503 :
1504 : /* Return the svalue * of type TYPE for the result of a call to FNDECL
1505 : with __attribute__((const)), given INPUTS as inputs. */
1506 :
1507 : const svalue *
1508 891 : region_model_manager::
1509 : get_or_create_const_fn_result_svalue (tree type,
1510 : tree fndecl,
1511 : const vec<const svalue *> &inputs)
1512 : {
1513 891 : gcc_assert (fndecl);
1514 891 : gcc_assert (DECL_P (fndecl));
1515 891 : gcc_assert (TREE_READONLY (fndecl));
1516 891 : gcc_assert (inputs.length () <= const_fn_result_svalue::MAX_INPUTS);
1517 :
1518 891 : const_fn_result_svalue::key_t key (type, fndecl, inputs);
1519 891 : if (const_fn_result_svalue **slot = m_const_fn_result_values_map.get (key))
1520 704 : return *slot;
1521 187 : const_fn_result_svalue *const_fn_result_sval
1522 187 : = new const_fn_result_svalue (alloc_symbol_id (), type, fndecl, inputs);
1523 187 : RETURN_UNKNOWN_IF_TOO_COMPLEX (const_fn_result_sval);
1524 187 : m_const_fn_result_values_map.put (key, const_fn_result_sval);
1525 187 : return const_fn_result_sval;
1526 : }
1527 :
1528 : /* Given DATA_CST (a STRING_CST or RAW_DATA_CST) and BYTE_OFFSET_CST a constant,
1529 : attempt to get the character at that offset, returning either
1530 : the svalue for the character constant, or nullptr if unsuccessful. */
1531 :
1532 : const svalue *
1533 6045 : region_model_manager::maybe_get_char_from_cst (tree data_cst,
1534 : tree byte_offset_cst)
1535 : {
1536 6045 : switch (TREE_CODE (data_cst))
1537 : {
1538 0 : default: gcc_unreachable ();
1539 4570 : case STRING_CST:
1540 4570 : return maybe_get_char_from_string_cst (data_cst, byte_offset_cst);
1541 1475 : case RAW_DATA_CST:
1542 1475 : return maybe_get_char_from_raw_data_cst (data_cst, byte_offset_cst);
1543 : }
1544 : }
1545 :
1546 : /* Get a tree for the size of STRING_CST, or NULL_TREE.
1547 : Note that this may be larger than TREE_STRING_LENGTH (implying
1548 : a run of trailing zero bytes from TREE_STRING_LENGTH up to this
1549 : higher limit). */
1550 :
tree
get_string_cst_size (const_tree string_cst)
{
  gcc_assert (TREE_CODE (string_cst) == STRING_CST);
  gcc_assert (TREE_CODE (TREE_TYPE (string_cst)) == ARRAY_TYPE);

  /* Use the size of the array type, which may exceed
     TREE_STRING_LENGTH (the excess being trailing zero bytes).  */
  return TYPE_SIZE_UNIT (TREE_TYPE (string_cst));
}
1559 :
1560 : /* Given STRING_CST, a STRING_CST and BYTE_OFFSET_CST a constant,
1561 : attempt to get the character at that offset, returning either
1562 : the svalue for the character constant, or nullptr if unsuccessful. */
1563 :
const svalue *
region_model_manager::maybe_get_char_from_string_cst (tree string_cst,
						      tree byte_offset_cst)
{
  gcc_assert (TREE_CODE (string_cst) == STRING_CST);

  /* Adapted from fold_read_from_constant_string.
     Only handle constant offsets into strings whose element mode
     is a single byte.  */
  scalar_int_mode char_mode;
  if (TREE_CODE (byte_offset_cst) == INTEGER_CST
      && is_int_mode (TYPE_MODE (TREE_TYPE (TREE_TYPE (string_cst))),
		      &char_mode)
      && GET_MODE_SIZE (char_mode) == 1)
    {
      /* If we're beyond the string_cst, the read is unsuccessful.  */
      if (compare_constants (byte_offset_cst,
			     GE_EXPR,
			     get_string_cst_size (string_cst)).is_true ())
	return nullptr;

      int char_val;
      if (compare_tree_int (byte_offset_cst,
			    TREE_STRING_LENGTH (string_cst)) < 0)
	/* We're within the area defined by TREE_STRING_POINTER.  */
	char_val = (TREE_STRING_POINTER (string_cst)
		    [TREE_INT_CST_LOW (byte_offset_cst)]);
      else
	/* We're in the padding area of trailing zeroes.  */
	char_val = 0;
      /* Wrap the char in a constant of the string's element type.  */
      tree char_cst
	= build_int_cst_type (TREE_TYPE (TREE_TYPE (string_cst)), char_val);
      return get_or_create_constant_svalue (char_cst);
    }
  return nullptr;
}
1598 :
1599 : /* Given RAW_DATA_CST, a RAW_DATA_CST and BYTE_OFFSET_CST a constant,
1600 : attempt to get the character at that offset, returning either
1601 : the svalue for the character constant, or nullptr if unsuccessful. */
1602 :
1603 : const svalue *
1604 1475 : region_model_manager::maybe_get_char_from_raw_data_cst (tree raw_data_cst,
1605 : tree byte_offset_cst)
1606 : {
1607 1475 : gcc_assert (TREE_CODE (raw_data_cst) == RAW_DATA_CST);
1608 1475 : gcc_assert (TREE_CODE (byte_offset_cst) == INTEGER_CST);
1609 :
1610 1475 : offset_int o = (wi::to_offset (byte_offset_cst));
1611 1475 : if (o >= 0 && o < RAW_DATA_LENGTH (raw_data_cst))
1612 1468 : return get_or_create_int_cst
1613 1468 : (TREE_TYPE (raw_data_cst),
1614 2936 : RAW_DATA_UCHAR_ELT (raw_data_cst, o.to_uhwi ()));
1615 : return nullptr;
1616 : }
1617 :
1618 : /* region consolidation. */
1619 :
1620 : /* Return the region for FNDECL, creating it if necessary. */
1621 :
1622 : const function_region *
1623 876741 : region_model_manager::get_region_for_fndecl (tree fndecl)
1624 : {
1625 876741 : gcc_assert (TREE_CODE (fndecl) == FUNCTION_DECL);
1626 :
1627 876741 : function_region **slot = m_fndecls_map.get (fndecl);
1628 876741 : if (slot)
1629 867356 : return *slot;
1630 9385 : function_region *reg
1631 9385 : = new function_region (alloc_symbol_id (), &m_code_region, fndecl);
1632 9385 : m_fndecls_map.put (fndecl, reg);
1633 9385 : return reg;
1634 : }
1635 :
1636 : /* Return the region for LABEL, creating it if necessary. */
1637 :
const label_region *
region_model_manager::get_region_for_label (tree label)
{
  gcc_assert (TREE_CODE (label) == LABEL_DECL);

  /* Consolidate: reuse any existing region for this label.  */
  label_region **slot = m_labels_map.get (label);
  if (slot)
    return *slot;

  /* A label lives within its containing function's region.  */
  tree fndecl = DECL_CONTEXT (label);
  gcc_assert (fndecl && TREE_CODE (fndecl) == FUNCTION_DECL);

  const function_region *func_reg = get_region_for_fndecl (fndecl);
  label_region *reg
    = new label_region (alloc_symbol_id (), func_reg, label);
  m_labels_map.put (label, reg);
  return reg;
}
1656 :
1657 : /* Return the region for EXPR, creating it if necessary. */
1658 :
1659 : const decl_region *
1660 51173 : region_model_manager::get_region_for_global (tree expr)
1661 : {
1662 51173 : gcc_assert (VAR_P (expr));
1663 :
1664 51173 : decl_region **slot = m_globals_map.get (expr);
1665 51173 : if (slot)
1666 44803 : return *slot;
1667 6370 : decl_region *reg
1668 6370 : = new decl_region (alloc_symbol_id (), &m_globals_region, expr);
1669 6370 : m_globals_map.put (expr, reg);
1670 6370 : return reg;
1671 : }
1672 :
/* Return the region for an unknown access of type REGION_TYPE,
   creating it if necessary.
   This is a symbolic_region, where the pointer is an unknown_svalue
   of type &REGION_TYPE.  */
1677 :
1678 : const region *
1679 12777 : region_model_manager::get_unknown_symbolic_region (tree region_type)
1680 : {
1681 12777 : tree ptr_type = region_type ? build_pointer_type (region_type) : NULL_TREE;
1682 12777 : const svalue *unknown_ptr = get_or_create_unknown_svalue (ptr_type);
1683 12777 : return get_symbolic_region (unknown_ptr);
1684 : }
1685 :
1686 : /* Return the region that describes accessing field FIELD of PARENT,
1687 : creating it if necessary. */
1688 :
1689 : const region *
1690 47072 : region_model_manager::get_field_region (const region *parent, tree field)
1691 : {
1692 47072 : gcc_assert (TREE_CODE (field) == FIELD_DECL);
1693 :
1694 : /* (*UNKNOWN_PTR).field is (*UNKNOWN_PTR_OF_&FIELD_TYPE). */
1695 47072 : if (parent->symbolic_for_unknown_ptr_p ())
1696 4179 : return get_unknown_symbolic_region (TREE_TYPE (field));
1697 :
1698 42893 : field_region::key_t key (parent, field);
1699 78027 : if (field_region *reg = m_field_regions.get (key))
1700 : return reg;
1701 :
1702 7759 : field_region *field_reg
1703 7759 : = new field_region (alloc_symbol_id (), parent, field);
1704 7759 : m_field_regions.put (key, field_reg);
1705 7759 : return field_reg;
1706 : }
1707 :
1708 : /* Return the region that describes accessing the element of type
1709 : ELEMENT_TYPE at index INDEX of PARENT, creating it if necessary. */
1710 :
1711 : const region *
1712 28234 : region_model_manager::get_element_region (const region *parent,
1713 : tree element_type,
1714 : const svalue *index)
1715 : {
1716 : /* (UNKNOWN_PTR[IDX]) is (UNKNOWN_PTR). */
1717 28234 : if (parent->symbolic_for_unknown_ptr_p ())
1718 52 : return get_unknown_symbolic_region (element_type);
1719 :
1720 28182 : element_region::key_t key (parent, element_type, index);
1721 52252 : if (element_region *reg = m_element_regions.get (key))
1722 : return reg;
1723 :
1724 4112 : element_region *element_reg
1725 4112 : = new element_region (alloc_symbol_id (), parent, element_type, index);
1726 4112 : m_element_regions.put (key, element_reg);
1727 4112 : return element_reg;
1728 : }
1729 :
1730 : /* Return the region that describes accessing the subregion of type
1731 : ELEMENT_TYPE at offset BYTE_OFFSET within PARENT, creating it if
1732 : necessary. */
1733 :
1734 : const region *
1735 93954 : region_model_manager::get_offset_region (const region *parent,
1736 : tree type,
1737 : const svalue *byte_offset)
1738 : {
1739 : /* (UNKNOWN_PTR + OFFSET) is (UNKNOWN_PTR). */
1740 95039 : if (parent->symbolic_for_unknown_ptr_p ())
1741 8177 : return get_unknown_symbolic_region (type);
1742 :
1743 : /* If BYTE_OFFSET is zero, return PARENT. */
1744 86862 : if (tree cst_offset = byte_offset->maybe_get_constant ())
1745 77598 : if (zerop (cst_offset))
1746 63086 : return get_cast_region (parent, type);
1747 :
1748 : /* Fold OFFSET_REGION(OFFSET_REGION(REG, X), Y)
1749 : to OFFSET_REGION(REG, (X + Y)). */
1750 47552 : if (const offset_region *parent_offset_reg
1751 23776 : = parent->dyn_cast_offset_region ())
1752 : {
1753 1085 : const svalue *sval_x = parent_offset_reg->get_byte_offset ();
1754 1085 : const svalue *sval_sum
1755 1085 : = get_or_create_binop (byte_offset->get_type (),
1756 : POINTER_PLUS_EXPR, sval_x, byte_offset);
1757 1085 : return get_offset_region (parent->get_parent_region (), type, sval_sum);
1758 : }
1759 :
1760 22691 : offset_region::key_t key (parent, type, byte_offset);
1761 41804 : if (offset_region *reg = m_offset_regions.get (key))
1762 : return reg;
1763 :
1764 3578 : offset_region *offset_reg
1765 3578 : = new offset_region (alloc_symbol_id (), parent, type, byte_offset);
1766 3578 : m_offset_regions.put (key, offset_reg);
1767 3578 : return offset_reg;
1768 : }
1769 :
1770 : /* Return the region that describes accessing the subregion of type
1771 : TYPE of size BYTE_SIZE_SVAL within PARENT, creating it if necessary. */
1772 :
1773 : const region *
1774 9468 : region_model_manager::get_sized_region (const region *parent,
1775 : tree type,
1776 : const svalue *byte_size_sval)
1777 : {
1778 9468 : if (parent->symbolic_for_unknown_ptr_p ())
1779 273 : return get_unknown_symbolic_region (type);
1780 :
1781 9195 : if (byte_size_sval->get_type () != size_type_node)
1782 2280 : byte_size_sval = get_or_create_cast (size_type_node, byte_size_sval);
1783 :
1784 : /* If PARENT is already that size, return it. */
1785 9195 : const svalue *parent_byte_size_sval = parent->get_byte_size_sval (this);
1786 9195 : if (tree parent_size_cst = parent_byte_size_sval->maybe_get_constant ())
1787 2836 : if (tree size_cst = byte_size_sval->maybe_get_constant ())
1788 : {
1789 2130 : tree comparison
1790 2130 : = fold_binary (EQ_EXPR, boolean_type_node, parent_size_cst, size_cst);
1791 2130 : if (comparison == boolean_true_node)
1792 : return parent;
1793 : }
1794 :
1795 7950 : sized_region::key_t key (parent, type, byte_size_sval);
1796 11950 : if (sized_region *reg = m_sized_regions.get (key))
1797 : return reg;
1798 :
1799 3950 : sized_region *sized_reg
1800 3950 : = new sized_region (alloc_symbol_id (), parent, type, byte_size_sval);
1801 3950 : m_sized_regions.put (key, sized_reg);
1802 3950 : return sized_reg;
1803 : }
1804 :
1805 : /* Return the region that describes accessing PARENT_REGION as if
1806 : it were of type TYPE, creating it if necessary. */
1807 :
1808 : const region *
1809 65821 : region_model_manager::get_cast_region (const region *original_region,
1810 : tree type)
1811 : {
1812 : /* If types match, return ORIGINAL_REGION. */
1813 65821 : if (type == original_region->get_type ())
1814 : return original_region;
1815 :
1816 17057 : if (original_region->symbolic_for_unknown_ptr_p ())
1817 96 : return get_unknown_symbolic_region (type);
1818 :
1819 16961 : cast_region::key_t key (original_region, type);
1820 31285 : if (cast_region *reg = m_cast_regions.get (key))
1821 : return reg;
1822 :
1823 2637 : cast_region *cast_reg
1824 2637 : = new cast_region (alloc_symbol_id (), original_region, type);
1825 2637 : m_cast_regions.put (key, cast_reg);
1826 2637 : return cast_reg;
1827 : }
1828 :
1829 : /* Return the frame_region for call to FUN from CALLING_FRAME, creating it
1830 : if necessary. CALLING_FRAME may be nullptr. */
1831 :
1832 : const frame_region *
1833 32640 : region_model_manager::get_frame_region (const frame_region *calling_frame,
1834 : const function &fun)
1835 : {
1836 32640 : int index = calling_frame ? calling_frame->get_index () + 1 : 0;
1837 :
1838 32640 : frame_region::key_t key (calling_frame, fun);
1839 50372 : if (frame_region *reg = m_frame_regions.get (key))
1840 : return reg;
1841 :
1842 14908 : frame_region *frame_reg
1843 : = new frame_region (alloc_symbol_id (), &m_stack_region, calling_frame,
1844 14908 : fun, index);
1845 14908 : m_frame_regions.put (key, frame_reg);
1846 14908 : return frame_reg;
1847 : }
1848 :
1849 : /* Return the region that describes dereferencing SVAL, creating it
1850 : if necessary. */
1851 :
1852 : const region *
1853 73121 : region_model_manager::get_symbolic_region (const svalue *sval)
1854 : {
1855 73121 : symbolic_region::key_t key (&m_root_region, sval);
1856 138027 : if (symbolic_region *reg = m_symbolic_regions.get (key))
1857 : return reg;
1858 :
1859 8215 : symbolic_region *symbolic_reg
1860 8215 : = new symbolic_region (alloc_symbol_id (), &m_root_region, sval);
1861 8215 : m_symbolic_regions.put (key, symbolic_reg);
1862 8215 : return symbolic_reg;
1863 : }
1864 :
1865 : /* Return the region that describes accessing STRING_CST, creating it
1866 : if necessary. */
1867 :
1868 : const string_region *
1869 15185 : region_model_manager::get_region_for_string (tree string_cst)
1870 : {
1871 15185 : gcc_assert (TREE_CODE (string_cst) == STRING_CST);
1872 :
1873 15185 : string_region **slot = m_string_map.get (string_cst);
1874 15185 : if (slot)
1875 11437 : return *slot;
1876 3748 : string_region *reg
1877 3748 : = new string_region (alloc_symbol_id (), &m_root_region, string_cst);
1878 3748 : m_string_map.put (string_cst, reg);
1879 3748 : return reg;
1880 : }
1881 :
1882 : /* Return the region that describes accessing BITS within PARENT as TYPE,
1883 : creating it if necessary. */
1884 :
1885 : const region *
1886 247 : region_model_manager::get_bit_range (const region *parent, tree type,
1887 : const bit_range &bits)
1888 : {
1889 247 : gcc_assert (parent);
1890 :
1891 247 : if (parent->symbolic_for_unknown_ptr_p ())
1892 0 : return get_unknown_symbolic_region (type);
1893 :
1894 247 : bit_range_region::key_t key (parent, type, bits);
1895 327 : if (bit_range_region *reg = m_bit_range_regions.get (key))
1896 : return reg;
1897 :
1898 167 : bit_range_region *bit_range_reg
1899 167 : = new bit_range_region (alloc_symbol_id (), parent, type, bits);
1900 167 : m_bit_range_regions.put (key, bit_range_reg);
1901 167 : return bit_range_reg;
1902 : }
1903 :
1904 : /* Return the region that describes accessing the IDX-th variadic argument
1905 : within PARENT_FRAME, creating it if necessary. */
1906 :
1907 : const var_arg_region *
1908 1199 : region_model_manager::get_var_arg_region (const frame_region *parent_frame,
1909 : unsigned idx)
1910 : {
1911 1199 : gcc_assert (parent_frame);
1912 :
1913 1199 : var_arg_region::key_t key (parent_frame, idx);
1914 1902 : if (var_arg_region *reg = m_var_arg_regions.get (key))
1915 : return reg;
1916 :
1917 496 : var_arg_region *var_arg_reg
1918 496 : = new var_arg_region (alloc_symbol_id (), parent_frame, idx);
1919 496 : m_var_arg_regions.put (key, var_arg_reg);
1920 496 : return var_arg_reg;
1921 : }
1922 :
1923 : /* If we see a tree code we don't know how to handle, rather than
1924 : ICE or generate bogus results, create a dummy region, and notify
1925 : CTXT so that it can mark the new state as being not properly
1926 : modelled. The exploded graph can then stop exploring that path,
1927 : since any diagnostics we might issue will have questionable
1928 : validity. */
1929 :
1930 : const region *
1931 56 : region_model_manager::
1932 : get_region_for_unexpected_tree_code (region_model_context *ctxt,
1933 : tree t,
1934 : const dump_location_t &loc)
1935 : {
1936 56 : tree type = TYPE_P (t) ? t : TREE_TYPE (t);
1937 56 : region *new_reg
1938 56 : = new unknown_region (alloc_symbol_id (), &m_root_region, type);
1939 56 : if (ctxt)
1940 52 : ctxt->on_unexpected_tree_code (t, loc);
1941 56 : return new_reg;
1942 : }
1943 :
1944 : /* Return a region describing a heap-allocated block of memory.
1945 :    Reuse an existing heap_allocated_region if its id is not within
1946 : BASE_REGS_IN_USE. */
1947 :
1948 : const region *
1949 19271 : region_model_manager::
1950 : get_or_create_region_for_heap_alloc (const bitmap &base_regs_in_use)
1951 : {
1952 : /* Try to reuse an existing region, if it's unreferenced in the
1953 : client state. */
1954 73789 : for (auto existing_reg : m_managed_dynamic_regions)
1955 35928 : if (!bitmap_bit_p (base_regs_in_use, existing_reg->get_id ()))
1956 17669 : if (existing_reg->get_kind () == RK_HEAP_ALLOCATED)
1957 : return existing_reg;
1958 :
1959 : /* All existing ones (if any) are in use; create a new one. */
1960 1701 : region *reg
1961 1701 : = new heap_allocated_region (alloc_symbol_id (), &m_heap_region);
1962 1701 : m_managed_dynamic_regions.safe_push (reg);
1963 1701 : return reg;
1964 : }
1965 :
1966 : /* Return a new region describing a block of memory allocated within FRAME. */
1967 :
1968 : const region *
1969 956 : region_model_manager::create_region_for_alloca (const frame_region *frame)
1970 : {
1971 956 : gcc_assert (frame);
1972 956 : region *reg = new alloca_region (alloc_symbol_id (), frame);
1973 956 : m_managed_dynamic_regions.safe_push (reg);
1974 956 : return reg;
1975 : }
1976 :
1977 : /* Log OBJ to LOGGER. */
1978 :
1979 : template <typename T>
1980 : static void
1981 211 : log_managed_object (logger *logger, const T *obj)
1982 : {
1983 211 : logger->start_log_line ();
1984 211 : pretty_printer *pp = logger->get_printer ();
1985 211 : pp_string (pp, " ");
1986 211 : obj->dump_to_pp (pp, true);
1987 211 : logger->end_log_line ();
1988 211 : }
1989 :
1990 : /* Specialization for frame_region, which also logs the count of locals
1991 : managed by the frame_region. */
1992 :
template <>
void
log_managed_object (logger *logger, const frame_region *obj)
{
  logger->start_log_line ();
  pretty_printer *pp = logger->get_printer ();
  pp_string (pp, "    ");
  obj->dump_to_pp (pp, true);
  /* Additionally show how many decl_regions this frame manages for
     its locals.  */
  pp_printf (pp, " [with %i region(s) for locals]", obj->get_num_locals ());
  logger->end_log_line ();
}
2004 :
2005 : /* Dump the number of objects that were managed by UNIQ_MAP to LOGGER.
2006 : If SHOW_OBJS is true, also dump the objects themselves. */
2007 :
template <typename K, typename T>
static void
log_uniq_map (logger *logger, bool show_objs, const char *title,
	      const hash_map<K, T*> &uniq_map)
{
  logger->log ("  # %s: %li", title, (long)uniq_map.elements ());
  if (!show_objs)
    return;
  /* Copy the managed objects into a vec and sort them via T::cmp_ptr_ptr,
     so that the dump order is stable rather than hash-table order.  */
  auto_vec<const T *> vec_objs (uniq_map.elements ());
  for (typename hash_map<K, T*>::iterator iter = uniq_map.begin ();
       iter != uniq_map.end (); ++iter)
    vec_objs.quick_push ((*iter).second);

  vec_objs.qsort (T::cmp_ptr_ptr);

  unsigned i;
  const T *obj;
  FOR_EACH_VEC_ELT (vec_objs, i, obj)
    log_managed_object<T> (logger, obj);
}
2028 :
2029 : /* Dump the number of objects that were managed by MAP to LOGGER.
2030 : If SHOW_OBJS is true, also dump the objects themselves. */
2031 :
template <typename T>
static void
log_uniq_map (logger *logger, bool show_objs, const char *title,
	      const consolidation_map<T> &map)
{
  logger->log ("  # %s: %li", title, (long)map.elements ());
  if (!show_objs)
    return;

  /* As for the hash_map overload: sort the objects via T::cmp_ptr_ptr
     so the dump order is deterministic.  */
  auto_vec<const T *> vec_objs (map.elements ());
  for (typename consolidation_map<T>::iterator iter = map.begin ();
       iter != map.end (); ++iter)
    vec_objs.quick_push ((*iter).second);

  vec_objs.qsort (T::cmp_ptr_ptr);

  unsigned i;
  const T *obj;
  FOR_EACH_VEC_ELT (vec_objs, i, obj)
    log_managed_object<T> (logger, obj);
}
2053 :
2054 : /* Dump the number of objects of each class that were managed by this
2055 : manager to LOGGER.
2056 : If SHOW_OBJS is true, also dump the objects themselves. */
2057 :
void
region_model_manager::log_stats (logger *logger, bool show_objs) const
{
  LOG_SCOPE (logger);
  logger->log ("call string consolidation");
  m_empty_call_string.recursive_log (logger);
  logger->log ("next symbol id: %i", m_next_symbol_id);
  /* Per-class counts (and optionally dumps) for each kind of svalue
     this manager consolidates.  */
  logger->log ("svalue consolidation");
  log_uniq_map (logger, show_objs, "constant_svalue", m_constants_map);
  log_uniq_map (logger, show_objs, "unknown_svalue", m_unknowns_map);
  /* The typeless unknown svalue is managed separately from the map.  */
  if (m_unknown_NULL)
    log_managed_object (logger, m_unknown_NULL);
  log_uniq_map (logger, show_objs, "poisoned_svalue", m_poisoned_values_map);
  log_uniq_map (logger, show_objs, "setjmp_svalue", m_setjmp_values_map);
  log_uniq_map (logger, show_objs, "initial_svalue", m_initial_values_map);
  log_uniq_map (logger, show_objs, "region_svalue", m_pointer_values_map);
  log_uniq_map (logger, show_objs, "unaryop_svalue", m_unaryop_values_map);
  log_uniq_map (logger, show_objs, "binop_svalue", m_binop_values_map);
  log_uniq_map (logger, show_objs, "sub_svalue", m_sub_values_map);
  log_uniq_map (logger, show_objs, "repeated_svalue", m_repeated_values_map);
  log_uniq_map (logger, show_objs, "bits_within_svalue",
		m_bits_within_values_map);
  log_uniq_map (logger, show_objs, "unmergeable_svalue",
		m_unmergeable_values_map);
  log_uniq_map (logger, show_objs, "widening_svalue", m_widening_values_map);
  log_uniq_map (logger, show_objs, "compound_svalue", m_compound_values_map);
  log_uniq_map (logger, show_objs, "conjured_svalue", m_conjured_values_map);
  log_uniq_map (logger, show_objs, "asm_output_svalue",
		m_asm_output_values_map);
  log_uniq_map (logger, show_objs, "const_fn_result_svalue",
		m_const_fn_result_values_map);

  logger->log ("max accepted svalue num_nodes: %i",
	       m_max_complexity.m_num_nodes);
  logger->log ("max accepted svalue max_depth: %i",
	       m_max_complexity.m_max_depth);

  /* Per-class counts for each kind of region this manager
     consolidates.  */
  logger->log ("region consolidation");
  log_uniq_map (logger, show_objs, "function_region", m_fndecls_map);
  log_uniq_map (logger, show_objs, "label_region", m_labels_map);
  log_uniq_map (logger, show_objs, "decl_region for globals", m_globals_map);
  log_uniq_map (logger, show_objs, "field_region", m_field_regions);
  log_uniq_map (logger, show_objs, "element_region", m_element_regions);
  log_uniq_map (logger, show_objs, "offset_region", m_offset_regions);
  log_uniq_map (logger, show_objs, "sized_region", m_sized_regions);
  log_uniq_map (logger, show_objs, "cast_region", m_cast_regions);
  log_uniq_map (logger, show_objs, "frame_region", m_frame_regions);
  log_uniq_map (logger, show_objs, "symbolic_region", m_symbolic_regions);
  log_uniq_map (logger, show_objs, "string_region", m_string_map);
  log_uniq_map (logger, show_objs, "bit_range_region", m_bit_range_regions);
  log_uniq_map (logger, show_objs, "var_arg_region", m_var_arg_regions);
  logger->log ("  # managed dynamic regions: %i",
	       m_managed_dynamic_regions.length ());
  /* Delegate to the sub-managers for their own stats.  */
  m_store_mgr.log_stats (logger, show_objs);
  m_range_mgr->log_stats (logger, show_objs);
}
2114 :
2115 : /* Dump the number of objects of each class that were managed by this
2116 : manager to LOGGER.
2117 : If SHOW_OBJS is true, also dump the objects themselves.
2118 : This is here so it can use log_uniq_map. */
2119 :
void
store_manager::log_stats (logger *logger, bool show_objs) const
{
  LOG_SCOPE (logger);
  /* Counts (and optionally dumps) of the binding keys this store
     manager consolidates.  */
  log_uniq_map (logger, show_objs, "concrete_binding",
		m_concrete_binding_key_mgr);
  log_uniq_map (logger, show_objs, "symbolic_binding",
		m_symbolic_binding_key_mgr);
}
2129 :
2130 : /* Emit a warning showing DECL_REG->tracked_p () for use in DejaGnu tests
2131 : (using -fdump-analyzer-untracked). */
2132 :
static void
dump_untracked_region (const decl_region *decl_reg)
{
  tree decl = decl_reg->get_decl ();
  /* Only report variables (not e.g. PARM_DECLs or RESULT_DECLs).  */
  if (TREE_CODE (decl) != VAR_DECL)
    return;
  /* For now, don't emit the status of decls in the constant pool, to avoid
     differences in DejaGnu test results between targets that use these vs
     those that don't.
     (Eventually these decls should probably be untracked and we should test
     for that, but that's not stage 4 material).  */
  if (DECL_IN_CONSTANT_POOL (decl))
    return;
  /* Option 0: this diagnostic is not controlled by any -W flag; its text
     is matched verbatim by DejaGnu tests, so keep it stable.  */
  warning_at (DECL_SOURCE_LOCATION (decl), 0,
	      "track %qD: %s",
	      decl, (decl_reg->tracked_p () ? "yes" : "no"));
}
2150 :
2151 : /* Implementation of -fdump-analyzer-untracked. */
2152 :
2153 : void
2154 23 : region_model_manager::dump_untracked_regions () const
2155 : {
2156 74 : for (auto iter : m_globals_map)
2157 : {
2158 51 : const decl_region *decl_reg = iter.second;
2159 51 : dump_untracked_region (decl_reg);
2160 : }
2161 75 : for (auto frame_iter : m_frame_regions)
2162 : {
2163 52 : const frame_region *frame_reg = frame_iter.second;
2164 52 : frame_reg->dump_untracked_regions ();
2165 : }
2166 23 : }
2167 :
2168 : void
2169 52 : frame_region::dump_untracked_regions () const
2170 : {
2171 163 : for (auto iter : m_locals)
2172 : {
2173 111 : const decl_region *decl_reg = iter.second;
2174 111 : dump_untracked_region (decl_reg);
2175 : }
2176 52 : }
2177 :
2178 : } // namespace ana
2179 :
2180 : #endif /* #if ENABLE_ANALYZER */
|