Line data Source code
1 : /* Classes for modeling the state of memory.
2 : Copyright (C) 2019-2026 Free Software Foundation, Inc.
3 : Contributed by David Malcolm <dmalcolm@redhat.com>.
4 :
5 : This file is part of GCC.
6 :
7 : GCC is free software; you can redistribute it and/or modify it
8 : under the terms of the GNU General Public License as published by
9 : the Free Software Foundation; either version 3, or (at your option)
10 : any later version.
11 :
12 : GCC is distributed in the hope that it will be useful, but
13 : WITHOUT ANY WARRANTY; without even the implied warranty of
14 : MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
15 : General Public License for more details.
16 :
17 : You should have received a copy of the GNU General Public License
18 : along with GCC; see the file COPYING3. If not see
19 : <http://www.gnu.org/licenses/>. */
20 :
21 : #define INCLUDE_ALGORITHM
22 : #include "analyzer/common.h"
23 :
24 : #include "ordered-hash-map.h"
25 : #include "options.h"
26 : #include "cgraph.h"
27 : #include "cfg.h"
28 : #include "sbitmap.h"
29 : #include "diagnostics/event-id.h"
30 : #include "stor-layout.h"
31 : #include "stringpool.h"
32 : #include "attribs.h"
33 : #include "tree-object-size.h"
34 : #include "gimple-ssa.h"
35 : #include "tree-phinodes.h"
36 : #include "tree-ssa-operands.h"
37 : #include "ssa-iterators.h"
38 : #include "target.h"
39 : #include "calls.h"
40 : #include "is-a.h"
41 : #include "gcc-rich-location.h"
42 : #include "gcc-urlifier.h"
43 : #include "diagnostics/sarif-sink.h"
44 : #include "tree-pretty-print.h"
45 : #include "fold-const.h"
46 : #include "selftest-tree.h"
47 : #include "context.h"
48 : #include "channels.h"
49 : #include "value-relation.h"
50 : #include "range-op.h"
51 :
52 : #include "text-art/tree-widget.h"
53 :
54 : #include "analyzer/analyzer-logging.h"
55 : #include "analyzer/supergraph.h"
56 : #include "analyzer/call-string.h"
57 : #include "analyzer/program-point.h"
58 : #include "analyzer/store.h"
59 : #include "analyzer/region-model.h"
60 : #include "analyzer/constraint-manager.h"
61 : #include "analyzer/sm.h"
62 : #include "analyzer/pending-diagnostic.h"
63 : #include "analyzer/region-model-reachability.h"
64 : #include "analyzer/analyzer-selftests.h"
65 : #include "analyzer/program-state.h"
66 : #include "analyzer/call-summary.h"
67 : #include "analyzer/checker-event.h"
68 : #include "analyzer/checker-path.h"
69 : #include "analyzer/feasible-graph.h"
70 : #include "analyzer/record-layout.h"
71 : #include "analyzer/function-set.h"
72 : #include "analyzer/state-transition.h"
73 :
74 : #if ENABLE_ANALYZER
75 :
76 : namespace ana {
77 :
78 : /* Dump T to PP in language-independent form, for debugging/logging/dumping
79 : purposes. */
80 :
void
dump_tree (pretty_printer *pp, tree t)
{
  /* Delegate to the generic tree dumper; TDF_SLIM keeps the output
     compact (no function bodies or full type details).  */
  dump_generic_node (pp, t, 0, TDF_SLIM, 0);
}
86 :
87 : /* Dump T to PP in language-independent form in quotes, for
88 : debugging/logging/dumping purposes. */
89 :
90 : void
91 1366 : dump_quoted_tree (pretty_printer *pp, tree t)
92 : {
93 1366 : pp_begin_quote (pp, pp_show_color (pp));
94 1366 : dump_tree (pp, t);
95 1366 : pp_end_quote (pp, pp_show_color (pp));
96 1366 : }
97 :
98 : /* Equivalent to pp_printf (pp, "%qT", t), to avoid nesting pp_printf
99 : calls within other pp_printf calls.
100 :
101 : default_tree_printer handles 'T' and some other codes by calling
102 : dump_generic_node (pp, t, 0, TDF_SLIM, 0);
103 : dump_generic_node calls pp_printf in various places, leading to
104 : garbled output.
105 :
106 : Ideally pp_printf could be made to be reentrant, but in the meantime
107 : this function provides a workaround. */
108 :
109 : void
110 4448 : print_quoted_type (pretty_printer *pp, tree t)
111 : {
112 4448 : if (!t)
113 : return;
114 4355 : pp_begin_quote (pp, pp_show_color (pp));
115 4355 : dump_generic_node (pp, t, 0, TDF_SLIM, 0);
116 4355 : pp_end_quote (pp, pp_show_color (pp));
117 : }
118 :
119 : /* Print EXPR to PP, without quotes.
120 : For use within svalue::maybe_print_for_user
121 : and region::maybe_print_for_user. */
122 :
123 : void
124 38 : print_expr_for_user (pretty_printer *pp, tree expr)
125 : {
126 : /* Workaround for C++'s lang_hooks.decl_printable_name,
127 : which unhelpfully (for us) prefixes the decl with its
128 : type. */
129 38 : if (DECL_P (expr))
130 38 : dump_generic_node (pp, expr, 0, TDF_SLIM, 0);
131 : else
132 0 : pp_printf (pp, "%E", expr);
133 38 : }
134 :
135 : /* class region_to_value_map. */
136 :
137 : /* Assignment operator for region_to_value_map. */
138 :
139 : region_to_value_map &
140 97771 : region_to_value_map::operator= (const region_to_value_map &other)
141 : {
142 97771 : m_hash_map.empty ();
143 126724 : for (auto iter : other.m_hash_map)
144 : {
145 28953 : const region *reg = iter.first;
146 28953 : const svalue *sval = iter.second;
147 28953 : m_hash_map.put (reg, sval);
148 : }
149 97771 : return *this;
150 : }
151 :
152 : /* Equality operator for region_to_value_map. */
153 :
154 : bool
155 447526 : region_to_value_map::operator== (const region_to_value_map &other) const
156 : {
157 447526 : if (m_hash_map.elements () != other.m_hash_map.elements ())
158 : return false;
159 :
160 1156210 : for (auto iter : *this)
161 : {
162 133177 : const region *reg = iter.first;
163 133177 : const svalue *sval = iter.second;
164 133177 : const svalue * const *other_slot = other.get (reg);
165 133177 : if (other_slot == nullptr)
166 58 : return false;
167 133147 : if (sval != *other_slot)
168 : return false;
169 : }
170 :
171 444928 : return true;
172 : }
173 :
174 : /* Dump this object to PP. */
175 :
176 : void
177 416 : region_to_value_map::dump_to_pp (pretty_printer *pp, bool simple,
178 : bool multiline) const
179 : {
180 416 : auto_vec<const region *> regs;
181 1248 : for (iterator iter = begin (); iter != end (); ++iter)
182 416 : regs.safe_push ((*iter).first);
183 416 : regs.qsort (region::cmp_ptr_ptr);
184 416 : if (multiline)
185 416 : pp_newline (pp);
186 : else
187 0 : pp_string (pp, " {");
188 : unsigned i;
189 : const region *reg;
190 832 : FOR_EACH_VEC_ELT (regs, i, reg)
191 : {
192 416 : if (multiline)
193 416 : pp_string (pp, " ");
194 0 : else if (i > 0)
195 0 : pp_string (pp, ", ");
196 416 : reg->dump_to_pp (pp, simple);
197 416 : pp_string (pp, ": ");
198 416 : const svalue *sval = *get (reg);
199 416 : sval->dump_to_pp (pp, true);
200 416 : if (multiline)
201 416 : pp_newline (pp);
202 : }
203 416 : if (!multiline)
204 0 : pp_string (pp, "}");
205 416 : }
206 :
207 : /* Dump this object to stderr. */
208 :
DEBUG_FUNCTION void
region_to_value_map::dump (bool simple) const
{
  /* Write a multiline dump to stderr, for use from the debugger.  */
  tree_dump_pretty_printer pp (stderr);
  dump_to_pp (&pp, simple, true);
  pp_newline (&pp);
}
216 :
217 : /* Generate a JSON value for this region_to_value_map.
218 : This is intended for debugging the analyzer rather than
219 : serialization. */
220 :
221 : std::unique_ptr<json::object>
222 4 : region_to_value_map::to_json () const
223 : {
224 4 : auto map_obj = std::make_unique<json::object> ();
225 :
226 4 : auto_vec<const region *> regs;
227 4 : for (iterator iter = begin (); iter != end (); ++iter)
228 0 : regs.safe_push ((*iter).first);
229 4 : regs.qsort (region::cmp_ptr_ptr);
230 :
231 : unsigned i;
232 : const region *reg;
233 4 : FOR_EACH_VEC_ELT (regs, i, reg)
234 : {
235 0 : label_text reg_desc = reg->get_desc ();
236 0 : const svalue *sval = *get (reg);
237 0 : map_obj->set (reg_desc.get (), sval->to_json ());
238 0 : }
239 :
240 4 : return map_obj;
241 4 : }
242 :
243 : std::unique_ptr<text_art::tree_widget>
244 4 : region_to_value_map::
245 : make_dump_widget (const text_art::dump_widget_info &dwi) const
246 : {
247 4 : if (is_empty ())
248 4 : return nullptr;
249 :
250 0 : std::unique_ptr<text_art::tree_widget> w
251 0 : (text_art::tree_widget::make (dwi, "Dynamic Extents"));
252 :
253 0 : auto_vec<const region *> regs;
254 0 : for (iterator iter = begin (); iter != end (); ++iter)
255 0 : regs.safe_push ((*iter).first);
256 0 : regs.qsort (region::cmp_ptr_ptr);
257 :
258 : unsigned i;
259 : const region *reg;
260 0 : FOR_EACH_VEC_ELT (regs, i, reg)
261 : {
262 0 : pretty_printer the_pp;
263 0 : pretty_printer * const pp = &the_pp;
264 0 : pp_format_decoder (pp) = default_tree_printer;
265 0 : const bool simple = true;
266 :
267 0 : reg->dump_to_pp (pp, simple);
268 0 : pp_string (pp, ": ");
269 0 : const svalue *sval = *get (reg);
270 0 : sval->dump_to_pp (pp, true);
271 0 : w->add_child (text_art::tree_widget::make (dwi, pp));
272 0 : }
273 0 : return w;
274 0 : }
275 :
276 : /* Attempt to merge THIS with OTHER, writing the result
277 : to OUT.
278 :
279 : For now, write (region, value) mappings that are in common between THIS
280 : and OTHER to OUT, effectively taking the intersection.
281 :
282 : Reject merger of different values. */
283 :
284 : bool
285 42267 : region_to_value_map::can_merge_with_p (const region_to_value_map &other,
286 : region_to_value_map *out) const
287 : {
288 57321 : for (auto iter : *this)
289 : {
290 9451 : const region *iter_reg = iter.first;
291 9451 : const svalue *iter_sval = iter.second;
292 9451 : const svalue * const * other_slot = other.get (iter_reg);
293 9451 : if (other_slot)
294 : {
295 9167 : if (iter_sval == *other_slot)
296 7243 : out->put (iter_reg, iter_sval);
297 : else
298 1924 : return false;
299 : }
300 : }
301 40343 : return true;
302 : }
303 :
304 : /* Purge any state involving SVAL. */
305 :
306 : void
307 30046 : region_to_value_map::purge_state_involving (const svalue *sval)
308 : {
309 30046 : auto_vec<const region *> to_purge;
310 76532 : for (auto iter : *this)
311 : {
312 23243 : const region *iter_reg = iter.first;
313 23243 : const svalue *iter_sval = iter.second;
314 23243 : if (iter_reg->involves_p (sval) || iter_sval->involves_p (sval))
315 26 : to_purge.safe_push (iter_reg);
316 : }
317 30124 : for (auto iter : to_purge)
318 26 : m_hash_map.remove (iter);
319 30046 : }
320 :
321 : // struct exception_node
322 :
323 : bool
324 10623 : exception_node::operator== (const exception_node &other) const
325 : {
326 10623 : return (m_exception_sval == other.m_exception_sval
327 10623 : && m_typeinfo_sval == other.m_typeinfo_sval
328 21246 : && m_destructor_sval == other.m_destructor_sval);
329 : }
330 :
331 : void
332 6 : exception_node::dump_to_pp (pretty_printer *pp,
333 : bool simple) const
334 : {
335 6 : pp_printf (pp, "{exception: ");
336 6 : m_exception_sval->dump_to_pp (pp, simple);
337 6 : pp_string (pp, ", typeinfo: ");
338 6 : m_typeinfo_sval->dump_to_pp (pp, simple);
339 6 : pp_string (pp, ", destructor: ");
340 6 : m_destructor_sval->dump_to_pp (pp, simple);
341 6 : pp_string (pp, "}");
342 6 : }
343 :
void
exception_node::dump (FILE *fp, bool simple) const
{
  /* Single-line dump of this exception_node to FP.  */
  tree_dump_pretty_printer pp (fp);
  dump_to_pp (&pp, simple);
  pp_newline (&pp);
}
351 :
352 : /* Dump a multiline representation of this model to stderr. */
353 :
DEBUG_FUNCTION void
exception_node::dump (bool simple) const
{
  /* Convenience overload for use from the debugger: dump to stderr.  */
  dump (stderr, simple);
}
359 :
DEBUG_FUNCTION void
exception_node::dump () const
{
  /* Tree-art dump to stderr, via make_dump_widget.  */
  text_art::dump (*this);
}
365 :
366 : std::unique_ptr<json::object>
367 0 : exception_node::to_json () const
368 : {
369 0 : auto obj = std::make_unique<json::object> ();
370 0 : obj->set ("exception", m_exception_sval->to_json ());
371 0 : obj->set ("typeinfo", m_typeinfo_sval->to_json ());
372 0 : obj->set ("destructor", m_destructor_sval->to_json ());
373 0 : return obj;
374 : }
375 :
376 : std::unique_ptr<text_art::tree_widget>
377 0 : exception_node::make_dump_widget (const text_art::dump_widget_info &dwi) const
378 : {
379 0 : using text_art::tree_widget;
380 0 : std::unique_ptr<tree_widget> w
381 0 : (tree_widget::from_fmt (dwi, nullptr, "Exception Node"));
382 :
383 0 : w->add_child (m_exception_sval->make_dump_widget (dwi, "exception"));
384 0 : w->add_child (m_typeinfo_sval->make_dump_widget (dwi, "typeinfo"));
385 0 : w->add_child (m_destructor_sval->make_dump_widget (dwi, "destructor"));
386 :
387 0 : return w;
388 : }
389 :
tree
exception_node::maybe_get_type () const
{
  /* Ask the typeinfo svalue for the exception's type; may return
     NULL_TREE if it can't be determined.  */
  return m_typeinfo_sval->maybe_get_type_from_typeinfo ();
}
395 :
void
exception_node::add_to_reachable_regions (reachable_regions &regs) const
{
  /* All three svalues of an in-flight exception are reachable.  */
  regs.handle_sval (m_exception_sval);
  regs.handle_sval (m_typeinfo_sval);
  regs.handle_sval (m_destructor_sval);
}
403 :
404 : /* class region_model. */
405 :
406 : /* Ctor for region_model: construct an "empty" model. */
407 :
region_model::region_model (region_model_manager *mgr)
: m_mgr (mgr), m_store (), m_current_frame (nullptr),
  m_thrown_exceptions_stack (),
  m_caught_exceptions_stack (),
  m_dynamic_extents ()
{
  /* The constraint manager is heap-allocated and owned by this model;
     released in the dtor.  */
  m_constraints = new constraint_manager (mgr);
}
416 :
417 : /* region_model's copy ctor. */
418 :
region_model::region_model (const region_model &other)
: m_mgr (other.m_mgr), m_store (other.m_store),
  /* Deep-copy the constraint manager; each model owns its own.  */
  m_constraints (new constraint_manager (*other.m_constraints)),
  m_current_frame (other.m_current_frame),
  m_thrown_exceptions_stack (other.m_thrown_exceptions_stack),
  m_caught_exceptions_stack (other.m_caught_exceptions_stack),
  m_dynamic_extents (other.m_dynamic_extents)
{
}
428 :
429 : /* region_model's dtor. */
430 :
region_model::~region_model ()
{
  /* Release the owned constraint manager (allocated in the ctor,
     copy ctor, or operator=).  */
  delete m_constraints;
}
435 :
436 : /* region_model's assignment operator. */
437 :
438 : region_model &
439 97771 : region_model::operator= (const region_model &other)
440 : {
441 : /* m_mgr is const. */
442 97771 : gcc_assert (m_mgr == other.m_mgr);
443 :
444 97771 : m_store = other.m_store;
445 :
446 97771 : delete m_constraints;
447 97771 : m_constraints = new constraint_manager (*other.m_constraints);
448 :
449 97771 : m_current_frame = other.m_current_frame;
450 :
451 97771 : m_thrown_exceptions_stack = other.m_thrown_exceptions_stack;
452 97771 : m_caught_exceptions_stack = other.m_caught_exceptions_stack;
453 :
454 97771 : m_dynamic_extents = other.m_dynamic_extents;
455 :
456 97771 : return *this;
457 : }
458 :
459 : /* Equality operator for region_model.
460 :
461 : Amongst other things this directly compares the stores and the constraint
462 : managers, so for this to be meaningful both this and OTHER should
463 : have been canonicalized. */
464 :
bool
region_model::operator== (const region_model &other) const
{
  /* We can only compare instances that use the same manager.  */
  gcc_assert (m_mgr == other.m_mgr);

  /* Compare each component in turn, cheapest-rejection first is not
     attempted; the order mirrors the member declaration order.  */
  if (m_store != other.m_store)
    return false;

  if (*m_constraints != *other.m_constraints)
    return false;

  if (m_current_frame != other.m_current_frame)
    return false;

  if (m_thrown_exceptions_stack != other.m_thrown_exceptions_stack)
    return false;
  if (m_caught_exceptions_stack != other.m_caught_exceptions_stack)
    return false;

  if (m_dynamic_extents != other.m_dynamic_extents)
    return false;

  /* Sanity-check (checking builds only): equal models must hash
     equal.  Note that hash () only covers store and constraints.  */
  gcc_checking_assert (hash () == other.hash ());

  return true;
}
492 :
493 : /* Generate a hash value for this region_model. */
494 :
495 : hashval_t
496 1249820 : region_model::hash () const
497 : {
498 1249820 : hashval_t result = m_store.hash ();
499 1249820 : result ^= m_constraints->hash ();
500 1249820 : return result;
501 : }
502 :
503 : /* Dump a representation of this model to PP, showing the
504 : stack, the store, and any constraints.
505 : Use SIMPLE to control how svalues and regions are printed. */
506 :
507 : void
508 2126 : region_model::dump_to_pp (pretty_printer *pp, bool simple,
509 : bool multiline) const
510 : {
511 : /* Dump frame stack. */
512 2126 : pp_printf (pp, "stack depth: %i", get_stack_depth ());
513 2126 : if (multiline)
514 545 : pp_newline (pp);
515 : else
516 1581 : pp_string (pp, " {");
517 4228 : for (const frame_region *iter_frame = m_current_frame; iter_frame;
518 2102 : iter_frame = iter_frame->get_calling_frame ())
519 : {
520 2102 : if (multiline)
521 549 : pp_string (pp, " ");
522 1553 : else if (iter_frame != m_current_frame)
523 0 : pp_string (pp, ", ");
524 2102 : pp_printf (pp, "frame (index %i): ", iter_frame->get_index ());
525 2102 : iter_frame->dump_to_pp (pp, simple);
526 2102 : if (multiline)
527 549 : pp_newline (pp);
528 : }
529 2126 : if (!multiline)
530 1581 : pp_string (pp, "}");
531 :
532 : /* Dump exception stacks. */
533 2126 : if (m_thrown_exceptions_stack.size () > 0)
534 : {
535 6 : pp_printf (pp, "thrown exceptions: %i", (int)m_thrown_exceptions_stack.size ());
536 6 : if (multiline)
537 6 : pp_newline (pp);
538 : else
539 0 : pp_string (pp, " {");
540 12 : for (size_t idx = 0; idx < m_thrown_exceptions_stack.size (); ++idx)
541 : {
542 6 : if (multiline)
543 6 : pp_string (pp, " ");
544 0 : else if (idx > 0)
545 0 : pp_string (pp, ", ");
546 6 : pp_printf (pp, "exception (index %i): ", (int)idx);
547 6 : m_thrown_exceptions_stack[idx].dump_to_pp (pp, simple);
548 6 : if (multiline)
549 6 : pp_newline (pp);
550 : }
551 6 : if (!multiline)
552 0 : pp_string (pp, "}");
553 : }
554 2126 : if (m_caught_exceptions_stack.size () > 0)
555 : {
556 0 : pp_printf (pp, "caught exceptions: %i", (int)m_caught_exceptions_stack.size ());
557 0 : if (multiline)
558 0 : pp_newline (pp);
559 : else
560 0 : pp_string (pp, " {");
561 0 : for (size_t idx = 0; idx < m_caught_exceptions_stack.size (); ++idx)
562 : {
563 0 : if (multiline)
564 0 : pp_string (pp, " ");
565 0 : else if (idx > 0)
566 0 : pp_string (pp, ", ");
567 0 : pp_printf (pp, "exception (index %i): ", (int)idx);
568 0 : m_caught_exceptions_stack[idx].dump_to_pp (pp, simple);
569 0 : if (multiline)
570 0 : pp_newline (pp);
571 : }
572 0 : if (!multiline)
573 0 : pp_string (pp, "}");
574 : }
575 :
576 : /* Dump store. */
577 2126 : if (!multiline)
578 1581 : pp_string (pp, ", {");
579 2126 : m_store.dump_to_pp (pp, simple, multiline,
580 2126 : m_mgr->get_store_manager ());
581 2126 : if (!multiline)
582 1581 : pp_string (pp, "}");
583 :
584 : /* Dump constraints. */
585 2126 : pp_string (pp, "constraint_manager:");
586 2126 : if (multiline)
587 545 : pp_newline (pp);
588 : else
589 1581 : pp_string (pp, " {");
590 2126 : m_constraints->dump_to_pp (pp, multiline);
591 2126 : if (!multiline)
592 1581 : pp_string (pp, "}");
593 :
594 : /* Dump sizes of dynamic regions, if any are known. */
595 2126 : if (!m_dynamic_extents.is_empty ())
596 : {
597 416 : pp_string (pp, "dynamic_extents:");
598 416 : m_dynamic_extents.dump_to_pp (pp, simple, multiline);
599 : }
600 2126 : }
601 :
602 : /* Dump a representation of this model to FILE. */
603 :
void
region_model::dump (FILE *fp, bool simple, bool multiline) const
{
  /* Dump this model to FP, delegating to dump_to_pp.  */
  tree_dump_pretty_printer pp (fp);
  dump_to_pp (&pp, simple, multiline);
  pp_newline (&pp);
}
611 :
612 : /* Dump a multiline representation of this model to stderr. */
613 :
DEBUG_FUNCTION void
region_model::dump (bool simple) const
{
  /* Convenience overload for the debugger: multiline dump to stderr.  */
  dump (stderr, simple, true);
}
619 :
620 : /* Dump a tree-like representation of this state to stderr. */
621 :
DEBUG_FUNCTION void
region_model::dump () const
{
  /* Tree-art dump to stderr, via make_dump_widget.  */
  text_art::dump (*this);
}
627 :
628 : /* Dump a multiline representation of this model to stderr. */
629 :
DEBUG_FUNCTION void
region_model::debug () const
{
  /* Alias for dump (true), kept for debugger convenience.  */
  dump (true);
}
635 :
636 : /* Generate a JSON value for this region_model.
637 : This is intended for debugging the analyzer rather than
638 : serialization. */
639 :
640 : std::unique_ptr<json::object>
641 4 : region_model::to_json () const
642 : {
643 4 : auto model_obj = std::make_unique<json::object> ();
644 4 : model_obj->set ("store", m_store.to_json ());
645 4 : model_obj->set ("constraints", m_constraints->to_json ());
646 4 : if (m_current_frame)
647 4 : model_obj->set ("current_frame", m_current_frame->to_json ());
648 :
649 4 : auto thrown_exceptions_arr = std::make_unique<json::array> ();
650 4 : for (auto &node : m_thrown_exceptions_stack)
651 0 : thrown_exceptions_arr->append (node.to_json ());
652 4 : model_obj->set ("thrown_exception_stack", std::move (thrown_exceptions_arr));
653 :
654 4 : auto caught_exceptions_arr = std::make_unique<json::array> ();
655 4 : for (auto &node : m_caught_exceptions_stack)
656 0 : caught_exceptions_arr->append (node.to_json ());
657 4 : model_obj->set ("caught_exception_stack", std::move (caught_exceptions_arr));
658 :
659 4 : model_obj->set ("dynamic_extents", m_dynamic_extents.to_json ());
660 8 : return model_obj;
661 4 : }
662 :
std::unique_ptr<text_art::tree_widget>
region_model::make_dump_widget (const text_art::dump_widget_info &dwi) const
{
  using text_art::tree_widget;
  std::unique_ptr<tree_widget> model_widget
    (tree_widget::from_fmt (dwi, nullptr, "Region Model"));

  /* Child 1 (optional): a one-line summary of the current frame.  */
  if (m_current_frame)
    {
      pretty_printer the_pp;
      pretty_printer * const pp = &the_pp;
      pp_format_decoder (pp) = default_tree_printer;
      pp_show_color (pp) = true;
      const bool simple = true;

      pp_string (pp, "Current Frame: ");
      m_current_frame->dump_to_pp (pp, simple);
      model_widget->add_child (tree_widget::make (dwi, pp));
    }

  /* Children (optional): the thrown and caught exception stacks,
     each as a subtree with one child per exception_node.  */
  if (m_thrown_exceptions_stack.size () > 0)
    {
      auto thrown_exceptions_widget
	= tree_widget::make (dwi, "Thrown Exceptions");
      for (auto &thrown_exception : m_thrown_exceptions_stack)
	thrown_exceptions_widget->add_child
	  (thrown_exception.make_dump_widget (dwi));
      model_widget->add_child (std::move (thrown_exceptions_widget));
    }
  if (m_caught_exceptions_stack.size () > 0)
    {
      auto caught_exceptions_widget
	= tree_widget::make (dwi, "Caught Exceptions");
      for (auto &caught_exception : m_caught_exceptions_stack)
	caught_exceptions_widget->add_child
	  (caught_exception.make_dump_widget (dwi));
      model_widget->add_child (std::move (caught_exceptions_widget));
    }

  /* Final children: store, constraints, and dynamic extents.  */
  model_widget->add_child
    (m_store.make_dump_widget (dwi,
			       m_mgr->get_store_manager ()));
  model_widget->add_child (m_constraints->make_dump_widget (dwi));
  model_widget->add_child (m_dynamic_extents.make_dump_widget (dwi));
  return model_widget;
}
709 :
710 : /* Assert that this object is valid. */
711 :
void
region_model::validate () const
{
  /* Currently only the store has internal invariants to check.  */
  m_store.validate ();
}
717 :
718 : /* Canonicalize the store and constraints, to maximize the chance of
719 : equality between region_model instances. */
720 :
void
region_model::canonicalize ()
{
  /* Canonicalize both mutable sub-objects; operator== assumes both
     operands have been canonicalized.  */
  m_store.canonicalize (m_mgr->get_store_manager ());
  m_constraints->canonicalize ();
}
727 :
728 : /* Return true if this region_model is in canonical form. */
729 :
730 : bool
731 392414 : region_model::canonicalized_p () const
732 : {
733 392414 : region_model copy (*this);
734 392414 : copy.canonicalize ();
735 392414 : return *this == copy;
736 392414 : }
737 :
738 : /* See the comment for store::loop_replay_fixup. */
739 :
void
region_model::loop_replay_fixup (const region_model *dst_state)
{
  /* Delegate to the store; see store::loop_replay_fixup for the
     rationale.  */
  m_store.loop_replay_fixup (dst_state->get_store (), m_mgr);
}
745 :
746 : /* A subclass of pending_diagnostic for complaining about pointer
747 : subtractions involving unrelated buffers. */
748 :
class undefined_ptrdiff_diagnostic
: public pending_diagnostic_subclass<undefined_ptrdiff_diagnostic>
{
public:
  /* Region_creation_event subclass to give a custom wording when
     talking about creation of buffers for LHS and RHS of the
     subtraction.  */
  class ptrdiff_region_creation_event : public region_creation_event
  {
  public:
    ptrdiff_region_creation_event (const event_loc_info &loc_info,
				   bool is_lhs)
    : region_creation_event (loc_info),
      m_is_lhs (is_lhs)
    {
    }

    void print_desc (pretty_printer &pp) const final override
    {
      if (m_is_lhs)
	pp_string (&pp,
		   "underlying object for left-hand side"
		   " of subtraction created here");
      else
	pp_string (&pp,
		   "underlying object for right-hand side"
		   " of subtraction created here");
    }

  private:
    /* True for the event describing the LHS buffer, false for RHS.  */
    bool m_is_lhs;
  };

  /* ASSIGN is the pointer-subtraction statement; SVAL_A/BASE_REG_A
     and SVAL_B/BASE_REG_B describe its two operands.  The two base
     regions must differ (that is what makes it a bug).  */
  undefined_ptrdiff_diagnostic (const gassign *assign,
				const svalue *sval_a,
				const svalue *sval_b,
				const region *base_reg_a,
				const region *base_reg_b)
  : m_assign (assign),
    m_sval_a (sval_a),
    m_sval_b (sval_b),
    m_base_reg_a (base_reg_a),
    m_base_reg_b (base_reg_b)
  {
    gcc_assert (m_base_reg_a != m_base_reg_b);
  }

  const char *get_kind () const final override
  {
    return "undefined_ptrdiff_diagnostic";
  }

  /* Deduplication: two instances are the same diagnostic iff every
     field matches.  */
  bool operator== (const undefined_ptrdiff_diagnostic &other) const
  {
    return (m_assign == other.m_assign
	    && m_sval_a == other.m_sval_a
	    && m_sval_b == other.m_sval_b
	    && m_base_reg_a == other.m_base_reg_a
	    && m_base_reg_b == other.m_base_reg_b);
  }

  int get_controlling_option () const final override
  {
    return OPT_Wanalyzer_undefined_behavior_ptrdiff;
  }

  bool emit (diagnostic_emission_context &ctxt) final override
  {
    /* CWE-469: Use of Pointer Subtraction to Determine Size.  */
    ctxt.add_cwe (469);
    return ctxt.warn ("undefined behavior when subtracting pointers");
  }

  /* Add a creation event for REG when it is one of the two base
     regions of interest; other regions get no event.  */
  void add_region_creation_events (const region *reg,
				   tree /*capacity*/,
				   const event_loc_info &loc_info,
				   checker_path &emission_path) final override
  {
    if (reg == m_base_reg_a)
      emission_path.add_event
	(std::make_unique<ptrdiff_region_creation_event> (loc_info, true));
    else if (reg == m_base_reg_b)
      emission_path.add_event
	(std::make_unique<ptrdiff_region_creation_event> (loc_info, false));
  }

  bool
  describe_final_event (pretty_printer &pp,
			const evdesc::final_event &) final override
  {
    pp_string (&pp,
	       "subtraction of pointers has undefined behavior if"
	       " they do not point into the same array object");
    return true;
  }

  void mark_interesting_stuff (interesting_t *interesting) final override
  {
    interesting->add_region_creation (m_base_reg_a);
    interesting->add_region_creation (m_base_reg_b);
  }

private:
  /* The subtraction statement.  */
  const gassign *m_assign;
  /* The two pointer operands.  */
  const svalue *m_sval_a;
  const svalue *m_sval_b;
  /* The (distinct) base regions the two pointers point into.  */
  const region *m_base_reg_a;
  const region *m_base_reg_b;
};
858 :
859 : /* Locate the parameter with the given index within FNDECL.
860 : ARGNUM is zero based, -1 indicates the `this' argument of a method.
861 : Return the location of the FNDECL itself if there are problems. */
862 :
863 : bool
864 23 : callsite_expr::maybe_get_param_location (tree fndecl,
865 : location_t *out_loc) const
866 : {
867 23 : gcc_assert (fndecl);
868 :
869 23 : if (DECL_ARTIFICIAL (fndecl))
870 : return false;
871 :
872 23 : tree param = get_param_tree (fndecl);
873 23 : if (!param)
874 : return false;
875 :
876 22 : *out_loc = DECL_SOURCE_LOCATION (param);
877 22 : return true;
878 : }
879 :
880 : /* If this callsite_expr refers to a parameter, get the PARM_DECL from
881 : FNDECL.
882 : Return NULL_TREE on any problems. */
883 :
884 : tree
885 69 : callsite_expr::get_param_tree (tree fndecl) const
886 : {
887 69 : if (!param_p ())
888 : return NULL_TREE;
889 :
890 66 : int i;
891 66 : tree param;
892 :
893 : /* Locate param by index within DECL_ARGUMENTS (fndecl). */
894 66 : for (i = 1, param = DECL_ARGUMENTS (fndecl);
895 126 : i < param_num () && param;
896 60 : i++, param = TREE_CHAIN (param))
897 : ;
898 :
899 : return param;
900 : }
901 :
902 : class div_by_zero_diagnostic
903 : : public pending_diagnostic_subclass<div_by_zero_diagnostic>
904 : {
905 : public:
906 74 : div_by_zero_diagnostic (const gassign *assign,
907 : const region *divisor_reg)
908 74 : : m_assign (assign),
909 74 : m_divisor_reg (divisor_reg)
910 : {}
911 :
912 409 : const char *get_kind () const final override
913 : {
914 409 : return "div_by_zero_diagnostic";
915 : }
916 :
917 74 : bool operator== (const div_by_zero_diagnostic &other) const
918 : {
919 74 : return m_assign == other.m_assign;
920 : }
921 :
922 107 : int get_controlling_option () const final override
923 : {
924 107 : return OPT_Wanalyzer_div_by_zero;
925 : }
926 :
927 33 : bool emit (diagnostic_emission_context &ctxt) final override
928 : {
929 33 : return ctxt.warn ("division by zero");
930 : }
931 :
932 : bool
933 66 : describe_final_event (pretty_printer &pp,
934 : const evdesc::final_event &) final override
935 : {
936 66 : pp_printf (&pp, "division by zero");
937 66 : return true;
938 : }
939 :
940 : void
941 66 : mark_interesting_stuff (interesting_t *interest)
942 : {
943 66 : interest->add_read_region (m_divisor_reg, "divisor zero value");
944 66 : }
945 :
946 : void
947 57 : add_function_entry_event (const exploded_edge &eedge,
948 : checker_path *emission_path,
949 : const state_transition_at_call *state_trans)
950 : {
951 0 : class custom_function_entry_event : public function_entry_event
952 : {
953 : public:
954 57 : custom_function_entry_event (const event_loc_info &loc_info,
955 : const program_state &state,
956 : const state_transition_at_call *state_trans)
957 : : function_entry_event (loc_info,
958 : state,
959 57 : state_trans)
960 : {
961 : }
962 :
963 72 : void print_desc (pretty_printer &pp) const override
964 : {
965 72 : if (auto state_trans = get_state_transition_at_call ())
966 : {
967 4 : auto expr = state_trans->get_callsite_expr ();
968 4 : if (tree parm = expr.get_param_tree (m_effective_fndecl))
969 : {
970 4 : auto src_event_id = state_trans->get_src_event_id ();
971 4 : if (src_event_id.known_p ())
972 4 : pp_printf (&pp, "entry to %qE with zero from %@ for %qE",
973 4 : m_effective_fndecl,
974 : &src_event_id,
975 : parm);
976 : else
977 0 : pp_printf (&pp, "entry to %qE with zero for %qE",
978 0 : m_effective_fndecl, parm);
979 4 : return;
980 : }
981 : }
982 68 : return function_entry_event::print_desc (pp);
983 : }
984 : };
985 :
986 57 : const exploded_node *dst_node = eedge.m_dest;
987 57 : const program_point &dst_point = dst_node->get_point ();
988 57 : const program_state &dst_state = dst_node->get_state ();
989 57 : auto loc_info {event_loc_info_for_function_entry (dst_point, state_trans)};
990 57 : emission_path->add_event
991 57 : (std::make_unique<custom_function_entry_event> (loc_info,
992 : dst_state,
993 : state_trans));
994 57 : }
995 :
996 : bool
997 44 : describe_origin_of_state (pretty_printer &pp,
998 : const evdesc::origin_of_state &) final override
999 : {
1000 44 : pp_printf (&pp, "zero value originates here");
1001 44 : return true;
1002 : }
1003 :
1004 : bool
1005 4 : describe_call_with_state (pretty_printer &pp,
1006 : const evdesc::call_with_state &evd) final override
1007 : {
 : /* If the zero value flows into the callee via a specific parameter
 : of the call, say so (referencing the originating event via %@
 : when its event id is known). */
1008 4 : if (evd.m_state_trans)
1009 : {
1010 4 : callsite_expr expr = evd.m_state_trans->get_callsite_expr ();
1011 4 : if (expr.param_p ())
1012 : {
1013 4 : if (evd.m_src_event_id.known_p ())
1014 4 : pp_printf (&pp, "passing zero from %@ from %qE to %qE via parameter %i",
1015 : &evd.m_src_event_id,
1016 4 : evd.m_caller_fndecl,
1017 4 : evd.m_callee_fndecl,
1018 : expr.param_num ());
1019 : else
1020 0 : pp_printf (&pp, "passing zero from %qE to %qE via parameter %i",
1021 0 : evd.m_caller_fndecl,
1022 0 : evd.m_callee_fndecl,
1023 : expr.param_num ());
1024 4 : return true;
1025 : }
1026 : }
1027 :
 : /* Not passed via a parameter: let the generic description be used. */
1028 : return false;
1029 : }
1030 :
1031 : bool
1032 24 : describe_return_of_state (pretty_printer &pp,
1033 : const evdesc::return_of_state &evd) final override
1034 : {
1035 24 : if (evd.m_src_event_id.known_p ())
1036 24 : pp_printf (&pp, "returning zero from %@ from %qE here",
1037 : &evd.m_src_event_id,
1038 24 : evd.m_callee_fndecl);
1039 : else
1040 0 : pp_printf (&pp, "returning zero from %qE here",
1041 0 : evd.m_callee_fndecl);
1042 24 : return true;
1043 : }
1044 :
1045 : bool
1046 12 : describe_copy_of_state (pretty_printer &pp,
1047 : const evdesc::copy_of_state &evd) final override
1048 : {
1049 12 : if (evd.m_src_event_id.known_p ())
1050 12 : pp_printf (&pp, "copying zero value from %@ from %qE to %qE",
1051 : &evd.m_src_event_id,
1052 12 : evd.m_src_reg_expr, evd.m_dst_reg_expr);
1053 : else
1054 0 : pp_printf (&pp, "copying zero value from %qE to %qE",
1055 0 : evd.m_src_reg_expr, evd.m_dst_reg_expr);
1056 12 : return true;
1057 : }
1058 :
1059 : bool
1060 16 : describe_use_of_state (pretty_printer &pp,
1061 : const evdesc::use_of_state &evd) final override
1062 : {
1063 16 : if (evd.m_src_event_id.known_p ())
1064 8 : pp_printf (&pp, "using zero value from %@ from %qE",
1065 : &evd.m_src_event_id,
1066 8 : evd.m_src_reg_expr);
1067 : else
1068 8 : pp_printf (&pp, "using zero value from %qE",
1069 8 : evd.m_src_reg_expr);
1070 16 : return true;
1071 : }
1072 :
1073 : private:
1074 : const gassign *m_assign;
1075 : const region *m_divisor_reg;
1076 : };
1077 :
1078 : /* Check the pointer subtraction SVAL_A - SVAL_B at ASSIGN and add
1079 : a warning to CTXT if they're not within the same base region. */
1080 :
1081 : static void
1082 582 : check_for_invalid_ptrdiff (const gassign *assign,
1083 : region_model_context &ctxt,
1084 : const svalue *sval_a, const svalue *sval_b)
1085 : {
1086 582 : const region *base_reg_a = sval_a->maybe_get_deref_base_region ();
1087 582 : if (!base_reg_a)
1088 518 : return;
1089 102 : const region *base_reg_b = sval_b->maybe_get_deref_base_region ();
1090 102 : if (!base_reg_b)
1091 : return;
1092 :
1093 78 : if (base_reg_a == base_reg_b)
1094 : return;
1095 :
1096 64 : if (base_reg_a->get_kind () == RK_SYMBOLIC)
1097 : return;
1098 64 : if (base_reg_b->get_kind () == RK_SYMBOLIC)
1099 : return;
1100 :
1101 64 : ctxt.warn
1102 64 : (std::make_unique<undefined_ptrdiff_diagnostic> (assign,
1103 : sval_a,
1104 : sval_b,
1105 : base_reg_a,
1106 : base_reg_b));
1107 : }
1108 :
1109 : /* If ASSIGN is a stmt that can be modelled via
1110 : set_value (lhs_reg, SVALUE, CTXT)
1111 : for some SVALUE, get the SVALUE.
1112 : Otherwise return nullptr. */
1113 :
1114 : const svalue *
1115 420026 : region_model::get_gassign_result (const gassign *assign,
1116 : region_model_context *ctxt)
1117 : {
1118 420026 : tree lhs = gimple_assign_lhs (assign);
1119 :
1120 420026 : if (gimple_has_volatile_ops (assign)
1121 420026 : && !gimple_clobber_p (assign))
1122 : {
1123 116 : conjured_purge p (this, ctxt);
1124 116 : return m_mgr->get_or_create_conjured_svalue (TREE_TYPE (lhs),
1125 : assign,
1126 : get_lvalue (lhs, ctxt),
1127 : p);
1128 : }
1129 :
1130 419910 : tree rhs1 = gimple_assign_rhs1 (assign);
1131 419910 : enum tree_code op = gimple_assign_rhs_code (assign);
1132 419910 : switch (op)
1133 : {
1134 : default:
1135 : return nullptr;
1136 :
1137 39415 : case POINTER_PLUS_EXPR:
1138 39415 : {
1139 : /* e.g. "_1 = a_10(D) + 12;" */
1140 39415 : tree ptr = rhs1;
1141 39415 : tree offset = gimple_assign_rhs2 (assign);
1142 :
1143 39415 : const svalue *ptr_sval = get_rvalue (ptr, ctxt);
1144 39415 : const svalue *offset_sval = get_rvalue (offset, ctxt);
1145 : /* Quoting tree.def, "the second operand [of a POINTER_PLUS_EXPR]
1146 : is an integer of type sizetype". */
1147 39415 : offset_sval = m_mgr->get_or_create_cast (size_type_node, offset_sval);
1148 :
1149 39415 : const svalue *sval_binop
1150 39415 : = m_mgr->get_or_create_binop (TREE_TYPE (lhs), op,
1151 : ptr_sval, offset_sval);
1152 39415 : return sval_binop;
1153 : }
1154 752 : break;
1155 :
1156 752 : case POINTER_DIFF_EXPR:
1157 752 : {
1158 : /* e.g. "_1 = p_2(D) - q_3(D);". */
1159 752 : tree rhs2 = gimple_assign_rhs2 (assign);
1160 752 : const svalue *rhs1_sval = get_rvalue (rhs1, ctxt);
1161 752 : const svalue *rhs2_sval = get_rvalue (rhs2, ctxt);
1162 :
1163 : // TODO: perhaps fold to zero if they're known to be equal?
1164 :
1165 752 : if (ctxt)
1166 582 : check_for_invalid_ptrdiff (assign, *ctxt, rhs1_sval, rhs2_sval);
1167 :
1168 752 : const svalue *sval_binop
1169 752 : = m_mgr->get_or_create_binop (TREE_TYPE (lhs), op,
1170 : rhs1_sval, rhs2_sval);
1171 752 : return sval_binop;
1172 : }
1173 207187 : break;
1174 :
1175 : /* Assignments of the form
1176 : set_value (lvalue (LHS), rvalue (EXPR))
1177 : for various EXPR.
1178 : We already have the lvalue for the LHS above, as "lhs_reg". */
1179 207187 : case ADDR_EXPR: /* LHS = &RHS; */
1180 207187 : case BIT_FIELD_REF:
1181 207187 : case COMPONENT_REF: /* LHS = op0.op1; */
1182 207187 : case MEM_REF:
1183 207187 : case REAL_CST:
1184 207187 : case COMPLEX_CST:
1185 207187 : case VECTOR_CST:
1186 207187 : case INTEGER_CST:
1187 207187 : case ARRAY_REF:
1188 207187 : case SSA_NAME: /* LHS = VAR; */
1189 207187 : case VAR_DECL: /* LHS = VAR; */
1190 207187 : case PARM_DECL:/* LHS = VAR; */
1191 207187 : case REALPART_EXPR:
1192 207187 : case IMAGPART_EXPR:
1193 207187 : return get_rvalue (rhs1, ctxt);
1194 :
1195 65584 : case ABS_EXPR:
1196 65584 : case ABSU_EXPR:
1197 65584 : case CONJ_EXPR:
1198 65584 : case BIT_NOT_EXPR:
1199 65584 : case FIX_TRUNC_EXPR:
1200 65584 : case FLOAT_EXPR:
1201 65584 : case NEGATE_EXPR:
1202 65584 : case NOP_EXPR:
1203 65584 : case VIEW_CONVERT_EXPR:
1204 65584 : {
1205 : /* Unary ops. */
1206 65584 : const svalue *rhs_sval = get_rvalue (rhs1, ctxt);
1207 65584 : const svalue *sval_unaryop
1208 65584 : = m_mgr->get_or_create_unaryop (TREE_TYPE (lhs), op, rhs_sval);
1209 65584 : return sval_unaryop;
1210 : }
1211 :
1212 15196 : case EQ_EXPR:
1213 15196 : case GE_EXPR:
1214 15196 : case LE_EXPR:
1215 15196 : case NE_EXPR:
1216 15196 : case GT_EXPR:
1217 15196 : case LT_EXPR:
1218 15196 : case UNORDERED_EXPR:
1219 15196 : case ORDERED_EXPR:
1220 15196 : {
1221 15196 : tree rhs2 = gimple_assign_rhs2 (assign);
1222 :
1223 15196 : const svalue *rhs1_sval = get_rvalue (rhs1, ctxt);
1224 15196 : const svalue *rhs2_sval = get_rvalue (rhs2, ctxt);
1225 :
1226 15196 : if (TREE_TYPE (lhs) == boolean_type_node)
1227 : {
1228 : /* Consider constraints between svalues. */
1229 15049 : tristate t = eval_condition (rhs1_sval, op, rhs2_sval);
1230 15049 : if (t.is_known ())
1231 8422 : return m_mgr->get_or_create_constant_svalue
1232 8422 : (t.is_true () ? boolean_true_node : boolean_false_node);
1233 : }
1234 :
1235 : /* Otherwise, generate a symbolic binary op. */
1236 6774 : const svalue *sval_binop
1237 6774 : = m_mgr->get_or_create_binop (TREE_TYPE (lhs), op,
1238 : rhs1_sval, rhs2_sval);
1239 6774 : return sval_binop;
1240 : }
1241 78664 : break;
1242 :
1243 78664 : case PLUS_EXPR:
1244 78664 : case MINUS_EXPR:
1245 78664 : case MULT_EXPR:
1246 78664 : case MULT_HIGHPART_EXPR:
1247 78664 : case TRUNC_DIV_EXPR:
1248 78664 : case CEIL_DIV_EXPR:
1249 78664 : case FLOOR_DIV_EXPR:
1250 78664 : case ROUND_DIV_EXPR:
1251 78664 : case TRUNC_MOD_EXPR:
1252 78664 : case CEIL_MOD_EXPR:
1253 78664 : case FLOOR_MOD_EXPR:
1254 78664 : case ROUND_MOD_EXPR:
1255 78664 : case RDIV_EXPR:
1256 78664 : case EXACT_DIV_EXPR:
1257 78664 : case LSHIFT_EXPR:
1258 78664 : case RSHIFT_EXPR:
1259 78664 : case LROTATE_EXPR:
1260 78664 : case RROTATE_EXPR:
1261 78664 : case BIT_IOR_EXPR:
1262 78664 : case BIT_XOR_EXPR:
1263 78664 : case BIT_AND_EXPR:
1264 78664 : case MIN_EXPR:
1265 78664 : case MAX_EXPR:
1266 78664 : case COMPLEX_EXPR:
1267 78664 : {
1268 : /* Binary ops. */
1269 78664 : tree rhs2 = gimple_assign_rhs2 (assign);
1270 :
1271 78664 : const svalue *rhs1_sval = get_rvalue (rhs1, ctxt);
1272 78664 : const svalue *rhs2_sval = get_rvalue (rhs2, ctxt);
1273 :
1274 78664 : if (ctxt && (op == LSHIFT_EXPR || op == RSHIFT_EXPR))
1275 : {
1276 : /* "INT34-C. Do not shift an expression by a negative number of bits
1277 : or by greater than or equal to the number of bits that exist in
1278 : the operand." */
1279 13757 : if (const tree rhs2_cst = rhs2_sval->maybe_get_constant ())
1280 13461 : if (TREE_CODE (rhs2_cst) == INTEGER_CST
1281 13461 : && INTEGRAL_TYPE_P (TREE_TYPE (rhs1)))
1282 : {
1283 13459 : if (tree_int_cst_sgn (rhs2_cst) < 0)
1284 : {
1285 24 : const region *rhs2_reg
1286 48 : = get_lvalue (gimple_assign_rhs2 (assign), nullptr);
1287 24 : ctxt->warn
1288 24 : (make_shift_count_negative_diagnostic (assign,
1289 : rhs2_cst,
1290 : rhs2_reg));
1291 : }
1292 13435 : else if (compare_tree_int (rhs2_cst,
1293 13435 : TYPE_PRECISION (TREE_TYPE (rhs1)))
1294 : >= 0)
1295 : {
1296 24 : const region *rhs2_reg
1297 48 : = get_lvalue (gimple_assign_rhs2 (assign), nullptr);
1298 48 : ctxt->warn (make_shift_count_overflow_diagnostic
1299 24 : (assign,
1300 24 : int (TYPE_PRECISION (TREE_TYPE (rhs1))),
1301 : rhs2_cst,
1302 : rhs2_reg));
1303 : }
1304 : }
1305 : }
1306 :
1307 78664 : if (op == TRUNC_DIV_EXPR
1308 : || op == CEIL_DIV_EXPR
1309 : || op == FLOOR_DIV_EXPR
1310 : || op == ROUND_DIV_EXPR
1311 : || op == TRUNC_MOD_EXPR
1312 : || op == CEIL_MOD_EXPR
1313 : || op == FLOOR_MOD_EXPR
1314 : || op == ROUND_MOD_EXPR
1315 : || op == RDIV_EXPR
1316 78664 : || op == EXACT_DIV_EXPR)
1317 : {
1318 1280 : value_range rhs_vr;
1319 1280 : if (rhs2_sval->maybe_get_value_range (rhs_vr))
1320 1212 : if (rhs_vr.zero_p ())
1321 : {
1322 74 : if (ctxt)
1323 : {
1324 74 : const region *rhs2_reg
1325 148 : = get_lvalue (gimple_assign_rhs2 (assign), nullptr);
1326 74 : ctxt->warn
1327 74 : (std::make_unique<div_by_zero_diagnostic> (assign,
1328 : rhs2_reg));
1329 74 : ctxt->terminate_path ();
1330 : }
1331 74 : return nullptr;
1332 : }
1333 1280 : }
1334 :
1335 78590 : const svalue *sval_binop
1336 78590 : = m_mgr->get_or_create_binop (TREE_TYPE (lhs), op,
1337 : rhs1_sval, rhs2_sval);
1338 78590 : return sval_binop;
1339 : }
1340 :
1341 : /* Vector expressions. In theory we could implement these elementwise,
1342 : but for now, simply return unknown values. */
1343 0 : case VEC_DUPLICATE_EXPR:
1344 0 : case VEC_SERIES_EXPR:
1345 0 : case VEC_COND_EXPR:
1346 0 : case VEC_PERM_EXPR:
1347 0 : case VEC_WIDEN_MULT_HI_EXPR:
1348 0 : case VEC_WIDEN_MULT_LO_EXPR:
1349 0 : case VEC_WIDEN_MULT_EVEN_EXPR:
1350 0 : case VEC_WIDEN_MULT_ODD_EXPR:
1351 0 : case VEC_UNPACK_HI_EXPR:
1352 0 : case VEC_UNPACK_LO_EXPR:
1353 0 : case VEC_UNPACK_FLOAT_HI_EXPR:
1354 0 : case VEC_UNPACK_FLOAT_LO_EXPR:
1355 0 : case VEC_UNPACK_FIX_TRUNC_HI_EXPR:
1356 0 : case VEC_UNPACK_FIX_TRUNC_LO_EXPR:
1357 0 : case VEC_PACK_TRUNC_EXPR:
1358 0 : case VEC_PACK_SAT_EXPR:
1359 0 : case VEC_PACK_FIX_TRUNC_EXPR:
1360 0 : case VEC_PACK_FLOAT_EXPR:
1361 0 : case VEC_WIDEN_LSHIFT_HI_EXPR:
1362 0 : case VEC_WIDEN_LSHIFT_LO_EXPR:
1363 0 : return m_mgr->get_or_create_unknown_svalue (TREE_TYPE (lhs));
1364 : }
1365 : }
1366 :
1367 : /* Workaround for discarding certain false positives from
1368 : -Wanalyzer-use-of-uninitialized-value
1369 : of the form:
1370 : ((A OR-IF B) OR-IF C)
1371 : and:
1372 : ((A AND-IF B) AND-IF C)
1373 : where evaluating B is redundant, but could involve simple accesses of
1374 : uninitialized locals.
1375 :
1376 : When optimization is turned on the FE can immediately fold compound
1377 : conditionals. Specifically, c_parser_condition parses this condition:
1378 : ((A OR-IF B) OR-IF C)
1379 : and calls c_fully_fold on the condition.
1380 : Within c_fully_fold, fold_truth_andor is called, which bails when
1381 : optimization is off, but if any optimization is turned on can convert the
1382 : ((A OR-IF B) OR-IF C)
1383 : into:
1384 : ((A OR B) OR_IF C)
1385 : for sufficiently simple B
1386 : i.e. the inner OR-IF becomes an OR.
1387 : At gimplification time the inner OR becomes BIT_IOR_EXPR (in gimplify_expr),
1388 : giving this for the inner condition:
1389 : tmp = A | B;
1390 : if (tmp)
1391 : thus effectively synthesizing a redundant access of B when optimization
1392 : is turned on, when compared to:
1393 : if (A) goto L1; else goto L4;
1394 : L1: if (B) goto L2; else goto L4;
1395 : L2: if (C) goto L3; else goto L4;
1396 : for the unoptimized case.
1397 :
1398 : Return true if CTXT appears to be handling such a short-circuitable stmt,
1399 : such as the def-stmt for B for the:
1400 : tmp = A | B;
1401 : case above, for the case where A is true and thus B would have been
1402 : short-circuited without optimization, using MODEL for the value of A. */
1403 :
1404 : static bool
1405 1164 : within_short_circuited_stmt_p (const region_model *model,
1406 : const gassign *assign_stmt)
1407 : {
1408 : /* We must have an assignment to a temporary of _Bool type. */
1409 1164 : tree lhs = gimple_assign_lhs (assign_stmt);
1410 1164 : if (TREE_TYPE (lhs) != boolean_type_node)
1411 : return false;
1412 40 : if (TREE_CODE (lhs) != SSA_NAME)
1413 : return false;
1414 40 : if (SSA_NAME_VAR (lhs) != NULL_TREE)
1415 : return false;
1416 :
1417 : /* The temporary bool must be used exactly once: as the second arg of
1418 : a BIT_IOR_EXPR or BIT_AND_EXPR. */
1419 40 : use_operand_p use_op;
1420 40 : gimple *use_stmt;
1421 40 : if (!single_imm_use (lhs, &use_op, &use_stmt))
1422 : return false;
1423 1192 : const gassign *use_assign = dyn_cast <const gassign *> (use_stmt);
1424 40 : if (!use_assign)
1425 : return false;
1426 40 : enum tree_code op = gimple_assign_rhs_code (use_assign);
1427 40 : if (!(op == BIT_IOR_EXPR ||op == BIT_AND_EXPR))
1428 : return false;
1429 28 : if (!(gimple_assign_rhs1 (use_assign) != lhs
1430 28 : && gimple_assign_rhs2 (use_assign) == lhs))
1431 : return false;
1432 :
1433 : /* The first arg of the bitwise stmt must have a known value in MODEL
1434 : that implies that the value of the second arg doesn't matter, i.e.
1435 : 1 for bitwise or, 0 for bitwise and. */
1436 28 : tree other_arg = gimple_assign_rhs1 (use_assign);
1437 : /* Use a nullptr ctxt here to avoid generating warnings. */
1438 28 : const svalue *other_arg_sval = model->get_rvalue (other_arg, nullptr);
1439 28 : tree other_arg_cst = other_arg_sval->maybe_get_constant ();
1440 28 : if (!other_arg_cst)
1441 : return false;
1442 12 : switch (op)
1443 : {
1444 0 : default:
1445 0 : gcc_unreachable ();
1446 12 : case BIT_IOR_EXPR:
1447 12 : if (zerop (other_arg_cst))
1448 : return false;
1449 : break;
1450 0 : case BIT_AND_EXPR:
1451 0 : if (!zerop (other_arg_cst))
1452 : return false;
1453 : break;
1454 : }
1455 :
1456 : /* All tests passed. We appear to be in a stmt that generates a boolean
1457 : temporary with a value that won't matter. */
1458 : return true;
1459 : }
1460 :
1461 : /* Workaround for discarding certain false positives from
1462 : -Wanalyzer-use-of-uninitialized-value
1463 : seen with -ftrivial-auto-var-init=.
1464 :
1465 : -ftrivial-auto-var-init= will generate calls to IFN_DEFERRED_INIT.
1466 :
1467 : If the address of the var is taken, gimplification will give us
1468 : something like:
1469 :
1470 : _1 = .DEFERRED_INIT (4, 2, &"len"[0]);
1471 : len = _1;
1472 :
1473 : The result of DEFERRED_INIT will be an uninit value; we don't
1474 : want to emit a false positive for "len = _1;"
1475 :
1476 : Return true if ASSIGN_STMT is such a stmt. */
1477 :
1478 : static bool
1479 1152 : due_to_ifn_deferred_init_p (const gassign *assign_stmt)
1480 :
1481 : {
1482 : /* We must have an assignment to a decl from an SSA name that's the
1483 : result of a IFN_DEFERRED_INIT call. */
1484 2134 : if (gimple_assign_rhs_code (assign_stmt) != SSA_NAME)
1485 : return false;
1486 287 : tree lhs = gimple_assign_lhs (assign_stmt);
1487 287 : if (TREE_CODE (lhs) != VAR_DECL)
1488 : return false;
1489 222 : tree rhs = gimple_assign_rhs1 (assign_stmt);
1490 222 : if (TREE_CODE (rhs) != SSA_NAME)
1491 : return false;
1492 222 : const gimple *def_stmt = SSA_NAME_DEF_STMT (rhs);
1493 222 : const gcall *call = dyn_cast <const gcall *> (def_stmt);
1494 222 : if (!call)
1495 : return false;
1496 222 : if (gimple_call_internal_p (call)
1497 222 : && gimple_call_internal_fn (call) == IFN_DEFERRED_INIT)
1498 210 : return true;
1499 : return false;
1500 : }
1501 :
1502 : /* Check for SVAL being poisoned, adding a warning to CTXT.
1503 : Return SVAL, or, if a warning is added, another value, to avoid
1504 : repeatedly complaining about the same poisoned value in followup code.
1505 : SRC_REGION is a hint about where SVAL came from, and can be nullptr. */
1506 :
1507 : const svalue *
1508 3377168 : region_model::check_for_poison (const svalue *sval,
1509 : tree expr,
1510 : const region *src_region,
1511 : region_model_context *ctxt) const
1512 : {
1513 3377168 : if (!ctxt)
1514 : return sval;
1515 :
1516 1715042 : if (const poisoned_svalue *poisoned_sval = sval->dyn_cast_poisoned_svalue ())
1517 : {
1518 2716 : enum poison_kind pkind = poisoned_sval->get_poison_kind ();
1519 :
1520 : /* Ignore uninitialized uses of empty types; there's nothing
1521 : to initialize. */
1522 2716 : if (pkind == poison_kind::uninit
1523 2675 : && sval->get_type ()
1524 5280 : && is_empty_type (sval->get_type ()))
1525 : return sval;
1526 :
1527 2483 : if (pkind == poison_kind::uninit)
1528 2442 : if (const gimple *curr_stmt = ctxt->get_stmt ())
1529 1510 : if (const gassign *assign_stmt
1530 3425 : = dyn_cast <const gassign *> (curr_stmt))
1531 : {
1532 : /* Special case to avoid certain false positives. */
1533 1164 : if (within_short_circuited_stmt_p (this, assign_stmt))
1534 : return sval;
1535 :
1536 : /* Special case to avoid false positive on
1537 : -ftrivial-auto-var-init=. */
1538 1152 : if (due_to_ifn_deferred_init_p (assign_stmt))
1539 : return sval;
1540 : }
1541 :
1542 : /* If we have an SSA name for a temporary, we don't want to print
1543 : '<unknown>'.
1544 : Poisoned values are shared by type, and so we can't reconstruct
1545 : the tree other than via the def stmts, using
1546 : fixup_tree_for_diagnostic. */
1547 2261 : tree diag_arg = fixup_tree_for_diagnostic (expr);
1548 2261 : if (src_region == nullptr && pkind == poison_kind::uninit)
1549 2169 : src_region = get_region_for_poisoned_expr (expr);
1550 :
1551 : /* Can we reliably get the poisoned value from "expr"?
1552 : This is for use by poisoned_value_diagnostic::check_valid_fpath_p.
1553 : Unfortunately, we might not have a reliable value for EXPR.
1554 : Hence we only query its value now, and only use it if we get the
1555 : poisoned value back again. */
1556 2261 : tree check_expr = expr;
1557 2261 : const svalue *foo_sval = get_rvalue (expr, nullptr);
1558 2261 : if (foo_sval == sval)
1559 : check_expr = expr;
1560 : else
1561 111 : check_expr = nullptr;
1562 2261 : if (ctxt->warn (make_poisoned_value_diagnostic (diag_arg,
1563 : pkind,
1564 : src_region,
1565 : check_expr)))
1566 : {
1567 : /* We only want to report use of a poisoned value at the first
1568 : place it gets used; return an unknown value to avoid generating
1569 : a chain of followup warnings. */
1570 1372 : sval = m_mgr->get_or_create_unknown_svalue (sval->get_type ());
1571 : }
1572 :
1573 2261 : return sval;
1574 : }
1575 :
1576 : return sval;
1577 : }
1578 :
1579 : /* Attempt to get a region for describing EXPR, the source of region of
1580 : a poisoned_svalue for use in a poisoned_value_diagnostic.
1581 : Return nullptr if there is no good region to use. */
1582 :
1583 : const region *
1584 2169 : region_model::get_region_for_poisoned_expr (tree expr) const
1585 : {
1586 2169 : if (TREE_CODE (expr) == SSA_NAME)
1587 : {
1588 1402 : tree decl = SSA_NAME_VAR (expr);
1589 1362 : if (decl && DECL_P (decl))
1590 : expr = decl;
1591 : else
1592 : return nullptr;
1593 : }
1594 2129 : return get_lvalue (expr, nullptr);
1595 : }
1596 :
1597 : /* Update this model for the ASSIGN stmt, using CTXT to report any
1598 : diagnostics. */
1599 :
1600 : void
1601 252794 : region_model::on_assignment (const gassign *assign, region_model_context *ctxt)
1602 : {
1603 252794 : tree lhs = gimple_assign_lhs (assign);
1604 252794 : tree rhs1 = gimple_assign_rhs1 (assign);
1605 :
1606 252794 : const region *lhs_reg = get_lvalue (lhs, ctxt);
1607 :
1608 : /* Any writes other than to the stack are treated
1609 : as externally visible. */
1610 252794 : if (ctxt)
1611 : {
1612 191710 : enum memory_space memspace = lhs_reg->get_memory_space ();
1613 191710 : if (memspace != MEMSPACE_STACK)
1614 11210 : ctxt->maybe_did_work ();
1615 : }
1616 :
1617 : /* Most assignments are handled by:
1618 : set_value (lhs_reg, SVALUE, CTXT)
1619 : for some SVALUE. */
1620 252794 : if (const svalue *sval = get_gassign_result (assign, ctxt))
1621 : {
1622 245394 : tree expr = get_diagnostic_tree_for_gassign (assign);
1623 245394 : check_for_poison (sval, expr, nullptr, ctxt);
1624 245394 : set_value (lhs_reg, sval, ctxt);
1625 245394 : return;
1626 : }
1627 :
1628 7400 : enum tree_code op = gimple_assign_rhs_code (assign);
1629 7400 : switch (op)
1630 : {
1631 37 : default:
1632 37 : {
1633 37 : if (0)
1634 : sorry_at (assign->location, "unhandled assignment op: %qs",
1635 : get_tree_code_name (op));
1636 37 : const svalue *unknown_sval
1637 37 : = m_mgr->get_or_create_unknown_svalue (TREE_TYPE (lhs));
1638 37 : set_value (lhs_reg, unknown_sval, ctxt);
1639 : }
1640 37 : break;
1641 :
1642 6805 : case CONSTRUCTOR:
1643 6805 : {
1644 6805 : if (TREE_CLOBBER_P (rhs1))
1645 : {
1646 : /* e.g. "x ={v} {CLOBBER};" */
1647 6656 : clobber_region (lhs_reg);
1648 : }
1649 : else
1650 : {
1651 : /* Any CONSTRUCTOR that survives to this point is either
1652 : just a zero-init of everything, or a vector. */
1653 149 : if (!CONSTRUCTOR_NO_CLEARING (rhs1))
1654 149 : zero_fill_region (lhs_reg, ctxt);
1655 : unsigned ix;
1656 : tree index;
1657 : tree val;
1658 311 : FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (rhs1), ix, index, val)
1659 : {
1660 162 : gcc_assert (TREE_CODE (TREE_TYPE (rhs1)) == VECTOR_TYPE);
1661 162 : if (!index)
1662 22 : index = build_int_cst (integer_type_node, ix);
1663 162 : gcc_assert (TREE_CODE (index) == INTEGER_CST);
1664 162 : const svalue *index_sval
1665 162 : = m_mgr->get_or_create_constant_svalue (index);
1666 162 : gcc_assert (index_sval);
1667 162 : const region *sub_reg
1668 162 : = m_mgr->get_element_region (lhs_reg,
1669 162 : TREE_TYPE (val),
1670 : index_sval);
1671 162 : const svalue *val_sval = get_rvalue (val, ctxt);
1672 162 : set_value (sub_reg, val_sval, ctxt);
1673 : }
1674 : }
1675 : }
1676 : break;
1677 :
1678 558 : case STRING_CST:
1679 558 : {
1680 : /* e.g. "struct s2 x = {{'A', 'B', 'C', 'D'}};". */
1681 558 : const svalue *rhs_sval = get_rvalue (rhs1, ctxt);
1682 762 : m_store.set_value (m_mgr->get_store_manager(), lhs_reg, rhs_sval,
1683 204 : ctxt ? ctxt->get_uncertainty () : nullptr);
1684 : }
1685 558 : break;
1686 : }
1687 : }
1688 :
1689 : /* Handle the pre-sm-state part of STMT, modifying this object in-place.
1690 : Write true to *OUT_UNKNOWN_SIDE_EFFECTS if the stmt has unknown
1691 : side effects. */
1692 :
1693 : void
1694 333701 : region_model::on_stmt_pre (const gimple *stmt,
1695 : bool *out_unknown_side_effects,
1696 : region_model_context *ctxt)
1697 : {
1698 333701 : switch (gimple_code (stmt))
1699 : {
1700 : case GIMPLE_COND:
1701 : case GIMPLE_EH_DISPATCH:
1702 : case GIMPLE_GOTO:
1703 : case GIMPLE_LABEL:
1704 : case GIMPLE_NOP:
1705 : case GIMPLE_PREDICT:
1706 : case GIMPLE_RESX:
1707 : case GIMPLE_SWITCH:
1708 : /* No-ops here. */
1709 : break;
1710 :
1711 252794 : case GIMPLE_ASSIGN:
1712 252794 : {
1713 252794 : const gassign *assign = as_a <const gassign *> (stmt);
1714 252794 : on_assignment (assign, ctxt);
1715 : }
1716 252794 : break;
1717 :
1718 384 : case GIMPLE_ASM:
1719 384 : {
1720 384 : const gasm *asm_stmt = as_a <const gasm *> (stmt);
1721 384 : on_asm_stmt (asm_stmt, ctxt);
1722 384 : if (ctxt)
1723 342 : ctxt->maybe_did_work ();
1724 : }
1725 : break;
1726 :
1727 77016 : case GIMPLE_CALL:
1728 77016 : {
1729 : /* Track whether we have a gcall to a function that's not recognized by
1730 : anything, for which we don't have a function body, or for which we
1731 : don't know the fndecl. */
1732 77016 : const gcall *call = as_a <const gcall *> (stmt);
1733 77016 : *out_unknown_side_effects = on_call_pre (*call, ctxt);
1734 : }
1735 77016 : break;
1736 :
1737 0 : case GIMPLE_RETURN:
1738 0 : {
1739 0 : const greturn *return_ = as_a <const greturn *> (stmt);
1740 0 : on_return (return_, ctxt);
1741 : }
1742 0 : break;
1743 :
1744 : /* We don't expect to see any other statement kinds in the analyzer. */
1745 0 : case GIMPLE_DEBUG: // should have stripped these out when building the supergraph
1746 0 : default:
1747 0 : internal_error ("unexpected gimple stmt code: %qs",
1748 0 : gimple_code_name[gimple_code (stmt)]);
1749 333701 : break;
1750 : }
1751 333701 : }
1752 :
1753 : /* Given a call CD with function attribute FORMAT_ATTR, check that the
1754 : format arg to the call is a valid null-terminated string. */
1755 :
1756 : void
1757 1055 : region_model::check_call_format_attr (const call_details &cd,
1758 : tree format_attr) const
1759 : {
1760 : /* We assume that FORMAT_ATTR has already been validated. */
1761 :
1762 : /* arg0 of the attribute should be kind of format strings
1763 : that this function expects (e.g. "printf"). */
1764 1055 : const tree arg0_tree_list = TREE_VALUE (format_attr);
1765 1055 : if (!arg0_tree_list)
1766 0 : return;
1767 :
1768 : /* arg1 of the attribute should be the 1-based parameter index
1769 : to treat as the format string. */
1770 1055 : const tree arg1_tree_list = TREE_CHAIN (arg0_tree_list);
1771 1055 : if (!arg1_tree_list)
1772 : return;
1773 1055 : const tree arg1_value = TREE_VALUE (arg1_tree_list);
1774 1055 : if (!arg1_value)
1775 : return;
1776 :
1777 1055 : unsigned format_arg_idx = TREE_INT_CST_LOW (arg1_value) - 1;
1778 1055 : if (cd.num_args () <= format_arg_idx)
1779 : return;
1780 :
1781 : /* Subclass of annotating_context that
1782 : adds a note about the format attr to any saved diagnostics. */
1783 1055 : class annotating_ctxt : public annotating_context
1784 : {
1785 : public:
1786 1055 : annotating_ctxt (const call_details &cd,
1787 : unsigned fmt_param_idx)
1788 1055 : : annotating_context (cd.get_ctxt ()),
1789 1055 : m_cd (cd),
1790 1055 : m_fmt_param_idx (fmt_param_idx)
1791 : {
1792 : }
1793 13 : void add_annotations () final override
1794 : {
1795 0 : class reason_format_attr
1796 : : public pending_note_subclass<reason_format_attr>
1797 : {
1798 : public:
1799 13 : reason_format_attr (const call_arg_details &arg_details)
1800 13 : : m_arg_details (arg_details)
1801 : {
1802 : }
1803 :
1804 74 : const char *get_kind () const final override
1805 : {
1806 74 : return "reason_format_attr";
1807 : }
1808 :
1809 13 : void emit () const final override
1810 : {
1811 13 : inform (DECL_SOURCE_LOCATION (m_arg_details.m_called_fndecl),
1812 : "parameter %i of %qD marked as a format string"
1813 : " via %qs attribute",
1814 13 : m_arg_details.m_arg_idx + 1, m_arg_details.m_called_fndecl,
1815 : "format");
1816 13 : }
1817 :
1818 37 : bool operator== (const reason_format_attr &other) const
1819 : {
1820 37 : return m_arg_details == other.m_arg_details;
1821 : }
1822 :
1823 : private:
1824 : call_arg_details m_arg_details;
1825 : };
1826 :
1827 13 : call_arg_details arg_details (m_cd, m_fmt_param_idx);
1828 13 : add_note (std::make_unique<reason_format_attr> (arg_details));
1829 13 : }
1830 : private:
1831 : const call_details &m_cd;
1832 : unsigned m_fmt_param_idx;
1833 : };
1834 :
1835 1055 : annotating_ctxt my_ctxt (cd, format_arg_idx);
1836 1055 : call_details my_cd (cd, &my_ctxt);
1837 1055 : my_cd.check_for_null_terminated_string_arg (format_arg_idx);
1838 : }
1839 :
1840 : /* Ensure that all arguments at the call described by CD are checked
1841 : for poisoned values, by calling get_rvalue on each argument.
1842 :
1843 : Check that calls to functions with "format" attribute have valid
1844 : null-terminated strings for their format argument. */
1845 :
1846 : void
1847 49312 : region_model::check_call_args (const call_details &cd) const
1848 : {
1849 114853 : for (unsigned arg_idx = 0; arg_idx < cd.num_args (); arg_idx++)
1850 65541 : cd.get_arg_svalue (arg_idx);
1851 :
1852 : /* Handle attribute "format". */
1853 49312 : if (tree format_attr = cd.lookup_function_attribute ("format"))
1854 1055 : check_call_format_attr (cd, format_attr);
1855 49312 : }
1856 :
1857 : /* Update this model for an outcome of a call that returns a specific
1858 : integer constant.
1859 : If UNMERGEABLE, then make the result unmergeable, e.g. to prevent
1860 : the state-merger code from merging success and failure outcomes. */
1861 :
1862 : void
1863 936 : region_model::update_for_int_cst_return (const call_details &cd,
1864 : int retval,
1865 : bool unmergeable)
1866 : {
1867 936 : if (!cd.get_lhs_type ())
1868 : return;
1869 615 : if (TREE_CODE (cd.get_lhs_type ()) != INTEGER_TYPE)
1870 : return;
1871 609 : const svalue *result
1872 609 : = m_mgr->get_or_create_int_cst (cd.get_lhs_type (), retval);
1873 609 : if (unmergeable)
1874 609 : result = m_mgr->get_or_create_unmergeable (result);
1875 609 : set_value (cd.get_lhs_region (), result, cd.get_ctxt ());
1876 : }
1877 :
1878 : /* Update this model for an outcome of a call that returns zero.
1879 : If UNMERGEABLE, then make the result unmergeable, e.g. to prevent
1880 : the state-merger code from merging success and failure outcomes. */
1881 :
1882 : void
1883 290 : region_model::update_for_zero_return (const call_details &cd,
1884 : bool unmergeable)
1885 : {
1886 290 : update_for_int_cst_return (cd, 0, unmergeable);
1887 290 : }
1888 :
1889 : /* Update this model for an outcome of a call that returns a NULL
1890 : pointer.
1891 : If UNMERGEABLE, then make the result unmergeable, e.g. to prevent
1892 : the state-merger code from merging success and failure outcomes. */
1893 :
1894 : void
1895 14 : region_model::update_for_null_return (const call_details &cd, bool unmergeable)
1896 : {
1897 14 : if (!cd.get_lhs_type ())
1898 : return;
1899 2 : if (!POINTER_TYPE_P (cd.get_lhs_type ()))
1900 : return;
1901 2 : const svalue *result = m_mgr->get_or_create_null_ptr (cd.get_lhs_type ());
1902 2 : if (unmergeable)
1903 2 : result = m_mgr->get_or_create_unmergeable (result);
1904 2 : set_value (cd.get_lhs_region (), result, cd.get_ctxt ());
1905 : }
1906 :
1907 : /* Update this model for an outcome of a call that returns non-zero.
1908 : Specifically, assign an svalue to the LHS, and add a constraint that
1909 : that svalue is non-zero. */
1910 :
1911 : void
1912 136 : region_model::update_for_nonzero_return (const call_details &cd)
1913 : {
1914 136 : if (!cd.get_lhs_type ())
1915 : return;
1916 100 : if (TREE_CODE (cd.get_lhs_type ()) != INTEGER_TYPE)
1917 : return;
1918 100 : cd.set_any_lhs_with_defaults ();
1919 100 : const svalue *zero
1920 100 : = m_mgr->get_or_create_int_cst (cd.get_lhs_type (), 0);
1921 100 : const svalue *result
1922 100 : = get_store_value (cd.get_lhs_region (), cd.get_ctxt ());
1923 100 : add_constraint (result, NE_EXPR, zero, cd.get_ctxt ());
1924 : }
1925 :
1926 : /* Subroutine of region_model::maybe_get_copy_bounds.
1927 : The Linux kernel commonly uses
1928 : min_t([unsigned] long, VAR, sizeof(T));
1929 : to set an upper bound on the size of a copy_to_user.
1930 : Attempt to simplify such sizes by trying to get the upper bound as a
1931 : constant.
1932 : Return the simplified svalue if possible, or nullptr otherwise. */
1933 :
1934 : static const svalue *
1935 53 : maybe_simplify_upper_bound (const svalue *num_bytes_sval,
1936 : region_model_manager *mgr)
1937 : {
1938 53 : tree type = num_bytes_sval->get_type ();
1939 70 : while (const svalue *raw = num_bytes_sval->maybe_undo_cast ())
1940 : num_bytes_sval = raw;
1941 53 : if (const binop_svalue *binop_sval = num_bytes_sval->dyn_cast_binop_svalue ())
1942 38 : if (binop_sval->get_op () == MIN_EXPR)
1943 8 : if (binop_sval->get_arg1 ()->get_kind () == SK_CONSTANT)
1944 : {
1945 8 : return mgr->get_or_create_cast (type, binop_sval->get_arg1 ());
1946 : /* TODO: we might want to also capture the constraint
1947 : when recording the diagnostic, or note that we're using
1948 : the upper bound. */
1949 : }
1950 : return nullptr;
1951 : }
1952 :
1953 : /* Attempt to get an upper bound for the size of a copy when simulating a
1954 : copy function.
1955 :
1956 : NUM_BYTES_SVAL is the symbolic value for the size of the copy.
1957 : Use it if it's constant, otherwise try to simplify it. Failing
1958 : that, use the size of SRC_REG if constant.
1959 :
1960 : Return a symbolic value for an upper limit on the number of bytes
1961 : copied, or nullptr if no such value could be determined. */
1962 :
1963 : const svalue *
1964 157 : region_model::maybe_get_copy_bounds (const region *src_reg,
1965 : const svalue *num_bytes_sval)
1966 : {
1967 157 : if (num_bytes_sval->maybe_get_constant ())
1968 : return num_bytes_sval;
1969 :
1970 106 : if (const svalue *simplified
1971 53 : = maybe_simplify_upper_bound (num_bytes_sval, m_mgr))
1972 8 : num_bytes_sval = simplified;
1973 :
1974 53 : if (num_bytes_sval->maybe_get_constant ())
1975 : return num_bytes_sval;
1976 :
1977 : /* For now, try just guessing the size as the capacity of the
1978 : base region of the src.
1979 : This is a hack; we might get too large a value. */
1980 45 : const region *src_base_reg = src_reg->get_base_region ();
1981 45 : num_bytes_sval = get_capacity (src_base_reg);
1982 :
1983 45 : if (num_bytes_sval->maybe_get_constant ())
1984 11 : return num_bytes_sval;
1985 :
1986 : /* Non-constant: give up. */
1987 : return nullptr;
1988 : }
1989 :
1990 : /* Get any known_function for FNDECL for call CD.
1991 :
1992 : The call must match all assumptions made by the known_function (such as
1993 : e.g. "argument 1's type must be a pointer type").
1994 :
1995 : Return nullptr if no known_function is found, or it does not match the
1996 : assumption(s). */
1997 :
1998 : const known_function *
1999 303953 : region_model::get_known_function (tree fndecl, const call_details &cd) const
2000 : {
2001 303953 : known_function_manager *known_fn_mgr = m_mgr->get_known_function_manager ();
2002 303953 : return known_fn_mgr->get_match (fndecl, cd);
2003 : }
2004 :
2005 : /* Get any known_function for IFN, or nullptr. */
2006 :
2007 : const known_function *
2008 1442 : region_model::get_known_function (enum internal_fn ifn) const
2009 : {
2010 1442 : known_function_manager *known_fn_mgr = m_mgr->get_known_function_manager ();
2011 1442 : return known_fn_mgr->get_internal_fn (ifn);
2012 : }
2013 :
2014 : /* Get any builtin_known_function for CALL and emit any warning to CTXT
2015 : if not nullptr.
2016 :
2017 : The call must match all assumptions made by the known_function (such as
2018 : e.g. "argument 1's type must be a pointer type").
2019 :
2020 : Return nullptr if no builtin_known_function is found, or it does
2021 : not match the assumption(s).
2022 :
2023 : Internally calls get_known_function to find a known_function and cast it
2024 : to a builtin_known_function.
2025 :
2026 : For instance, calloc is a C builtin, defined in gcc/builtins.def
2027 : by the DEF_LIB_BUILTIN macro. Such builtins are recognized by the
2028 : analyzer by their name, so that even in C++ or if the user redeclares
2029 : them but mismatch their signature, they are still recognized as builtins.
2030 :
2031 : Cases when a supposed builtin is not flagged as one by the FE:
2032 :
2033 : The C++ FE does not recognize calloc as a builtin if it has not been
2034 : included from a standard header, but the C FE does. Hence in C++ if
2035 : CALL comes from a calloc and stdlib is not included,
2036 : gcc/tree.h:fndecl_built_in_p (CALL) would be false.
2037 :
2038 : In C code, a __SIZE_TYPE__ calloc (__SIZE_TYPE__, __SIZE_TYPE__) user
2039 : declaration has obviously a mismatching signature from the standard, and
2040 : its function_decl tree won't be unified by
2041 : gcc/c-decl.cc:match_builtin_function_types.
2042 :
2043 : Yet in both cases the analyzer should treat the calls as a builtin calloc
2044 : so that extra attributes unspecified by the standard but added by GCC
2045 : (e.g. sprintf attributes in gcc/builtins.def), useful for the detection of
2046 : dangerous behavior, are indeed processed.
2047 :
2048 : Therefore for those cases when a "builtin flag" is not added by the FE,
2049 : builtins' kf are derived from builtin_known_function, whose method
2050 : builtin_known_function::builtin_decl returns the builtin's
2051 : function_decl tree as defined in gcc/builtins.def, with all the extra
2052 : attributes. */
2053 :
2054 : const builtin_known_function *
2055 164016 : region_model::get_builtin_kf (const gcall &call,
2056 : region_model_context *ctxt /* = nullptr */) const
2057 : {
2058 164016 : region_model *mut_this = const_cast <region_model *> (this);
2059 164016 : tree callee_fndecl = mut_this->get_fndecl_for_call (call, ctxt);
2060 164016 : if (! callee_fndecl)
2061 : return nullptr;
2062 :
2063 164016 : call_details cd (call, mut_this, ctxt);
2064 164016 : if (const known_function *kf = get_known_function (callee_fndecl, cd))
2065 112322 : return kf->dyn_cast_builtin_kf ();
2066 :
2067 : return nullptr;
2068 : }
2069 :
2070 : /* Subclass of custom_edge_info for use by exploded_edges that represent
2071 : an exception being thrown from a call we don't have the code for. */
2072 :
class exception_thrown_from_unrecognized_call : public custom_edge_info
{
public:
  /* FNDECL may be NULL_TREE if the callee is unknown (e.g. a call
     through a function pointer).  */
  exception_thrown_from_unrecognized_call (const gcall &call,
					   tree fndecl)
  : m_call (call),
    m_fndecl (fndecl)
  {
  }

  void print (pretty_printer *pp) const final override
  {
    if (m_fndecl)
      pp_printf (pp, "if %qD throws an exception...", m_fndecl);
    else
      pp_printf (pp, "if the called function throws an exception...");
  };

  /* Simulate the throw: conjure an exception object of unknown type
     and push it onto MODEL's stack of thrown exceptions.  */
  bool
  update_model (region_model *model,
		const exploded_edge *,
		region_model_context *ctxt) const final override
  {
    /* Allocate an exception and set it as the current exception.  */
    const region *exception_reg
      = model->get_or_create_region_for_heap_alloc
	  (nullptr, /* We don't know the size of the region.  */
	   ctxt);

    region_model_manager *mgr = model->get_manager ();
    conjured_purge p (model, ctxt);

    /* The contents of the region are some conjured svalue.  */
    const svalue *exception_sval
      = mgr->get_or_create_conjured_svalue (NULL_TREE,
					    &m_call,
					    exception_reg, p, 0);
    model->set_value (exception_reg, exception_sval, ctxt);
    const svalue *exception_ptr_sval
      = mgr->get_ptr_svalue (ptr_type_node, exception_reg);
    /* Conjure values for the exception's type info and destructor;
       the final args (1 and 2) distinguish these conjured svalues
       from the exception contents (0) above.  */
    const svalue *tinfo_sval
      = mgr->get_or_create_conjured_svalue (ptr_type_node,
					    &m_call,
					    exception_reg, p, 1);
    const svalue *destructor_sval
      = mgr->get_or_create_conjured_svalue (ptr_type_node,
					    &m_call,
					    exception_reg, p, 2);

    /* Push a new exception_node on the model's thrown exception stack.  */
    exception_node eh_node (exception_ptr_sval, tinfo_sval, destructor_sval);
    model->push_thrown_exception (eh_node);

    return true;
  }

  /* Add a "throw from external function" event at the call site when
     emitting a diagnostic path that goes through this edge.  */
  void
  add_events_to_path (checker_path *emission_path,
		      const exploded_edge &eedge,
		      pending_diagnostic &,
		      const state_transition *) const final override
  {
    const exploded_node *dst_node = eedge.m_dest;
    const program_point &dst_point = dst_node->get_point ();
    const int dst_stack_depth = dst_point.get_stack_depth ();

    emission_path->add_event
      (std::make_unique<throw_from_call_to_external_fn_event>
       (event_loc_info (m_call.location,
			dst_point.get_fndecl (),
			dst_stack_depth),
	dst_node,
	m_call,
	m_fndecl));
  }

  /* Create the exploded node for the "exception thrown" outcome,
     then wire up its successor edges for stack unwinding.
     Returns nullptr if the node couldn't be created (e.g. limits hit).  */
  exploded_node *
  create_enode (exploded_graph &eg,
		const program_point &point,
		program_state &&state,
		exploded_node *enode_for_diag,
		region_model_context *ctxt) const final override
  {
    exploded_node *thrown_enode
      = eg.get_or_create_node (point, state, enode_for_diag,
			       /* Don't add to worklist.  */
			       false);
    if (!thrown_enode)
      return nullptr;

    /* Add successor edges for thrown_enode "by hand" for the exception.  */
    eg.unwind_from_exception (*thrown_enode,
			      &m_call,
			      ctxt);
    return thrown_enode;
  }

private:
  const gcall &m_call;
  tree m_fndecl; // could be null
};
2174 :
2175 : /* Get a set of functions that are assumed to not throw exceptions. */
2176 :
2177 : static function_set
2178 5350 : get_fns_assumed_not_to_throw ()
2179 : {
2180 : // TODO: populate this list more fully
2181 5350 : static const char * const fn_names[] = {
2182 : /* This array must be kept sorted. */
2183 :
2184 : "fclose"
2185 : };
2186 5350 : const size_t count = ARRAY_SIZE (fn_names);
2187 5350 : function_set fs (fn_names, count);
2188 5350 : return fs;
2189 : }
2190 :
2191 : /* Return true if CALL could throw an exception.
2192 : FNDECL could be NULL_TREE. */
2193 :
2194 : static bool
2195 12819 : can_throw_p (const gcall &call, tree fndecl)
2196 : {
2197 12819 : if (!flag_exceptions)
2198 : return false;
2199 :
2200 : /* Compatibility flag to allow the user to assume external functions
2201 : never throw exceptions. This may be useful when using the analyzer
2202 : on C code that is compiled with -fexceptions, but for which the headers
2203 : haven't yet had "nothrow" attributes systematically added. */
2204 6031 : if (flag_analyzer_assume_nothrow)
2205 : return false;
2206 :
2207 6029 : if (gimple_call_nothrow_p (&call))
2208 : return false;
2209 :
2210 5490 : if (fndecl)
2211 : {
2212 5350 : const function_set fs = get_fns_assumed_not_to_throw ();
2213 5350 : if (fs.contains_decl_p (fndecl))
2214 7 : return false;
2215 : }
2216 :
2217 : return true;
2218 : }
2219 :
2220 : /* Given CALL where we don't know what code is being called
2221 : (by not having the body of FNDECL, or having NULL_TREE for FNDECL),
2222 : potentially bifurcate control flow to simulate the call throwing
2223 : an exception. */
2224 :
2225 : void
2226 18848 : region_model::check_for_throw_inside_call (const gcall &call,
2227 : tree fndecl,
2228 : region_model_context *ctxt)
2229 : {
2230 18848 : if (!ctxt)
2231 13365 : return;
2232 :
2233 : /* Could this function throw an exception?
2234 : If so, add an extra e-edge for that. */
2235 12819 : if (!can_throw_p (call, fndecl))
2236 : return;
2237 :
2238 5483 : auto throws_exception
2239 5483 : = std::make_unique<exception_thrown_from_unrecognized_call> (call, fndecl);
2240 5483 : ctxt->bifurcate (std::move (throws_exception));
2241 5483 : }
2242 :
2243 : /* A subclass of pending_diagnostic for complaining about jumps through NULL
2244 : function pointers. */
2245 :
class jump_through_null : public pending_diagnostic_subclass<jump_through_null>
{
public:
  jump_through_null (const gcall &call)
  : m_call (call)
  {}

  const char *get_kind () const final override
  {
    return "jump_through_null";
  }

  /* Deduplicate by the identity of the call statement.  */
  bool operator== (const jump_through_null &other) const
  {
    return &m_call == &other.m_call;
  }

  int get_controlling_option () const final override
  {
    return OPT_Wanalyzer_jump_through_null;
  }

  bool emit (diagnostic_emission_context &ctxt) final override
  {
    return ctxt.warn ("jump through null pointer");
  }

  bool describe_final_event (pretty_printer &pp,
			     const evdesc::final_event &) final override
  {
    pp_string (&pp, "jump through null pointer here");
    return true;
  }

private:
  /* The call statement via the NULL function pointer.  */
  const gcall &m_call;
};
2283 : /* Update this model for the CALL stmt, using CTXT to report any
2284 : diagnostics - the first half.
2285 :
2286 : Updates to the region_model that should be made *before* sm-states
2287 : are updated are done here; other updates to the region_model are done
2288 : in region_model::on_call_post.
2289 :
2290 : Return true if the function call has unknown side effects (it wasn't
2291 : recognized and we don't have a body for it, or are unable to tell which
2292 : fndecl it is). */
2293 :
bool
region_model::on_call_pre (const gcall &call, region_model_context *ctxt)
{
  call_details cd (call, this, ctxt);

  /* Special-case for IFN_DEFERRED_INIT.
     We want to report uninitialized variables with -fanalyzer (treating
     -ftrivial-auto-var-init= as purely a mitigation feature).
     Handle IFN_DEFERRED_INIT by treating it as no-op: don't touch the
     lhs of the call, so that it is still uninitialized from the point of
     view of the analyzer.  */
  if (gimple_call_internal_p (&call)
      && gimple_call_internal_fn (&call) == IFN_DEFERRED_INIT)
    return false; /* No side effects.  */

  /* Get svalues for all of the arguments at the callsite, to ensure that we
     complain about any uninitialized arguments.  This might lead to
     duplicates if any of the handling below also looks up the svalues,
     but the deduplication code should deal with that.  */
  if (ctxt)
    check_call_args (cd);

  tree callee_fndecl = get_fndecl_for_call (call, ctxt);

  /* Dispatch internal functions to any registered known_function
     handler.  */
  if (gimple_call_internal_p (&call))
    if (const known_function *kf
	  = get_known_function (gimple_call_internal_fn (&call)))
      {
	kf->impl_call_pre (cd);
	return false; /* No further side effects.  */
      }

  /* Calls where we couldn't identify the callee (e.g. through a
     function pointer).  */
  if (!callee_fndecl)
    {
      /* Check for jump through nullptr.  */
      if (ctxt)
	if (tree fn_ptr = gimple_call_fn (&call))
	  {
	    const svalue *fn_ptr_sval = get_rvalue (fn_ptr, ctxt);
	    if (fn_ptr_sval->all_zeroes_p ())
	      {
		ctxt->warn
		  (std::make_unique<jump_through_null> (call));
		ctxt->terminate_path ();
		return true;
	      }
	  }

      check_for_throw_inside_call (call, NULL_TREE, ctxt);
      cd.set_any_lhs_with_defaults ();
      return true; /* Unknown side effects.  */
    }

  /* Known functions (e.g. recognized library calls) fully model
     their own effects.  */
  if (const known_function *kf = get_known_function (callee_fndecl, cd))
    {
      kf->impl_call_pre (cd);
      return false; /* No further side effects.  */
    }

  cd.set_any_lhs_with_defaults ();

  /* const/pure functions can't modify reachable state.  */
  const int callee_fndecl_flags = flags_from_decl_or_type (callee_fndecl);
  if (callee_fndecl_flags & (ECF_CONST | ECF_PURE))
    return false; /* No side effects.  */

  if (fndecl_built_in_p (callee_fndecl))
    return true; /* Unknown side effects.  */

  /* Calls to functions without a body that we can analyze.  */
  if (!fndecl_has_gimple_body_p (callee_fndecl))
    {
      check_for_throw_inside_call (call, callee_fndecl, ctxt);
      return true; /* Unknown side effects.  */
    }

  /* We have the body; interprocedural analysis will handle it.  */
  return false; /* No side effects.  */
}
2370 :
2371 : /* Update this model for the CALL stmt, using CTXT to report any
2372 : diagnostics - the second half.
2373 :
2374 : Updates to the region_model that should be made *after* sm-states
2375 : are updated are done here; other updates to the region_model are done
2376 : in region_model::on_call_pre.
2377 :
2378 : If UNKNOWN_SIDE_EFFECTS is true, also call handle_unrecognized_call
2379 : to purge state. */
2380 :
void
region_model::on_call_post (const gcall &call,
			    bool unknown_side_effects,
			    region_model_context *ctxt)
{
  if (tree callee_fndecl = get_fndecl_for_call (call, ctxt))
    {
      call_details cd (call, this, ctxt);
      /* Known functions model their own post-call effects.  */
      if (const known_function *kf = get_known_function (callee_fndecl, cd))
	{
	  kf->impl_call_post (cd);
	  return;
	}
      /* Was this fndecl referenced by
	 __attribute__((malloc(FOO)))?  */
      if (lookup_attribute ("*dealloc", DECL_ATTRIBUTES (callee_fndecl)))
	{
	  impl_deallocation_call (cd);
	  return;
	}
    }

  /* Otherwise, conservatively wipe state reachable from the call's
     arguments and globals.  */
  if (unknown_side_effects)
    {
      handle_unrecognized_call (call, ctxt);
      if (ctxt)
	ctxt->maybe_did_work ();
    }
}
2410 :
2411 : /* Purge state involving SVAL from this region_model, using CTXT
2412 : (if non-NULL) to purge other state in a program_state.
2413 :
2414 : For example, if we're at the def-stmt of an SSA name, then we need to
2415 : purge any state for svalues that involve that SSA name. This avoids
2416 : false positives in loops, since a symbolic value referring to the
2417 : SSA name will be referring to the previous value of that SSA name.
2418 :
2419 : For example, in:
2420 : while ((e = hashmap_iter_next(&iter))) {
2421 : struct oid2strbuf *e_strbuf = (struct oid2strbuf *)e;
2422 : free (e_strbuf->value);
2423 : }
2424 : at the def-stmt of e_8:
2425 : e_8 = hashmap_iter_next (&iter);
2426 : we should purge the "freed" state of:
2427 : INIT_VAL(CAST_REG(‘struct oid2strbuf’, (*INIT_VAL(e_8))).value)
2428 : which is the "e_strbuf->value" value from the previous iteration,
2429 : or we will erroneously report a double-free - the "e_8" within it
2430 : refers to the previous value. */
2431 :
2432 : void
2433 30046 : region_model::purge_state_involving (const svalue *sval,
2434 : region_model_context *ctxt)
2435 : {
2436 30046 : if (!sval->can_have_associated_state_p ())
2437 : return;
2438 30046 : m_store.purge_state_involving (sval, m_mgr);
2439 30046 : m_constraints->purge_state_involving (sval);
2440 30046 : m_dynamic_extents.purge_state_involving (sval);
2441 30046 : if (ctxt)
2442 17748 : ctxt->purge_state_involving (sval);
2443 : }
2444 :
2445 : /* A pending_note subclass for adding a note about an
2446 : __attribute__((access, ...)) to a diagnostic. */
2447 :
class reason_attr_access : public pending_note_subclass<reason_attr_access>
{
public:
  reason_attr_access (tree callee_fndecl, const attr_access &access)
  : m_callee_fndecl (callee_fndecl),
    m_ptr_argno (access.ptrarg),
    m_access_str (TREE_STRING_POINTER (access.to_external_string ()))
  {
  }

  const char *get_kind () const final override { return "reason_attr_access"; }

  /* Emit a note pointing at the fndecl's declaration, describing the
     attribute on the relevant parameter.  */
  void emit () const final override
  {
    auto_urlify_attributes sentinel;
    inform (DECL_SOURCE_LOCATION (m_callee_fndecl),
	    "parameter %i of %qD marked with attribute %qs",
	    m_ptr_argno + 1, m_callee_fndecl, m_access_str);
  }

  /* Deduplicate by fndecl, argument index, and attribute text.  */
  bool operator== (const reason_attr_access &other) const
  {
    return (m_callee_fndecl == other.m_callee_fndecl
	    && m_ptr_argno == other.m_ptr_argno
	    && !strcmp (m_access_str, other.m_access_str));
  }

private:
  tree m_callee_fndecl;
  /* 0-based index of the pointer argument the attribute refers to.  */
  unsigned m_ptr_argno;
  /* Human-readable rendering of the attribute.  */
  const char *m_access_str;
};
2480 :
2481 : /* Check CALL a call to external function CALLEE_FNDECL based on
2482 : any __attribute__ ((access, ....) on the latter, complaining to
2483 : CTXT about any issues.
2484 :
2485 : Currently we merely call check_region_for_write on any regions
2486 : pointed to by arguments marked with a "write_only" or "read_write"
2487 : attribute. */
2488 :
void
region_model::check_function_attr_access (const gcall &call,
					  tree callee_fndecl,
					  region_model_context *ctxt,
					  rdwr_map &rdwr_idx) const
{
  gcc_assert (callee_fndecl);
  gcc_assert (ctxt);

  tree fntype = TREE_TYPE (callee_fndecl);
  gcc_assert (fntype);

  unsigned argno = 0;

  /* Walk the parameter types, checking each against any access
     specification recorded for it in RDWR_IDX.  */
  for (tree iter = TYPE_ARG_TYPES (fntype); iter;
       iter = TREE_CHAIN (iter), ++argno)
    {
      const attr_access* access = rdwr_idx.get (argno);
      if (!access)
	continue;

      /* Ignore any duplicate entry in the map for the size argument.  */
      if (access->ptrarg != argno)
	continue;

      if (access->mode == access_write_only
	  || access->mode == access_read_write)
	{
	  /* Subclass of annotating_context that
	     adds a note about the attr access to any saved diagnostics.  */
	  class annotating_ctxt : public annotating_context
	  {
	  public:
	    annotating_ctxt (tree callee_fndecl,
			     const attr_access &access,
			     region_model_context *ctxt)
	    : annotating_context (ctxt),
	      m_callee_fndecl (callee_fndecl),
	      m_access (access)
	    {
	    }
	    void add_annotations () final override
	    {
	      add_note (std::make_unique<reason_attr_access>
			(m_callee_fndecl, m_access));
	    }
	  private:
	    tree m_callee_fndecl;
	    const attr_access &m_access;
	  };

	  /* Use this ctxt below so that any diagnostics get the
	     note added to them.  */
	  annotating_ctxt my_ctxt (callee_fndecl, *access, ctxt);

	  /* Simulate a write to the pointed-to region, so that e.g.
	     writes to read-only memory get flagged.  */
	  tree ptr_tree = gimple_call_arg (&call, access->ptrarg);
	  const svalue *ptr_sval = get_rvalue (ptr_tree, &my_ctxt);
	  const region *reg = deref_rvalue (ptr_sval, ptr_tree, &my_ctxt);
	  check_region_for_write (reg, nullptr, &my_ctxt);
	  /* We don't use the size arg for now.  */
	}
    }
}
2552 :
2553 : /* Subroutine of region_model::check_function_attr_null_terminated_string_arg,
2554 : checking one instance of __attribute__((null_terminated_string_arg)). */
2555 :
void
region_model::
check_one_function_attr_null_terminated_string_arg (const gcall &call,
						    tree callee_fndecl,
						    region_model_context *ctxt,
						    rdwr_map &rdwr_idx,
						    tree attr)
{
  gcc_assert (callee_fndecl);
  gcc_assert (ctxt);
  gcc_assert (attr);

  tree arg = TREE_VALUE (attr);
  if (!arg)
    return;

  /* Convert from 1-based to 0-based index.  */
  unsigned int arg_idx = TREE_INT_CST_LOW (TREE_VALUE (arg)) - 1;

  /* If there's also an "access" attribute on the ptr param
     for reading with a size param specified, then that size
     limits the size of the possible read from the pointer.  */
  if (const attr_access* access = rdwr_idx.get (arg_idx))
    if ((access->mode == access_read_only
	 || access->mode == access_read_write)
	&& access->sizarg != UINT_MAX)
      {
	call_details cd_checked (call, this, ctxt);
	const svalue *limit_sval
	  = cd_checked.get_arg_svalue (access->sizarg);
	const svalue *ptr_sval
	  = cd_checked.get_arg_svalue (arg_idx);
	/* Try reading all of the bytes expressed by the size param,
	   but without emitting warnings (via a null context).  */
	const svalue *limited_sval
	  = read_bytes (deref_rvalue (ptr_sval, NULL_TREE, nullptr),
			NULL_TREE,
			limit_sval,
			nullptr);
	if (limited_sval->get_kind () == SK_POISONED)
	  {
	    /* Reading up to the truncation limit caused issues.
	       Assume that the string is meant to be terminated
	       before then, so perform a *checked* check for the
	       terminator.  */
	    check_for_null_terminated_string_arg (cd_checked,
						  arg_idx);
	  }
	else
	  {
	    /* Reading up to the truncation limit seems OK; repeat
	       the read, but with checking enabled.  */
	    read_bytes (deref_rvalue (ptr_sval, NULL_TREE, ctxt),
			NULL_TREE,
			limit_sval,
			ctxt);
	  }
	return;
      }

  /* Otherwise, we don't have an access-attribute limiting the read.
     Simulate a read up to the null terminator (if any).  */

  call_details cd (call, this, ctxt);
  check_for_null_terminated_string_arg (cd, arg_idx);
}
2622 :
2623 : /* Check CALL a call to external function CALLEE_FNDECL for any uses
2624 : of __attribute__ ((null_terminated_string_arg)), compaining
2625 : to CTXT about any issues.
2626 :
2627 : Use RDWR_IDX for tracking uses of __attribute__ ((access, ....). */
2628 :
void
region_model::
check_function_attr_null_terminated_string_arg (const gcall &call,
						tree callee_fndecl,
						region_model_context *ctxt,
						rdwr_map &rdwr_idx)
{
  gcc_assert (callee_fndecl);
  gcc_assert (ctxt);

  tree fntype = TREE_TYPE (callee_fndecl);
  gcc_assert (fntype);

  /* A function declaration can specify multiple attribute
     null_terminated_string_arg, each with one argument.  */
  for (tree attr = TYPE_ATTRIBUTES (fntype); attr; attr = TREE_CHAIN (attr))
    {
      /* Skip ahead to the next matching attribute in the chain
	 (note that this updates the loop variable itself); stop once
	 no further matches remain.  */
      attr = lookup_attribute ("null_terminated_string_arg", attr);
      if (!attr)
	return;

      check_one_function_attr_null_terminated_string_arg (call, callee_fndecl,
							  ctxt, rdwr_idx,
							  attr);
    }
}
2655 :
2656 : /* Check CALL a call to external function CALLEE_FNDECL for any
2657 : function attributes, complaining to CTXT about any issues. */
2658 :
2659 : void
2660 11847 : region_model::check_function_attrs (const gcall &call,
2661 : tree callee_fndecl,
2662 : region_model_context *ctxt)
2663 : {
2664 11847 : gcc_assert (callee_fndecl);
2665 11847 : gcc_assert (ctxt);
2666 :
2667 11847 : tree fntype = TREE_TYPE (callee_fndecl);
2668 11847 : if (!fntype)
2669 10590 : return;
2670 :
2671 11847 : if (!TYPE_ATTRIBUTES (fntype))
2672 : return;
2673 :
2674 : /* Initialize a map of attribute access specifications for arguments
2675 : to the function call. */
2676 1257 : rdwr_map rdwr_idx;
2677 1257 : init_attr_rdwr_indices (&rdwr_idx, TYPE_ATTRIBUTES (fntype));
2678 :
2679 1257 : check_function_attr_access (call, callee_fndecl, ctxt, rdwr_idx);
2680 1257 : check_function_attr_null_terminated_string_arg (call, callee_fndecl,
2681 : ctxt, rdwr_idx);
2682 1257 : }
2683 :
2684 : /* Handle a call CALL to a function with unknown behavior.
2685 :
2686 : Traverse the regions in this model, determining what regions are
2687 : reachable from pointer arguments to CALL and from global variables,
2688 : recursively.
2689 :
2690 : Set all reachable regions to new unknown values and purge sm-state
2691 : from their values, and from values that point to them. */
2692 :
void
region_model::handle_unrecognized_call (const gcall &call,
					region_model_context *ctxt)
{
  tree fndecl = get_fndecl_for_call (call, ctxt);

  /* Check attribute-based contracts (e.g. "access") before wiping
     state.  */
  if (fndecl && ctxt)
    check_function_attrs (call, fndecl, ctxt);

  reachable_regions reachable_regs (this);

  /* Determine the reachable regions and their mutability.  */
  {
    /* Add globals and regions that already escaped in previous
       unknown calls.  */
    m_store.for_each_cluster (reachable_regions::init_cluster_cb,
			      &reachable_regs);

    /* Params that are pointers.  */
    tree iter_param_types = NULL_TREE;
    if (fndecl)
      iter_param_types = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
    for (unsigned arg_idx = 0; arg_idx < gimple_call_num_args (&call);
	 arg_idx++)
      {
	/* Track expected param type, where available.  */
	tree param_type = NULL_TREE;
	if (iter_param_types)
	  {
	    param_type = TREE_VALUE (iter_param_types);
	    gcc_assert (param_type);
	    iter_param_types = TREE_CHAIN (iter_param_types);
	  }

	tree parm = gimple_call_arg (&call, arg_idx);
	const svalue *parm_sval = get_rvalue (parm, ctxt);
	reachable_regs.handle_parm (parm_sval, param_type);
      }
  }

  uncertainty_t *uncertainty = ctxt ? ctxt->get_uncertainty () : nullptr;

  /* Purge sm-state for the svalues that were reachable,
     both in non-mutable and mutable form.  */
  for (svalue_set::iterator iter
	 = reachable_regs.begin_reachable_svals ();
       iter != reachable_regs.end_reachable_svals (); ++iter)
    {
      const svalue *sval = (*iter);
      if (ctxt)
	ctxt->on_unknown_change (sval, false);
    }
  for (svalue_set::iterator iter
	 = reachable_regs.begin_mutable_svals ();
       iter != reachable_regs.end_mutable_svals (); ++iter)
    {
      const svalue *sval = (*iter);
      if (ctxt)
	ctxt->on_unknown_change (sval, true);
      /* Also record mutable svalues with the uncertainty tracker, so
	 they can be treated as "maybe-bound" later.  */
      if (uncertainty)
	uncertainty->on_mutable_sval_at_unknown_call (sval);
    }

  /* Mark any clusters that have escaped.  */
  reachable_regs.mark_escaped_clusters (ctxt);

  /* Update bindings for all clusters that have escaped, whether above,
     or previously.  */
  m_store.on_unknown_fncall (call, m_mgr->get_store_manager (),
			     conjured_purge (this, ctxt));

  /* Purge dynamic extents from any regions that have escaped mutably:
     realloc could have been called on them.  */
  for (hash_set<const region *>::iterator
	 iter = reachable_regs.begin_mutable_base_regs ();
       iter != reachable_regs.end_mutable_base_regs ();
       ++iter)
    {
      const region *base_reg = (*iter);
      unset_dynamic_extents (base_reg);
    }
}
2775 :
2776 : /* Traverse the regions in this model, determining what regions are
2777 : reachable from the store and populating *OUT.
2778 :
2779 : If EXTRA_SVAL is non-NULL, treat it as an additional "root"
2780 : for reachability (for handling return values from functions when
2781 : analyzing return of the only function on the stack).
2782 :
2783 : If UNCERTAINTY is non-NULL, treat any svalues that were recorded
2784 : within it as being maybe-bound as additional "roots" for reachability.
2785 :
2786 : Find svalues that haven't leaked. */
2787 :
void
region_model::get_reachable_svalues (svalue_set *out,
				     const svalue *extra_sval,
				     const uncertainty_t *uncertainty)
{
  reachable_regions reachable_regs (this);

  /* Add globals and regions that already escaped in previous
     unknown calls.  */
  m_store.for_each_cluster (reachable_regions::init_cluster_cb,
			    &reachable_regs);

  /* Extra root, e.g. a function's return value.  */
  if (extra_sval)
    reachable_regs.handle_sval (extra_sval);

  /* Treat maybe-bound svalues as additional roots.  */
  if (uncertainty)
    for (uncertainty_t::iterator iter
	   = uncertainty->begin_maybe_bound_svals ();
	 iter != uncertainty->end_maybe_bound_svals (); ++iter)
      reachable_regs.handle_sval (*iter);

  /* Get regions for locals that have explicitly bound values.  */
  for (store::cluster_map_t::iterator iter = m_store.begin ();
       iter != m_store.end (); ++iter)
    {
      const region *base_reg = (*iter).first;
      /* Frame-parented base regions are locals.  */
      if (const region *parent = base_reg->get_parent_region ())
	if (parent->get_kind () == RK_FRAME)
	  reachable_regs.add (base_reg, false);
    }

  /* Populate *OUT based on the values that were reachable.  */
  for (svalue_set::iterator iter
	 = reachable_regs.begin_reachable_svals ();
       iter != reachable_regs.end_reachable_svals (); ++iter)
    out->add (*iter);
}
2825 :
2826 : /* Update this model for the RETURN_STMT, using CTXT to report any
2827 : diagnostics. */
2828 :
2829 : void
2830 0 : region_model::on_return (const greturn *return_stmt, region_model_context *ctxt)
2831 : {
2832 0 : tree callee = get_current_function ()->decl;
2833 0 : tree lhs = DECL_RESULT (callee);
2834 0 : tree rhs = gimple_return_retval (return_stmt);
2835 :
2836 0 : if (lhs && rhs)
2837 : {
2838 0 : const svalue *sval = get_rvalue (rhs, ctxt);
2839 0 : const region *ret_reg = get_lvalue (lhs, ctxt);
2840 0 : set_value (ret_reg, sval, ctxt);
2841 : }
2842 0 : }
2843 :
2844 : /* Implementation of region_model::get_lvalue; the latter adds type-checking.
2845 :
2846 : Get the id of the region for PV within this region_model,
2847 : emitting any diagnostics to CTXT. */
2848 :
const region *
region_model::get_lvalue_1 (path_var pv, region_model_context *ctxt) const
{
  tree expr = pv.m_tree;

  gcc_assert (expr);

  /* Dispatch on the kind of tree expression, building the appropriate
     region via the region_model_manager.  */
  switch (TREE_CODE (expr))
    {
    default:
      /* Unhandled tree codes get a placeholder region.  */
      return m_mgr->get_region_for_unexpected_tree_code (ctxt, expr,
                                                         dump_location_t ());

    case ARRAY_REF:
      {
        /* ARRAY[INDEX].  */
        tree array = TREE_OPERAND (expr, 0);
        tree index = TREE_OPERAND (expr, 1);

        const region *array_reg = get_lvalue (array, ctxt);
        const svalue *index_sval = get_rvalue (index, ctxt);
        return m_mgr->get_element_region (array_reg,
                                          TREE_TYPE (TREE_TYPE (array)),
                                          index_sval);
      }
      break;

    case BIT_FIELD_REF:
      {
        tree inner_expr = TREE_OPERAND (expr, 0);
        const region *inner_reg = get_lvalue (inner_expr, ctxt);
        /* Operands 1 and 2 of a BIT_FIELD_REF are the number of bits and
           the starting bit offset; both must be INTEGER_CSTs.  */
        tree num_bits = TREE_OPERAND (expr, 1);
        tree first_bit_offset = TREE_OPERAND (expr, 2);
        gcc_assert (TREE_CODE (num_bits) == INTEGER_CST);
        gcc_assert (TREE_CODE (first_bit_offset) == INTEGER_CST);
        bit_range bits (TREE_INT_CST_LOW (first_bit_offset),
                        TREE_INT_CST_LOW (num_bits));
        return m_mgr->get_bit_range (inner_reg, TREE_TYPE (expr), bits);
      }
      break;

    case MEM_REF:
      {
        /* *(PTR + OFFSET).  */
        tree ptr = TREE_OPERAND (expr, 0);
        tree offset = TREE_OPERAND (expr, 1);
        const svalue *ptr_sval = get_rvalue (ptr, ctxt);
        const svalue *offset_sval = get_rvalue (offset, ctxt);
        const region *star_ptr = deref_rvalue (ptr_sval, ptr, ctxt);
        return m_mgr->get_offset_region (star_ptr,
                                         TREE_TYPE (expr),
                                         offset_sval);
      }
      break;

    case FUNCTION_DECL:
      return m_mgr->get_region_for_fndecl (expr);

    case LABEL_DECL:
      return m_mgr->get_region_for_label (expr);

    case VAR_DECL:
      /* Handle globals.  */
      if (is_global_var (expr))
        return m_mgr->get_region_for_global (expr);

      /* Fall through.  */

    case SSA_NAME:
    case PARM_DECL:
    case RESULT_DECL:
      {
        gcc_assert (TREE_CODE (expr) == SSA_NAME
                    || TREE_CODE (expr) == PARM_DECL
                    || VAR_P (expr)
                    || TREE_CODE (expr) == RESULT_DECL);

        /* Locals are looked up in the frame given by the path_var's
           stack depth, not necessarily the innermost frame.  */
        int stack_index = pv.m_stack_depth;
        const frame_region *frame = get_frame_at_index (stack_index);
        gcc_assert (frame);
        return frame->get_region_for_local (m_mgr, expr, ctxt);
      }

    case COMPONENT_REF:
      {
        /* obj.field  */
        tree obj = TREE_OPERAND (expr, 0);
        tree field = TREE_OPERAND (expr, 1);
        const region *obj_reg = get_lvalue (obj, ctxt);
        return m_mgr->get_field_region (obj_reg, field);
      }
      break;

    case STRING_CST:
      return m_mgr->get_region_for_string (expr);
    }
}
2944 :
2945 : /* Assert that SRC_TYPE can be converted to DST_TYPE as a no-op. */
2946 :
2947 : static void
2948 5710149 : assert_compat_types (tree src_type, tree dst_type)
2949 : {
2950 5710149 : if (src_type && dst_type && !VOID_TYPE_P (dst_type))
2951 : {
2952 : #if CHECKING_P
2953 5709764 : if (!(useless_type_conversion_p (src_type, dst_type)))
2954 0 : internal_error ("incompatible types: %qT and %qT", src_type, dst_type);
2955 : #endif
2956 : }
2957 5710149 : }
2958 :
2959 : /* Return true if SRC_TYPE can be converted to DST_TYPE as a no-op. */
2960 :
2961 : bool
2962 14739 : compat_types_p (tree src_type, tree dst_type)
2963 : {
2964 14739 : if (src_type && dst_type && !VOID_TYPE_P (dst_type))
2965 14739 : if (!(useless_type_conversion_p (src_type, dst_type)))
2966 : return false;
2967 : return true;
2968 : }
2969 :
2970 : /* Get the region for PV within this region_model,
2971 : emitting any diagnostics to CTXT. */
2972 :
2973 : const region *
2974 2570395 : region_model::get_lvalue (path_var pv, region_model_context *ctxt) const
2975 : {
2976 2570395 : if (pv.m_tree == NULL_TREE)
2977 : return nullptr;
2978 :
2979 2570395 : const region *result_reg = get_lvalue_1 (pv, ctxt);
2980 2570395 : assert_compat_types (result_reg->get_type (), TREE_TYPE (pv.m_tree));
2981 2570395 : return result_reg;
2982 : }
2983 :
2984 : /* Get the region for EXPR within this region_model (assuming the most
2985 : recent stack frame if it's a local). */
2986 :
2987 : const region *
2988 1590699 : region_model::get_lvalue (tree expr, region_model_context *ctxt) const
2989 : {
2990 1590699 : return get_lvalue (path_var (expr, get_stack_depth () - 1), ctxt);
2991 : }
2992 :
2993 : /* Implementation of region_model::get_rvalue; the latter adds type-checking.
2994 :
2995 : Get the value of PV within this region_model,
2996 : emitting any diagnostics to CTXT. */
2997 :
const svalue *
region_model::get_rvalue_1 (path_var pv, region_model_context *ctxt) const
{
  gcc_assert (pv.m_tree);

  /* Dispatch on the kind of tree expression, building the appropriate
     symbolic value via the region_model_manager.  */
  switch (TREE_CODE (pv.m_tree))
    {
    default:
      /* Unhandled tree codes are treated as unknown values.  */
      return m_mgr->get_or_create_unknown_svalue (TREE_TYPE (pv.m_tree));

    case ADDR_EXPR:
      {
        /* "&EXPR".  */
        tree expr = pv.m_tree;
        tree op0 = TREE_OPERAND (expr, 0);
        const region *expr_reg = get_lvalue (op0, ctxt);
        return m_mgr->get_ptr_svalue (TREE_TYPE (expr), expr_reg);
      }
      break;

    case BIT_FIELD_REF:
      {
        tree expr = pv.m_tree;
        tree op0 = TREE_OPERAND (expr, 0);
        const region *reg = get_lvalue (op0, ctxt);
        /* Operands 1 and 2 are the number of bits and the starting bit
           offset; both must be INTEGER_CSTs.  */
        tree num_bits = TREE_OPERAND (expr, 1);
        tree first_bit_offset = TREE_OPERAND (expr, 2);
        gcc_assert (TREE_CODE (num_bits) == INTEGER_CST);
        gcc_assert (TREE_CODE (first_bit_offset) == INTEGER_CST);
        bit_range bits (TREE_INT_CST_LOW (first_bit_offset),
                        TREE_INT_CST_LOW (num_bits));
        return get_rvalue_for_bits (TREE_TYPE (expr), reg, bits, ctxt);
      }

    case VAR_DECL:
      if (DECL_HARD_REGISTER (pv.m_tree))
        {
          /* If it has a hard register, it doesn't have a memory region
             and can't be referred to as an lvalue.  */
          return m_mgr->get_or_create_unknown_svalue (TREE_TYPE (pv.m_tree));
        }
      /* Fall through.  */
    case PARM_DECL:
    case SSA_NAME:
    case RESULT_DECL:
    case ARRAY_REF:
      {
        /* Read whatever value is bound to the lvalue's region.  */
        const region *reg = get_lvalue (pv, ctxt);
        return get_store_value (reg, ctxt);
      }

    case REALPART_EXPR:
    case IMAGPART_EXPR:
    case VIEW_CONVERT_EXPR:
      {
        /* Unary operations on the evaluated operand.  */
        tree expr = pv.m_tree;
        tree arg = TREE_OPERAND (expr, 0);
        const svalue *arg_sval = get_rvalue (arg, ctxt);
        const svalue *sval_unaryop
          = m_mgr->get_or_create_unaryop (TREE_TYPE (expr), TREE_CODE (expr),
                                          arg_sval);
        return sval_unaryop;
      };

    case INTEGER_CST:
    case REAL_CST:
    case COMPLEX_CST:
    case VECTOR_CST:
    case STRING_CST:
    case RAW_DATA_CST:
      /* Constants map directly to constant svalues.  */
      return m_mgr->get_or_create_constant_svalue (pv.m_tree);

    case POINTER_PLUS_EXPR:
      {
        tree expr = pv.m_tree;
        tree ptr = TREE_OPERAND (expr, 0);
        tree offset = TREE_OPERAND (expr, 1);
        const svalue *ptr_sval = get_rvalue (ptr, ctxt);
        const svalue *offset_sval = get_rvalue (offset, ctxt);
        const svalue *sval_binop
          = m_mgr->get_or_create_binop (TREE_TYPE (expr), POINTER_PLUS_EXPR,
                                        ptr_sval, offset_sval);
        return sval_binop;
      }

    /* Binary ops.  */
    case PLUS_EXPR:
    case MULT_EXPR:
    case BIT_AND_EXPR:
    case BIT_IOR_EXPR:
    case BIT_XOR_EXPR:
      {
        tree expr = pv.m_tree;
        tree arg0 = TREE_OPERAND (expr, 0);
        tree arg1 = TREE_OPERAND (expr, 1);
        const svalue *arg0_sval = get_rvalue (arg0, ctxt);
        const svalue *arg1_sval = get_rvalue (arg1, ctxt);
        const svalue *sval_binop
          = m_mgr->get_or_create_binop (TREE_TYPE (expr), TREE_CODE (expr),
                                        arg0_sval, arg1_sval);
        return sval_binop;
      }

    case COMPONENT_REF:
    case MEM_REF:
      {
        const region *ref_reg = get_lvalue (pv, ctxt);
        return get_store_value (ref_reg, ctxt);
      }
    case OBJ_TYPE_REF:
      {
        /* Look through OBJ_TYPE_REF to its underlying expression.  */
        tree expr = OBJ_TYPE_REF_EXPR (pv.m_tree);
        return get_rvalue (expr, ctxt);
      }
    }
}
3114 :
3115 : /* Get the value of PV within this region_model,
3116 : emitting any diagnostics to CTXT. */
3117 :
3118 : const svalue *
3119 3161771 : region_model::get_rvalue (path_var pv, region_model_context *ctxt) const
3120 : {
3121 3161771 : if (pv.m_tree == NULL_TREE)
3122 : return nullptr;
3123 :
3124 3125404 : const svalue *result_sval = get_rvalue_1 (pv, ctxt);
3125 :
3126 3125404 : assert_compat_types (result_sval->get_type (), TREE_TYPE (pv.m_tree));
3127 :
3128 3125404 : result_sval = check_for_poison (result_sval, pv.m_tree, nullptr, ctxt);
3129 :
3130 3125404 : return result_sval;
3131 : }
3132 :
3133 : /* Get the value of EXPR within this region_model (assuming the most
3134 : recent stack frame if it's a local). */
3135 :
3136 : const svalue *
3137 3161277 : region_model::get_rvalue (tree expr, region_model_context *ctxt) const
3138 : {
3139 3161277 : return get_rvalue (path_var (expr, get_stack_depth () - 1), ctxt);
3140 : }
3141 :
3142 : /* Return true if this model is on a path with "main" as the entrypoint
3143 : (as opposed to one in which we're merely analyzing a subset of the
3144 : path through the code). */
3145 :
3146 : bool
3147 224665 : region_model::called_from_main_p () const
3148 : {
3149 224665 : if (!m_current_frame)
3150 : return false;
3151 : /* Determine if the oldest stack frame in this model is for "main". */
3152 218302 : const frame_region *frame0 = get_frame_at_index (0);
3153 218302 : gcc_assert (frame0);
3154 218302 : return id_equal (DECL_NAME (frame0->get_function ().decl), "main");
3155 : }
3156 :
3157 : /* Subroutine of region_model::get_store_value for when REG is (or is within)
3158 : a global variable that hasn't been touched since the start of this path
3159 : (or was implicitly touched due to a call to an unknown function). */
3160 :
const svalue *
region_model::get_initial_value_for_global (const region *reg) const
{
  /* Get the decl that REG is for (or is within).  */
  const decl_region *base_reg
    = reg->get_base_region ()->dyn_cast_decl_region ();
  gcc_assert (base_reg);
  tree decl = base_reg->get_decl ();

  /* Special-case: to avoid having to explicitly update all previously
     untracked globals when calling an unknown fn, they implicitly have
     an unknown value if an unknown call has occurred, unless this is
     static to-this-TU and hasn't escaped.  Globals that have escaped
     are explicitly tracked, so we shouldn't hit this case for them.  */
  if (m_store.called_unknown_fn_p ()
      && TREE_PUBLIC (decl)
      && !TREE_READONLY (decl))
    return m_mgr->get_or_create_unknown_svalue (reg->get_type ());

  /* If we are on a path from the entrypoint from "main" and we have a
     global decl defined in this TU that hasn't been touched yet, then
     the initial value of REG can be taken from the initialization value
     of the decl.  Read-only decls are treated this way regardless of
     the entrypoint.  */
  if (called_from_main_p () || TREE_READONLY (decl))
    return reg->get_initial_value_at_main (m_mgr);

  /* Otherwise, return INIT_VAL(REG).  */
  return m_mgr->get_or_create_initial_value (reg);
}
3190 :
3191 : /* Get a value for REG, looking it up in the store, or otherwise falling
3192 : back to "initial" or "unknown" values.
3193 : Use CTXT to report any warnings associated with reading from REG. */
3194 :
const svalue *
region_model::get_store_value (const region *reg,
                               region_model_context *ctxt) const
{
  /* Getting the value of an empty region gives an unknown_svalue.  */
  if (reg->empty_p ())
    return m_mgr->get_or_create_unknown_svalue (reg->get_type ());

  /* Don't check for poison below if an out-of-bounds read was already
     detected for this access.  */
  bool check_poisoned = true;
  if (check_region_for_read (reg, ctxt))
    check_poisoned = false;

  /* Special-case: handle var_decls in the constant pool.  */
  if (const decl_region *decl_reg = reg->dyn_cast_decl_region ())
    if (const svalue *sval = decl_reg->maybe_get_constant_value (m_mgr))
      return sval;

  /* Try an explicit binding in the store, casting it to REG's type
     (if any).  */
  const svalue *sval
    = m_store.get_any_binding (m_mgr->get_store_manager (), reg);
  if (sval)
    {
      if (reg->get_type ())
        sval = m_mgr->get_or_create_cast (reg->get_type (), sval);
      return sval;
    }

  /* Special-case: read at a constant index within a STRING_CST.  */
  if (const offset_region *offset_reg = reg->dyn_cast_offset_region ())
    if (tree byte_offset_cst
          = offset_reg->get_byte_offset ()->maybe_get_constant ())
      if (const string_region *str_reg
            = reg->get_parent_region ()->dyn_cast_string_region ())
        {
          tree string_cst = str_reg->get_string_cst ();
          if (const svalue *char_sval
                = m_mgr->maybe_get_char_from_string_cst (string_cst,
                                                         byte_offset_cst))
            return m_mgr->get_or_create_cast (reg->get_type (), char_sval);
        }

  /* Special-case: read the initial char of a STRING_CST.  */
  if (const cast_region *cast_reg = reg->dyn_cast_cast_region ())
    if (const string_region *str_reg
          = cast_reg->get_parent_region ()->dyn_cast_string_region ())
      {
        tree string_cst = str_reg->get_string_cst ();
        tree byte_offset_cst = integer_zero_node;
        if (const svalue *char_sval
              = m_mgr->maybe_get_char_from_string_cst (string_cst,
                                                       byte_offset_cst))
          return m_mgr->get_or_create_cast (reg->get_type (), char_sval);
      }

  /* Otherwise we implicitly have the initial value of the region
     (if the cluster had been touched, binding_cluster::get_any_binding,
     would have returned UNKNOWN, and we would already have returned
     that above).  */

  /* Handle globals.  */
  if (reg->get_base_region ()->get_parent_region ()->get_kind ()
      == RK_GLOBALS)
    return get_initial_value_for_global (reg);

  return m_mgr->get_or_create_initial_value (reg, check_poisoned);
}
3260 :
3261 : /* Return false if REG does not exist, true if it may do.
3262 : This is for detecting regions within the stack that don't exist anymore
3263 : after frames are popped. */
3264 :
3265 : bool
3266 2647106 : region_model::region_exists_p (const region *reg) const
3267 : {
3268 : /* If within a stack frame, check that the stack frame is live. */
3269 2647106 : if (const frame_region *enclosing_frame = reg->maybe_get_frame_region ())
3270 : {
3271 : /* Check that the current frame is the enclosing frame, or is called
3272 : by it. */
3273 2615155 : for (const frame_region *iter_frame = get_current_frame (); iter_frame;
3274 560654 : iter_frame = iter_frame->get_calling_frame ())
3275 2598818 : if (iter_frame == enclosing_frame)
3276 : return true;
3277 : return false;
3278 : }
3279 :
3280 : return true;
3281 : }
3282 :
3283 : /* Get a region for referencing PTR_SVAL, creating a region if need be, and
3284 : potentially generating warnings via CTXT.
3285 : PTR_SVAL must be of pointer type.
3286 : PTR_TREE if non-NULL can be used when emitting diagnostics. */
3287 :
const region *
region_model::deref_rvalue (const svalue *ptr_sval, tree ptr_tree,
                            region_model_context *ctxt,
                            bool add_nonnull_constraint) const
{
  gcc_assert (ptr_sval);
  gcc_assert (POINTER_TYPE_P (ptr_sval->get_type ()));

  /* If we're dereferencing PTR_SVAL, assume that it is non-NULL; add this
     as a constraint.  This suppresses false positives from
     -Wanalyzer-null-dereference for the case where we later have an
     if (PTR_SVAL) that would occur if we considered the false branch
     and transitioned the malloc state machine from start->null.  */
  if (add_nonnull_constraint)
    {
      tree null_ptr_cst = build_int_cst (ptr_sval->get_type (), 0);
      const svalue *null_ptr
        = m_mgr->get_or_create_constant_svalue (null_ptr_cst);
      m_constraints->add_constraint (ptr_sval, NE_EXPR, null_ptr);
    }

  switch (ptr_sval->get_kind ())
    {
    default:
      break;

    case SK_REGION:
      {
        /* The pointer is known to point at a specific region;
           use that region directly.  */
        const region_svalue *region_sval
          = as_a <const region_svalue *> (ptr_sval);
        return region_sval->get_pointee ();
      }

    case SK_BINOP:
      {
        const binop_svalue *binop_sval
          = as_a <const binop_svalue *> (ptr_sval);
        switch (binop_sval->get_op ())
          {
          case POINTER_PLUS_EXPR:
            {
              /* If we have a symbolic value expressing pointer arithmetic,
                 try to convert it to a suitable region.  */
              const region *parent_region
                = deref_rvalue (binop_sval->get_arg0 (), NULL_TREE, ctxt);
              const svalue *offset = binop_sval->get_arg1 ();
              tree type = TREE_TYPE (ptr_sval->get_type ());
              return m_mgr->get_offset_region (parent_region, type, offset);
            }
          default:
            break;
          }
      }
      break;

    case SK_POISONED:
      {
        /* Dereference of a poisoned pointer: report it via CTXT
           (if we have one), then fall through to the symbolic region
           below.  */
        if (ctxt)
          {
            tree ptr = get_representative_tree (ptr_sval);
            /* If we can't get a representative tree for PTR_SVAL
               (e.g. if it hasn't been bound into the store), then
               fall back on PTR_TREE, if non-NULL.  */
            if (!ptr)
              ptr = ptr_tree;
            if (ptr)
              {
                const poisoned_svalue *poisoned_sval
                  = as_a <const poisoned_svalue *> (ptr_sval);
                enum poison_kind pkind = poisoned_sval->get_poison_kind ();
                ctxt->warn (make_poisoned_value_diagnostic
                              (ptr, pkind, nullptr, nullptr));
              }
          }
      }
      break;
    }

  /* Fallback: a symbolic region, "the region pointed to by PTR_SVAL".  */
  return m_mgr->get_symbolic_region (ptr_sval);
}
3368 :
3369 : /* Attempt to get BITS within any value of REG, as TYPE.
3370 : In particular, extract values from compound_svalues for the case
3371 : where there's a concrete binding at BITS.
3372 : Return an unknown svalue if we can't handle the given case.
3373 : Use CTXT to report any warnings associated with reading from REG. */
3374 :
3375 : const svalue *
3376 130 : region_model::get_rvalue_for_bits (tree type,
3377 : const region *reg,
3378 : const bit_range &bits,
3379 : region_model_context *ctxt) const
3380 : {
3381 130 : const svalue *sval = get_store_value (reg, ctxt);
3382 130 : return m_mgr->get_or_create_bits_within (type, bits, sval);
3383 : }
3384 :
3385 : /* Use CTXT to warn If DEST_REG is a region that shouldn't be written to. */
3386 :
void
region_model::check_for_writable_region (const region* dest_reg,
                                         region_model_context *ctxt) const
{
  /* Fail gracefully if CTXT is nullptr.  */
  if (!ctxt)
    return;

  /* Writability is determined by the kind of DEST_REG's base region.  */
  const region *base_reg = dest_reg->get_base_region ();
  switch (base_reg->get_kind ())
    {
    default:
      break;
    case RK_FUNCTION:
      {
        /* Writing to the region of a function.  */
        const function_region *func_reg = as_a <const function_region *> (base_reg);
        tree fndecl = func_reg->get_fndecl ();
        ctxt->warn (make_write_to_const_diagnostic (func_reg, fndecl));
      }
      break;
    case RK_LABEL:
      {
        /* Writing to the region of a label.  */
        const label_region *label_reg = as_a <const label_region *> (base_reg);
        tree label = label_reg->get_label ();
        ctxt->warn (make_write_to_const_diagnostic (label_reg, label));
      }
      break;
    case RK_DECL:
      {
        const decl_region *decl_reg = as_a <const decl_region *> (base_reg);
        tree decl = decl_reg->get_decl ();
        /* Warn about writes to const globals.
           Don't warn for writes to const locals, and params in particular,
           since we would warn in push_frame when setting them up (e.g the
           "this" param is "T* const").  */
        if (TREE_READONLY (decl)
            && is_global_var (decl))
          ctxt->warn (make_write_to_const_diagnostic (dest_reg, decl));
      }
      break;
    case RK_STRING:
      /* Writing into a string literal.  */
      ctxt->warn (make_write_to_string_literal_diagnostic (dest_reg));
      break;
    }
}
3432 :
3433 : /* Get the capacity of REG in bytes. */
3434 :
const svalue *
region_model::get_capacity (const region *reg) const
{
  switch (reg->get_kind ())
    {
    default:
      break;
    case RK_DECL:
      {
        const decl_region *decl_reg = as_a <const decl_region *> (reg);
        tree decl = decl_reg->get_decl ();
        if (TREE_CODE (decl) == SSA_NAME)
          {
            /* SSA names: use the size of the underlying type.
               NOTE(review): TYPE_SIZE is in bits, whereas decl_init_size
               below is in bytes — confirm the intended units here.  */
            tree type = TREE_TYPE (decl);
            tree size = TYPE_SIZE (type);
            return get_rvalue (size, nullptr);
          }
        else
          {
            /* Other decls: use decl_init_size, if known; otherwise
               fall through to the dynamic-extents lookup below.  */
            tree size = decl_init_size (decl, false);
            if (size)
              return get_rvalue (size, nullptr);
          }
      }
      break;
    case RK_SIZED:
      /* Look through sized regions to get at the capacity
         of the underlying regions.  */
      return get_capacity (reg->get_parent_region ());
    case RK_STRING:
      {
        /* "Capacity" here means "size".  */
        const string_region *string_reg = as_a <const string_region *> (reg);
        tree string_cst = string_reg->get_string_cst ();
        return m_mgr->get_or_create_int_cst (size_type_node,
                                             TREE_STRING_LENGTH (string_cst));
      }
      break;
    }

  /* Fall back on any dynamic extents recorded for this region,
     or "unknown".  */
  if (const svalue *recorded = get_dynamic_extents (reg))
    return recorded;

  return m_mgr->get_or_create_unknown_svalue (sizetype);
}
3480 :
3481 : /* If CTXT is non-NULL, use it to warn about any problems accessing REG,
3482 : using DIR to determine if this access is a read or write.
3483 : Return TRUE if an OOB access was detected.
3484 : If SVAL_HINT is non-NULL, use it as a hint in diagnostics
3485 : about the value that would be written to REG. */
3486 :
3487 : bool
3488 4505802 : region_model::check_region_access (const region *reg,
3489 : enum access_direction dir,
3490 : const svalue *sval_hint,
3491 : region_model_context *ctxt) const
3492 : {
3493 : /* Fail gracefully if CTXT is NULL. */
3494 4505802 : if (!ctxt)
3495 : return false;
3496 :
3497 840262 : bool oob_access_detected = false;
3498 840262 : check_region_for_taint (reg, dir, ctxt);
3499 840262 : if (!check_region_bounds (reg, dir, sval_hint, ctxt))
3500 779 : oob_access_detected = true;
3501 :
3502 840262 : switch (dir)
3503 : {
3504 0 : default:
3505 0 : gcc_unreachable ();
3506 : case access_direction::read:
3507 : /* Currently a no-op. */
3508 : break;
3509 272471 : case access_direction::write:
3510 272471 : check_for_writable_region (reg, ctxt);
3511 272471 : break;
3512 : }
3513 : return oob_access_detected;
3514 : }
3515 :
3516 : /* If CTXT is non-NULL, use it to warn about any problems writing to REG. */
3517 :
3518 : void
3519 371780 : region_model::check_region_for_write (const region *dest_reg,
3520 : const svalue *sval_hint,
3521 : region_model_context *ctxt) const
3522 : {
3523 371780 : check_region_access (dest_reg, access_direction::write, sval_hint, ctxt);
3524 371780 : }
3525 :
3526 : /* If CTXT is non-NULL, use it to warn about any problems reading from REG.
3527 : Returns TRUE if an OOB read was detected. */
3528 :
3529 : bool
3530 4134022 : region_model::check_region_for_read (const region *src_reg,
3531 : region_model_context *ctxt) const
3532 : {
3533 4134022 : return check_region_access (src_reg, access_direction::read, nullptr, ctxt);
3534 : }
3535 :
3536 : /* Concrete subclass for casts of pointers that lead to trailing bytes. */
3537 :
class dubious_allocation_size
: public pending_diagnostic_subclass<dubious_allocation_size>
{
public:
  /* LHS: the pointer region assigned to; RHS: the allocated region;
     CAPACITY_SVAL: the symbolic allocation size; EXPR: a tree for the
     size, if any; STMT: the statement at which the issue was found.  */
  dubious_allocation_size (const region *lhs, const region *rhs,
                           const svalue *capacity_sval, tree expr,
                           const gimple *stmt)
  : m_lhs (lhs), m_rhs (rhs),
    m_capacity_sval (capacity_sval), m_expr (expr),
    m_stmt (stmt),
    m_has_allocation_event (false)
  {
    gcc_assert (m_capacity_sval);
  }

  const char *get_kind () const final override
  {
    return "dubious_allocation_size";
  }

  /* Deduplicate by statement and by size expression.  */
  bool operator== (const dubious_allocation_size &other) const
  {
    return (m_stmt == other.m_stmt
            && pending_diagnostic::same_tree_p (m_expr, other.m_expr));
  }

  int get_controlling_option () const final override
  {
    return OPT_Wanalyzer_allocation_size;
  }

  bool emit (diagnostic_emission_context &ctxt) final override
  {
    /* CWE-131: Incorrect Calculation of Buffer Size.  */
    ctxt.add_cwe (131);

    return ctxt.warn ("allocated buffer size is not a multiple"
                      " of the pointee's size");
  }

  bool
  describe_final_event (pretty_printer &pp,
                        const evdesc::final_event &) final override
  {
    tree pointee_type = TREE_TYPE (m_lhs->get_type ());
    if (m_has_allocation_event)
      {
        /* The allocation itself was already described by
           add_region_creation_events; just describe the assignment.  */
        pp_printf (&pp,
                   "assigned to %qT here;"
                   " %<sizeof (%T)%> is %qE",
                   m_lhs->get_type (), pointee_type,
                   size_in_bytes (pointee_type));
        return true;
      }
    /* Fallback: Typically, we should always see an allocation_event
       before.  */
    if (m_expr)
      {
        if (TREE_CODE (m_expr) == INTEGER_CST)
          {
            /* Constant size: print it without quotes (%E).  */
            pp_printf (&pp,
                       "allocated %E bytes and assigned to"
                       " %qT here; %<sizeof (%T)%> is %qE",
                       m_expr, m_lhs->get_type (), pointee_type,
                       size_in_bytes (pointee_type));
            return true;
          }
        else
          {
            /* Symbolic size: print it quoted (%qE).  */
            pp_printf (&pp,
                       "allocated %qE bytes and assigned to"
                       " %qT here; %<sizeof (%T)%> is %qE",
                       m_expr, m_lhs->get_type (), pointee_type,
                       size_in_bytes (pointee_type));
            return true;
          }
      }

    /* No tree for the size at all.  */
    pp_printf (&pp,
               "allocated and assigned to %qT here;"
               " %<sizeof (%T)%> is %qE",
               m_lhs->get_type (), pointee_type,
               size_in_bytes (pointee_type));
    return true;
  }

  /* Add an event describing the allocation size at the point of
     region creation, and record that we did so (so that
     describe_final_event can omit the size).  */
  void
  add_region_creation_events (const region *,
                              tree capacity,
                              const event_loc_info &loc_info,
                              checker_path &emission_path) final override
  {
    emission_path.add_event
      (std::make_unique<region_creation_event_allocation_size>
         (capacity, loc_info));

    m_has_allocation_event = true;
  }

  void mark_interesting_stuff (interesting_t *interest) final override
  {
    interest->add_region_creation (m_rhs);
  }

  void
  maybe_add_sarif_properties (diagnostics::sarif_object &result_obj)
    const final override
  {
    auto &props = result_obj.get_or_create_properties ();
#define PROPERTY_PREFIX "gcc/analyzer/dubious_allocation_size/"
    props.set (PROPERTY_PREFIX "lhs", m_lhs->to_json ());
    props.set (PROPERTY_PREFIX "rhs", m_rhs->to_json ());
    props.set (PROPERTY_PREFIX "capacity_sval", m_capacity_sval->to_json ());
#undef PROPERTY_PREFIX
  }

private:
  const region *m_lhs;           /* Pointer region assigned to.  */
  const region *m_rhs;           /* Allocated region.  */
  const svalue *m_capacity_sval; /* Symbolic allocation size.  */
  const tree m_expr;             /* Tree for the size, if any.  */
  const gimple *m_stmt;          /* Statement where the issue was found.  */
  bool m_has_allocation_event;   /* Whether an allocation event was added.  */
};
3661 :
3662 : /* Return true on dubious allocation sizes for constant sizes. */
3663 :
3664 : static bool
3665 1887 : capacity_compatible_with_type (tree cst, tree pointee_size_tree,
3666 : bool is_struct)
3667 : {
3668 1887 : gcc_assert (TREE_CODE (cst) == INTEGER_CST);
3669 1887 : gcc_assert (TREE_CODE (pointee_size_tree) == INTEGER_CST);
3670 :
3671 1887 : unsigned HOST_WIDE_INT pointee_size = TREE_INT_CST_LOW (pointee_size_tree);
3672 1887 : unsigned HOST_WIDE_INT alloc_size = TREE_INT_CST_LOW (cst);
3673 :
3674 1887 : if (is_struct)
3675 639 : return alloc_size == 0 || alloc_size >= pointee_size;
3676 1248 : return alloc_size % pointee_size == 0;
3677 : }
3678 :
3679 : static bool
3680 394 : capacity_compatible_with_type (tree cst, tree pointee_size_tree)
3681 : {
3682 0 : return capacity_compatible_with_type (cst, pointee_size_tree, false);
3683 : }
3684 :
3685 : /* Checks whether SVAL could be a multiple of SIZE_CST.
3686 :
3687 : It works by visiting all svalues inside SVAL until it reaches
3688 : atomic nodes. From those, it goes back up again and adds each
3689 : node that is not a multiple of SIZE_CST to the RESULT_SET. */
3690 :
3691 2268 : class size_visitor : public visitor
3692 : {
3693 : public:
3694 1134 : size_visitor (tree size_cst, const svalue *root_sval, constraint_manager *cm)
3695 1134 : : m_size_cst (size_cst), m_root_sval (root_sval), m_cm (cm)
3696 : {
3697 1134 : m_root_sval->accept (this);
3698 1134 : }
3699 :
3700 1134 : bool is_dubious_capacity ()
3701 : {
3702 1134 : return result_set.contains (m_root_sval);
3703 : }
3704 :
3705 410 : void visit_constant_svalue (const constant_svalue *sval) final override
3706 : {
3707 410 : check_constant (sval->get_constant (), sval);
3708 410 : }
3709 :
3710 250 : void visit_unaryop_svalue (const unaryop_svalue *sval) final override
3711 : {
3712 250 : if (CONVERT_EXPR_CODE_P (sval->get_op ())
3713 291 : && result_set.contains (sval->get_arg ()))
3714 105 : result_set.add (sval);
3715 250 : }
3716 :
3717 406 : void visit_binop_svalue (const binop_svalue *sval) final override
3718 : {
3719 406 : const svalue *arg0 = sval->get_arg0 ();
3720 406 : const svalue *arg1 = sval->get_arg1 ();
3721 :
3722 406 : switch (sval->get_op ())
3723 : {
3724 288 : case MULT_EXPR:
3725 288 : if (result_set.contains (arg0) && result_set.contains (arg1))
3726 24 : result_set.add (sval);
3727 : break;
3728 90 : case PLUS_EXPR:
3729 90 : case MINUS_EXPR:
3730 90 : if (result_set.contains (arg0) || result_set.contains (arg1))
3731 28 : result_set.add (sval);
3732 : break;
3733 : default:
3734 : break;
3735 : }
3736 406 : }
3737 :
3738 0 : void visit_unmergeable_svalue (const unmergeable_svalue *sval) final override
3739 : {
3740 0 : if (result_set.contains (sval->get_arg ()))
3741 0 : result_set.add (sval);
3742 0 : }
3743 :
3744 12 : void visit_widening_svalue (const widening_svalue *sval) final override
3745 : {
3746 12 : const svalue *base = sval->get_base_svalue ();
3747 12 : const svalue *iter = sval->get_iter_svalue ();
3748 :
3749 12 : if (result_set.contains (base) || result_set.contains (iter))
3750 8 : result_set.add (sval);
3751 12 : }
3752 :
3753 303 : void visit_initial_svalue (const initial_svalue *sval) final override
3754 : {
3755 303 : equiv_class_id id = equiv_class_id::null ();
3756 303 : if (m_cm->get_equiv_class_by_svalue (sval, &id))
3757 : {
3758 75 : if (tree cst = id.get_obj (*m_cm).get_any_constant ())
3759 0 : check_constant (cst, sval);
3760 : }
3761 228 : else if (!m_cm->sval_constrained_p (sval))
3762 : {
3763 174 : result_set.add (sval);
3764 : }
3765 303 : }
3766 :
3767 30 : void visit_conjured_svalue (const conjured_svalue *sval) final override
3768 : {
3769 30 : equiv_class_id id = equiv_class_id::null ();
3770 30 : if (m_cm->get_equiv_class_by_svalue (sval, &id))
3771 13 : if (tree cst = id.get_obj (*m_cm).get_any_constant ())
3772 8 : check_constant (cst, sval);
3773 30 : }
3774 :
3775 : private:
3776 418 : void check_constant (tree cst, const svalue *sval)
3777 : {
3778 418 : switch (TREE_CODE (cst))
3779 : {
3780 : default:
3781 : /* Assume all unhandled operands are compatible. */
3782 : break;
3783 394 : case INTEGER_CST:
3784 394 : if (!capacity_compatible_with_type (cst, m_size_cst))
3785 68 : result_set.add (sval);
3786 : break;
3787 : }
3788 418 : }
3789 :
3790 : tree m_size_cst;
3791 : const svalue *m_root_sval;
3792 : constraint_manager *m_cm;
3793 : svalue_set result_set; /* Used as a mapping of svalue*->bool. */
3794 : };
3795 :
3796 : /* Return true if SIZE_CST is a power of 2, and we have
3797 : CAPACITY_SVAL == ((X | (Y - 1) ) + 1), since it is then a multiple
3798 : of SIZE_CST, as used by Linux kernel's round_up macro. */
3799 :
3800 : static bool
3801 1138 : is_round_up (tree size_cst,
3802 : const svalue *capacity_sval)
3803 : {
3804 1138 : if (!integer_pow2p (size_cst))
3805 : return false;
3806 1138 : const binop_svalue *binop_sval = capacity_sval->dyn_cast_binop_svalue ();
3807 1138 : if (!binop_sval)
3808 : return false;
3809 272 : if (binop_sval->get_op () != PLUS_EXPR)
3810 : return false;
3811 70 : tree rhs_cst = binop_sval->get_arg1 ()->maybe_get_constant ();
3812 70 : if (!rhs_cst)
3813 : return false;
3814 70 : if (!integer_onep (rhs_cst))
3815 : return false;
3816 :
3817 : /* We have CAPACITY_SVAL == (LHS + 1) for some LHS expression. */
3818 :
3819 4 : const binop_svalue *lhs_binop_sval
3820 4 : = binop_sval->get_arg0 ()->dyn_cast_binop_svalue ();
3821 4 : if (!lhs_binop_sval)
3822 : return false;
3823 4 : if (lhs_binop_sval->get_op () != BIT_IOR_EXPR)
3824 : return false;
3825 :
3826 4 : tree inner_rhs_cst = lhs_binop_sval->get_arg1 ()->maybe_get_constant ();
3827 4 : if (!inner_rhs_cst)
3828 : return false;
3829 :
3830 4 : if (wi::to_widest (inner_rhs_cst) + 1 != wi::to_widest (size_cst))
3831 : return false;
3832 : return true;
3833 : }
3834 :
3835 : /* Return true if CAPACITY_SVAL is known to be a multiple of SIZE_CST. */
3836 :
3837 : static bool
3838 1138 : is_multiple_p (tree size_cst,
3839 : const svalue *capacity_sval)
3840 : {
3841 1194 : if (const svalue *sval = capacity_sval->maybe_undo_cast ())
3842 : return is_multiple_p (size_cst, sval);
3843 :
3844 1138 : if (is_round_up (size_cst, capacity_sval))
3845 : return true;
3846 :
3847 : return false;
3848 : }
3849 :
3850 : /* Return true if we should emit a dubious_allocation_size warning
3851 : on assigning a region of capacity CAPACITY_SVAL bytes to a pointer
3852 : of type with size SIZE_CST, where CM expresses known constraints. */
3853 :
3854 : static bool
3855 1138 : is_dubious_capacity (tree size_cst,
3856 : const svalue *capacity_sval,
3857 : constraint_manager *cm)
3858 : {
3859 1138 : if (is_multiple_p (size_cst, capacity_sval))
3860 : return false;
3861 1134 : size_visitor v (size_cst, capacity_sval, cm);
3862 1134 : return v.is_dubious_capacity ();
3863 1134 : }
3864 :
3865 :
3866 : /* Return true if a struct or union either uses the inheritance pattern,
3867 : where the first field is a base struct, or the flexible array member
3868 : pattern, where the last field is an array without a specified size. */
3869 :
3870 : static bool
3871 3606 : struct_or_union_with_inheritance_p (tree struc)
3872 : {
3873 3606 : tree iter = TYPE_FIELDS (struc);
3874 3606 : if (iter == NULL_TREE)
3875 : return false;
3876 3598 : if (RECORD_OR_UNION_TYPE_P (TREE_TYPE (iter)))
3877 : return true;
3878 :
3879 : tree last_field;
3880 52118 : while (iter != NULL_TREE)
3881 : {
3882 48773 : last_field = iter;
3883 48773 : iter = DECL_CHAIN (iter);
3884 : }
3885 :
3886 3345 : if (last_field != NULL_TREE
3887 3345 : && TREE_CODE (TREE_TYPE (last_field)) == ARRAY_TYPE)
3888 : return true;
3889 :
3890 : return false;
3891 : }
3892 :
3893 : /* Return true if the lhs and rhs of an assignment have different types. */
3894 :
3895 : static bool
3896 188702 : is_any_cast_p (const gimple *stmt)
3897 : {
3898 188702 : if (const gassign *assign = dyn_cast <const gassign *> (stmt))
3899 148336 : return gimple_assign_cast_p (assign)
3900 271700 : || !pending_diagnostic::same_tree_p (
3901 123364 : TREE_TYPE (gimple_assign_lhs (assign)),
3902 123364 : TREE_TYPE (gimple_assign_rhs1 (assign)));
3903 40366 : else if (const gcall *call = dyn_cast <const gcall *> (stmt))
3904 : {
3905 39958 : tree lhs = gimple_call_lhs (call);
3906 70122 : return lhs != NULL_TREE && !pending_diagnostic::same_tree_p (
3907 30164 : TREE_TYPE (gimple_call_lhs (call)),
3908 : gimple_call_return_type (call));
3909 : }
3910 :
3911 : return false;
3912 : }
3913 :
/* On pointer assignments, check whether the buffer size of
   RHS_SVAL is compatible with the type of the LHS_REG.
   Use a non-null CTXT to report allocation size warnings.  */

void
region_model::check_region_size (const region *lhs_reg, const svalue *rhs_sval,
				 region_model_context *ctxt) const
{
  /* Without a context and statement there is nowhere to report to.  */
  if (!ctxt || ctxt->get_stmt () == nullptr)
    return;
  /* Only report warnings on assignments that actually change the type.  */
  if (!is_any_cast_p (ctxt->get_stmt ()))
    return;

  tree pointer_type = lhs_reg->get_type ();
  if (pointer_type == NULL_TREE || !POINTER_TYPE_P (pointer_type))
    return;

  tree pointee_type = TREE_TYPE (pointer_type);
  /* Make sure that the type on the left-hand side actually has a size.  */
  if (pointee_type == NULL_TREE || VOID_TYPE_P (pointee_type)
      || TYPE_SIZE_UNIT (pointee_type) == NULL_TREE)
    return;

  /* Bail out early on function pointers.  */
  if (TREE_CODE (pointee_type) == FUNCTION_TYPE)
    return;

  /* Bail out early on pointers to structs where we can
     not deduce whether the buffer size is compatible.  */
  bool is_struct = RECORD_OR_UNION_TYPE_P (pointee_type);
  if (is_struct && struct_or_union_with_inheritance_p (pointee_type))
    return;

  tree pointee_size_tree = size_in_bytes (pointee_type);
  /* We give up if the type size is not known at compile-time or the
     type size is always compatible regardless of the buffer size.  */
  if (TREE_CODE (pointee_size_tree) != INTEGER_CST
      || integer_zerop (pointee_size_tree)
      || integer_onep (pointee_size_tree))
    return;

  /* Find the region the rhs points at and query its capacity.  */
  const region *rhs_reg = deref_rvalue (rhs_sval, NULL_TREE, ctxt, false);
  const svalue *capacity = get_capacity (rhs_reg);
  switch (capacity->get_kind ())
    {
    case svalue_kind::SK_CONSTANT:
      {
	/* Concrete capacity: compare it directly against the pointee
	   size.  */
	const constant_svalue *cst_cap_sval
	  = as_a <const constant_svalue *> (capacity);
	tree cst_cap = cst_cap_sval->get_constant ();
	if (TREE_CODE (cst_cap) == INTEGER_CST
	    && !capacity_compatible_with_type (cst_cap, pointee_size_tree,
					       is_struct))
	  ctxt->warn
	    (std::make_unique <dubious_allocation_size> (lhs_reg, rhs_reg,
							 capacity, cst_cap,
							 ctxt->get_stmt ()));
      }
      break;
    default:
      {
	/* Symbolic capacity: only checked for non-struct pointees,
	   using the constraint manager to reason about the svalue.  */
	if (!is_struct)
	  {
	    if (is_dubious_capacity (pointee_size_tree,
				     capacity,
				     m_constraints))
	      {
		tree expr = get_representative_tree (capacity);
		ctxt->warn
		  (std::make_unique <dubious_allocation_size> (lhs_reg,
							       rhs_reg,
							       capacity, expr,
							       ctxt->get_stmt ()));
	      }
	  }
	break;
      }
    }
}
3994 :
3995 : /* Set the value of the region given by LHS_REG to the value given
3996 : by RHS_SVAL.
3997 : Use CTXT to report any warnings associated with writing to LHS_REG. */
3998 :
3999 : void
4000 370115 : region_model::set_value (const region *lhs_reg, const svalue *rhs_sval,
4001 : region_model_context *ctxt)
4002 : {
4003 370115 : gcc_assert (lhs_reg);
4004 370115 : gcc_assert (rhs_sval);
4005 :
4006 : /* Setting the value of an empty region is a no-op. */
4007 370115 : if (lhs_reg->empty_p ())
4008 : return;
4009 :
4010 370095 : check_region_size (lhs_reg, rhs_sval, ctxt);
4011 :
4012 370095 : check_region_for_write (lhs_reg, rhs_sval, ctxt);
4013 :
4014 641613 : m_store.set_value (m_mgr->get_store_manager(), lhs_reg, rhs_sval,
4015 271518 : ctxt ? ctxt->get_uncertainty () : nullptr);
4016 : }
4017 :
4018 : /* Set the value of the region given by LHS to the value given by RHS. */
4019 :
4020 : void
4021 84 : region_model::set_value (tree lhs, tree rhs, region_model_context *ctxt)
4022 : {
4023 84 : const region *lhs_reg = get_lvalue (lhs, ctxt);
4024 84 : const svalue *rhs_sval = get_rvalue (rhs, ctxt);
4025 84 : gcc_assert (lhs_reg);
4026 84 : gcc_assert (rhs_sval);
4027 84 : set_value (lhs_reg, rhs_sval, ctxt);
4028 84 : }
4029 :
/* Issue a note specifying that a particular function parameter is expected
   to be a valid null-terminated string.
   AD's m_arg_idx is 0-based; the note prints it 1-based for the user.  */

static void
inform_about_expected_null_terminated_string_arg (const call_arg_details &ad)
{
  // TODO: ideally we'd underline the param here
  inform (DECL_SOURCE_LOCATION (ad.m_called_fndecl),
	  "argument %d of %qD must be a pointer to a null-terminated string",
	  ad.m_arg_idx + 1, ad.m_called_fndecl);
}
4041 :
4042 : /* A binding of a specific svalue at a concrete byte range. */
4043 :
4044 : struct fragment
4045 : {
4046 3829 : fragment ()
4047 3829 : : m_byte_range (0, 0), m_sval (nullptr)
4048 : {
4049 3829 : }
4050 :
4051 1010 : fragment (const byte_range &bytes, const svalue *sval)
4052 1010 : : m_byte_range (bytes), m_sval (sval)
4053 : {
4054 : }
4055 :
4056 1994 : static int cmp_ptrs (const void *p1, const void *p2)
4057 : {
4058 1994 : const fragment *f1 = (const fragment *)p1;
4059 1994 : const fragment *f2 = (const fragment *)p2;
4060 1994 : return byte_range::cmp (f1->m_byte_range, f2->m_byte_range);
4061 : }
4062 :
4063 : void
4064 2 : dump_to_pp (pretty_printer *pp) const
4065 : {
4066 2 : pp_string (pp, "fragment(");
4067 2 : m_byte_range.dump_to_pp (pp);
4068 2 : pp_string (pp, ", sval: ");
4069 2 : if (m_sval)
4070 2 : m_sval->dump_to_pp (pp, true);
4071 : else
4072 0 : pp_string (pp, "nullptr");
4073 2 : pp_string (pp, ")");
4074 2 : }
4075 :
4076 : byte_range m_byte_range;
4077 : const svalue *m_sval;
4078 : };
4079 :
4080 : /* Determine if there is a zero terminator somewhere in the
4081 : part of STRING_CST covered by BYTES (where BYTES is relative to the
4082 : start of the constant).
4083 :
4084 : Return a tristate:
4085 : - true if there definitely is a zero byte, writing to *OUT_BYTES_READ
4086 : the number of bytes from that would be read, including the zero byte.
4087 : - false if there definitely isn't a zero byte
4088 : - unknown if we don't know. */
4089 :
4090 : static tristate
4091 463 : string_cst_has_null_terminator (tree string_cst,
4092 : const byte_range &bytes,
4093 : byte_offset_t *out_bytes_read)
4094 : {
4095 463 : gcc_assert (bytes.m_start_byte_offset >= 0);
4096 :
4097 : /* If we're beyond the string_cst, reads are unsuccessful. */
4098 463 : if (tree cst_size = get_string_cst_size (string_cst))
4099 463 : if (TREE_CODE (cst_size) == INTEGER_CST)
4100 463 : if (bytes.m_start_byte_offset >= TREE_INT_CST_LOW (cst_size))
4101 0 : return tristate::unknown ();
4102 :
4103 : /* Assume all bytes after TREE_STRING_LENGTH are zero. This handles
4104 : the case where an array is initialized with a string_cst that isn't
4105 : as long as the array, where the remaining elements are
4106 : empty-initialized and thus zeroed. */
4107 463 : if (bytes.m_start_byte_offset >= TREE_STRING_LENGTH (string_cst))
4108 : {
4109 2 : *out_bytes_read = 1;
4110 2 : return tristate (true);
4111 : }
4112 :
4113 : /* Look for the first 0 byte within STRING_CST
4114 : from START_READ_OFFSET onwards. */
4115 461 : const byte_offset_t num_bytes_to_search
4116 922 : = std::min<byte_offset_t> ((TREE_STRING_LENGTH (string_cst)
4117 461 : - bytes.m_start_byte_offset),
4118 461 : bytes.m_size_in_bytes);
4119 461 : const char *start = (TREE_STRING_POINTER (string_cst)
4120 461 : + bytes.m_start_byte_offset.slow ());
4121 461 : if (num_bytes_to_search >= 0)
4122 461 : if (const void *p = memchr (start, 0, bytes.m_size_in_bytes.slow ()))
4123 : {
4124 332 : *out_bytes_read = (const char *)p - start + 1;
4125 332 : return tristate (true);
4126 : }
4127 :
4128 129 : *out_bytes_read = bytes.m_size_in_bytes;
4129 129 : return tristate (false);
4130 : }
4131 :
4132 : static tristate
4133 : svalue_byte_range_has_null_terminator (const svalue *sval,
4134 : const byte_range &bytes,
4135 : byte_offset_t *out_bytes_read,
4136 : logger *logger);
4137 :
/* Determine if there is a zero terminator somewhere in the
   part of SVAL covered by BYTES (where BYTES is relative to the svalue).

   Return a tristate:
   - true if there definitely is a zero byte, writing to *OUT_BYTES_READ
   the number of bytes from that would be read, including the zero byte.
   - false if there definitely isn't a zero byte
   - unknown if we don't know.

   Use LOGGER (if non-null) for any logging.  */

static tristate
svalue_byte_range_has_null_terminator_1 (const svalue *sval,
					 const byte_range &bytes,
					 byte_offset_t *out_bytes_read,
					 logger *logger)
{
  if (bytes.m_start_byte_offset == 0
      && sval->all_zeroes_p ())
    {
      /* The initial byte of an all-zeroes SVAL is a zero byte.  */
      *out_bytes_read = 1;
      return tristate (true);
    }

  /* Only a few svalue kinds can be answered precisely; any other kind
     (and any unhandled sub-case) falls through to "unknown" below.  */
  switch (sval->get_kind ())
    {
    case SK_CONSTANT:
      {
	tree cst
	  = as_a <const constant_svalue *> (sval)->get_constant ();
	switch (TREE_CODE (cst))
	  {
	  case STRING_CST:
	    /* Delegate to the STRING_CST scanner.  */
	    return string_cst_has_null_terminator (cst, bytes, out_bytes_read);
	  case INTEGER_CST:
	    if (bytes.m_start_byte_offset == 0
		&& integer_onep (TYPE_SIZE_UNIT (TREE_TYPE (cst))))
	      {
		/* Model accesses to the initial byte of a 1-byte
		   INTEGER_CST.  */
		*out_bytes_read = 1;
		if (zerop (cst))
		  return tristate (true);
		else
		  return tristate (false);
	      }
	    /* Treat any other access to an INTEGER_CST as unknown.  */
	    return tristate::TS_UNKNOWN;

	  default:
	    break;
	  }
      }
      break;

    case SK_INITIAL:
      {
	/* The initial value of a string literal's region is the
	   literal itself: scan it directly.  */
	const initial_svalue *initial_sval = (const initial_svalue *)sval;
	const region *reg = initial_sval->get_region ();
	if (const string_region *string_reg = reg->dyn_cast_string_region ())
	  {
	    tree string_cst = string_reg->get_string_cst ();
	    return string_cst_has_null_terminator (string_cst,
						   bytes,
						   out_bytes_read);
	  }
	return tristate::TS_UNKNOWN;
      }
      break;

    case SK_BITS_WITHIN:
      {
	const bits_within_svalue *bits_within_sval
	  = (const bits_within_svalue *)sval;
	byte_range bytes_within_inner (0, 0);
	if (bits_within_sval->get_bits ().as_byte_range (&bytes_within_inner))
	  {
	    /* Consider e.g. looking for null terminator of
	       bytes 2-4 of BITS_WITHIN(bytes 10-15 of inner_sval)

	       This is equivalent to looking within bytes 12-14 of
	       inner_sval.  */
	    const byte_offset_t start_byte_relative_to_inner
	      = (bytes.m_start_byte_offset
		 + bytes_within_inner.m_start_byte_offset);
	    const byte_offset_t next_byte_relative_to_inner
	      = (bytes.get_next_byte_offset ()
		 + bytes_within_inner.m_start_byte_offset);
	    if (next_byte_relative_to_inner > start_byte_relative_to_inner)
	      {
		const byte_range relative_to_inner
		  (start_byte_relative_to_inner,
		   next_byte_relative_to_inner - start_byte_relative_to_inner);
		const svalue *inner_sval
		  = bits_within_sval->get_inner_svalue ();
		/* Recurse (via the logging wrapper) on the inner svalue.  */
		return svalue_byte_range_has_null_terminator (inner_sval,
							      relative_to_inner,
							      out_bytes_read,
							      logger);
	      }
	  }
      }
      break;

    default:
      // TODO: it may be possible to handle other cases here.
      break;
    }
  return tristate::TS_UNKNOWN;
}
4249 :
4250 : /* Like svalue_byte_range_has_null_terminator_1, but add logging. */
4251 :
4252 : static tristate
4253 833 : svalue_byte_range_has_null_terminator (const svalue *sval,
4254 : const byte_range &bytes,
4255 : byte_offset_t *out_bytes_read,
4256 : logger *logger)
4257 : {
4258 833 : LOG_SCOPE (logger);
4259 833 : if (logger)
4260 : {
4261 1 : pretty_printer *pp = logger->get_printer ();
4262 1 : logger->start_log_line ();
4263 1 : bytes.dump_to_pp (pp);
4264 1 : logger->log_partial (" of sval: ");
4265 1 : sval->dump_to_pp (pp, true);
4266 1 : logger->end_log_line ();
4267 : }
4268 833 : tristate ts
4269 833 : = svalue_byte_range_has_null_terminator_1 (sval, bytes,
4270 : out_bytes_read, logger);
4271 833 : if (logger)
4272 : {
4273 1 : pretty_printer *pp = logger->get_printer ();
4274 1 : logger->start_log_line ();
4275 1 : pp_printf (pp, "has null terminator: %s", ts.as_string ());
4276 1 : if (ts.is_true ())
4277 : {
4278 1 : pp_string (pp, "; bytes read: ");
4279 1 : pp_wide_int (pp, *out_bytes_read, SIGNED);
4280 : }
4281 1 : logger->end_log_line ();
4282 : }
4283 1666 : return ts;
4284 833 : }
4285 :
/* A frozen copy of a single base region's binding_cluster within a store,
   optimized for traversal of the concrete parts in byte order.
   This only captures concrete bindings, and is an implementation detail
   of region_model::scan_for_null_terminator.  */

class iterable_cluster
{
public:
  /* Snapshot CLUSTER (which may be null, meaning "no bindings"):
     record each concrete binding as a fragment, sorted by byte range,
     and remember any symbolic bindings separately.  */
  iterable_cluster (const binding_cluster *cluster)
  {
    if (!cluster)
      return;
    for (auto iter : cluster->get_map ().get_concrete_bindings ())
      {
	const bit_range &bits = iter.first;
	const svalue *sval = iter.second;

	/* Bindings that aren't byte-aligned are silently dropped.  */
	byte_range fragment_bytes (0, 0);
	if (bits.as_byte_range (&fragment_bytes))
	  m_fragments.safe_push (fragment (fragment_bytes, sval));
      }
    for (auto iter : cluster->get_map ().get_symbolic_bindings ())
      m_symbolic_bindings.safe_push (iter);
    m_fragments.qsort (fragment::cmp_ptrs);
  }

  /* Find the fragment (if any) whose byte range contains BYTE,
     writing it to *OUT_FRAG and returning true if found.  */
  bool
  get_fragment_for_byte (byte_offset_t byte, fragment *out_frag) const
  {
    /* TODO: binary search rather than linear.  */
    unsigned iter_idx;
    for (iter_idx = 0; iter_idx < m_fragments.length (); iter_idx++)
      {
	if (m_fragments[iter_idx].m_byte_range.contains_p (byte))
	  {
	    *out_frag = m_fragments[iter_idx];
	    return true;
	  }
      }
    return false;
  }

  /* Return true if the snapshotted cluster had any symbolic bindings.  */
  bool has_symbolic_bindings_p () const
  {
    return !m_symbolic_bindings.is_empty ();
  }

  /* Debug dump of this cluster to PP.  */
  void dump_to_pp (pretty_printer *pp) const
  {
    pp_string (pp, "iterable_cluster (fragments: [");
    /* NOTE(review): ranges over a pointer to the vec — presumably relies
       on vec.h's begin/end overloads for vec pointers; confirm.  */
    for (auto const &iter : &m_fragments)
      {
	if (&iter != m_fragments.begin ())
	  pp_string (pp, ", ");
	iter.dump_to_pp (pp);
      }
    pp_printf (pp, "], symbolic bindings: [");
    for (auto const &iter : m_symbolic_bindings)
      {
	if (&iter != m_symbolic_bindings.begin ())
	  pp_string (pp, ", ");
	iter.m_region->dump_to_pp (pp, true);
      }
    pp_string (pp, "])");
  }

private:
  auto_vec<fragment> m_fragments;
  auto_vec<binding_map::symbolic_binding> m_symbolic_bindings;
};
4356 :
4357 : /* Simulate reading the bytes at BYTES from BASE_REG.
4358 : Complain to CTXT about any issues with the read e.g. out-of-bounds. */
4359 :
4360 : const svalue *
4361 8153 : region_model::get_store_bytes (const region *base_reg,
4362 : const byte_range &bytes,
4363 : region_model_context *ctxt) const
4364 : {
4365 : /* Shortcut reading all of a string_region. */
4366 8153 : if (bytes.get_start_byte_offset () == 0)
4367 7926 : if (const string_region *string_reg = base_reg->dyn_cast_string_region ())
4368 4702 : if (bytes.m_size_in_bytes
4369 4702 : == TREE_STRING_LENGTH (string_reg->get_string_cst ()))
4370 4702 : return m_mgr->get_or_create_initial_value (base_reg);
4371 :
4372 3451 : const svalue *index_sval
4373 3451 : = m_mgr->get_or_create_int_cst (size_type_node,
4374 3451 : bytes.get_start_byte_offset ());
4375 3451 : const region *offset_reg = m_mgr->get_offset_region (base_reg,
4376 : NULL_TREE,
4377 : index_sval);
4378 3451 : const svalue *byte_size_sval
4379 3451 : = m_mgr->get_or_create_int_cst (size_type_node, bytes.m_size_in_bytes);
4380 3451 : const region *read_reg = m_mgr->get_sized_region (offset_reg,
4381 : NULL_TREE,
4382 : byte_size_sval);
4383 :
4384 : /* Simulate reading those bytes from the store. */
4385 3451 : const svalue *sval = get_store_value (read_reg, ctxt);
4386 3451 : return sval;
4387 : }
4388 :
/* Build a tree for the byte at BYTE_OFFSET within the buffer pointed to
   by PTR_EXPR: a char-typed MEM_REF at that offset.  Used by
   scan_for_null_terminator_1 to give check_for_poison an expression
   naming the specific byte being read.  */

static tree
get_tree_for_byte_offset (tree ptr_expr, byte_offset_t byte_offset)
{
  gcc_assert (ptr_expr);
  tree ptype = build_pointer_type_for_mode (char_type_node, ptr_mode, true);
  return fold_build2 (MEM_REF,
		      char_type_node,
		      ptr_expr, wide_int_to_tree (ptype, byte_offset));
}
4398 :
/* Simulate a series of reads of REG until we find a 0 byte
   (equivalent to calling strlen).

   Complain to CTXT and return NULL if:
   - the buffer pointed to isn't null-terminated
   - the buffer pointed to has any uninitialized bytes before any 0-terminator
   - any of the reads aren't within the bounds of the underlying base region

   Otherwise, return a svalue for the number of bytes read (strlen + 1),
   and, if OUT_SVAL is non-NULL, write to *OUT_SVAL with an svalue
   representing the content of REG up to and including the terminator.

   Algorithm
   =========

   Get offset for first byte to read.
   Find the binding (if any) that contains it.
   Find the size in bits of that binding.
   Round to the nearest byte (which way???)
     Or maybe give up if we have a partial binding there.
   Get the svalue from the binding.
   Determine the strlen (if any) of that svalue.
   Does it have a 0-terminator within it?
     If so, we have a partial read up to and including that terminator
      Read those bytes from the store; add to the result in the correct place.
      Finish
     If not, we have a full read of that svalue
      Read those bytes from the store; add to the result in the correct place.
      Update read/write offsets
      Continue
     If unknown:
      Result is unknown
      Finish
*/

const svalue *
region_model::scan_for_null_terminator_1 (const region *reg,
					  tree expr,
					  const svalue **out_sval,
					  region_model_context *ctxt) const
{
  logger *logger = ctxt ? ctxt->get_logger () : nullptr;

  /* A symbolic start offset defeats the byte-by-byte scan below.  */
  region_offset offset = reg->get_offset (m_mgr);
  if (offset.symbolic_p ())
    {
      if (out_sval)
	*out_sval = get_store_value (reg, nullptr);
      if (logger)
	logger->log ("offset is symbolic");
      return m_mgr->get_or_create_unknown_svalue (size_type_node);
    }
  byte_offset_t src_byte_offset;
  if (!offset.get_concrete_byte_offset (&src_byte_offset))
    {
      if (out_sval)
	*out_sval = get_store_value (reg, nullptr);
      if (logger)
	logger->log ("can't get concrete byte offset");
      return m_mgr->get_or_create_unknown_svalue (size_type_node);
    }
  const byte_offset_t initial_src_byte_offset = src_byte_offset;
  byte_offset_t dst_byte_offset = 0;

  const region *base_reg = reg->get_base_region ();

  /* Fast path: scanning directly within a string literal.  */
  if (const string_region *str_reg = base_reg->dyn_cast_string_region ())
    {
      tree string_cst = str_reg->get_string_cst ();
      if (src_byte_offset >= 0
	  && src_byte_offset < TREE_STRING_LENGTH (string_cst)
	  && wi::fits_shwi_p (src_byte_offset))
	{
	  HOST_WIDE_INT str_byte_offset = src_byte_offset.to_shwi ();
	  const char *effective_start
	    = TREE_STRING_POINTER (string_cst) + str_byte_offset;
	  size_t effective_len
	    = TREE_STRING_LENGTH (string_cst) - str_byte_offset;
	  if (const void *p = memchr (effective_start, 0,
				      effective_len))
	    {
	      size_t num_bytes_read
		= (const char *)p - effective_start + 1;
	      /* Simulate the read.  */
	      byte_range bytes_to_read (0, num_bytes_read);
	      const svalue *sval = get_store_bytes (reg, bytes_to_read, ctxt);
	      if (out_sval)
		*out_sval = sval;
	      if (logger)
		logger->log ("using string_cst");
	      return m_mgr->get_or_create_int_cst (size_type_node,
						   num_bytes_read);
	    }
	}
    }

  /* General case: take a frozen, byte-ordered snapshot of the base
     region's bindings and walk it fragment by fragment.  */
  const binding_cluster *cluster = m_store.get_cluster (base_reg);
  iterable_cluster c (cluster);
  if (logger)
    {
      pretty_printer *pp = logger->get_printer ();
      logger->start_log_line ();
      c.dump_to_pp (pp);
      logger->end_log_line ();
    }

  /* Accumulates the content read so far (only needed for OUT_SVAL).  */
  concrete_binding_map result;

  while (1)
    {
      fragment f;
      if (c.get_fragment_for_byte (src_byte_offset, &f))
	{
	  if (logger)
	    {
	      logger->start_log_line ();
	      pretty_printer *pp = logger->get_printer ();
	      pp_printf (pp, "src_byte_offset: ");
	      pp_wide_int (pp, src_byte_offset, SIGNED);
	      pp_string (pp, ": ");
	      f.dump_to_pp (pp);
	      logger->end_log_line ();
	    }
	  gcc_assert (f.m_byte_range.contains_p (src_byte_offset));
	  /* src_byte_offset and f.m_byte_range are both expressed relative to
	     the base region.
	     Convert to a byte_range relative to the svalue.  */
	  const byte_range bytes_relative_to_svalue
	    (src_byte_offset - f.m_byte_range.get_start_byte_offset (),
	     f.m_byte_range.get_next_byte_offset () - src_byte_offset);
	  byte_offset_t fragment_bytes_read;
	  tristate is_terminated
	    = svalue_byte_range_has_null_terminator (f.m_sval,
						     bytes_relative_to_svalue,
						     &fragment_bytes_read,
						     logger);
	  if (is_terminated.is_unknown ())
	    {
	      if (out_sval)
		*out_sval = get_store_value (reg, nullptr);
	      return m_mgr->get_or_create_unknown_svalue (size_type_node);
	    }

	  /* Simulate reading those bytes from the store.  */
	  byte_range bytes_to_read (src_byte_offset, fragment_bytes_read);
	  const svalue *sval = get_store_bytes (base_reg, bytes_to_read, ctxt);
	  check_for_poison (sval, expr, nullptr, ctxt);

	  if (out_sval)
	    {
	      byte_range bytes_to_write (dst_byte_offset, fragment_bytes_read);
	      result.insert (bytes_to_write, sval);
	    }

	  src_byte_offset += fragment_bytes_read;
	  dst_byte_offset += fragment_bytes_read;

	  if (is_terminated.is_true ())
	    {
	      if (out_sval)
		*out_sval = m_mgr->get_or_create_compound_svalue (NULL_TREE,
								  std::move (result));
	      if (logger)
		logger->log ("got terminator");
	      return m_mgr->get_or_create_int_cst (size_type_node,
						   dst_byte_offset);
	    }
	}
      else
	break;
    }

  /* No binding for this base_region, or no binding at src_byte_offset
     (or a symbolic binding).  */

  if (c.has_symbolic_bindings_p ())
    {
      if (out_sval)
	*out_sval = get_store_value (reg, nullptr);
      if (logger)
	logger->log ("got symbolic binding");
      return m_mgr->get_or_create_unknown_svalue (size_type_node);
    }

  /* TODO: the various special-cases seen in
     region_model::get_store_value.  */

  /* Simulate reading from this byte, then give up.  */
  byte_range bytes_to_read (src_byte_offset, 1);
  const svalue *sval = get_store_bytes (base_reg, bytes_to_read, ctxt);
  tree byte_expr
    = (expr
       ? get_tree_for_byte_offset (expr,
				   src_byte_offset - initial_src_byte_offset)
       : NULL_TREE);
  check_for_poison (sval, byte_expr, nullptr, ctxt);
  if (base_reg->can_have_initial_svalue_p ())
    {
      if (out_sval)
	*out_sval = get_store_value (reg, nullptr);
      return m_mgr->get_or_create_unknown_svalue (size_type_node);
    }
  else
    return nullptr;
}
4602 :
4603 : /* Like region_model::scan_for_null_terminator_1, but add logging. */
4604 :
4605 : const svalue *
4606 8508 : region_model::scan_for_null_terminator (const region *reg,
4607 : tree expr,
4608 : const svalue **out_sval,
4609 : region_model_context *ctxt) const
4610 : {
4611 8508 : logger *logger = ctxt ? ctxt->get_logger () : nullptr;
4612 8508 : LOG_SCOPE (logger);
4613 8508 : if (logger)
4614 : {
4615 2 : pretty_printer *pp = logger->get_printer ();
4616 2 : logger->start_log_line ();
4617 2 : logger->log_partial ("region: ");
4618 2 : reg->dump_to_pp (pp, true);
4619 2 : logger->end_log_line ();
4620 : }
4621 8508 : if (out_sval)
4622 1165 : *out_sval = nullptr;
4623 8508 : const svalue *sval = scan_for_null_terminator_1 (reg, expr, out_sval, ctxt);
4624 8508 : if (sval && out_sval)
4625 1158 : gcc_assert (*out_sval);
4626 8508 : if (logger)
4627 : {
4628 2 : pretty_printer *pp = logger->get_printer ();
4629 2 : logger->start_log_line ();
4630 2 : logger->log_partial ("length result: ");
4631 2 : if (sval)
4632 1 : sval->dump_to_pp (pp, true);
4633 : else
4634 1 : pp_printf (pp, "NULL");
4635 2 : logger->end_log_line ();
4636 2 : if (out_sval)
4637 : {
4638 2 : logger->start_log_line ();
4639 2 : logger->log_partial ("content result: ");
4640 2 : if (*out_sval)
4641 1 : (*out_sval)->dump_to_pp (pp, true);
4642 : else
4643 1 : pp_printf (pp, "NULL");
4644 2 : logger->end_log_line ();
4645 : }
4646 : }
4647 17016 : return sval;
4648 8508 : }
4649 :
4650 : /* Check that argument ARG_IDX (0-based) to the call described by CD
4651 : is a pointer to a valid null-terminated string.
4652 :
4653 : Simulate scanning through the buffer, reading until we find a 0 byte
4654 : (equivalent to calling strlen).
4655 :
4656 : Complain and return nullptr if:
4657 : - the buffer pointed to isn't null-terminated
4658 : - the buffer pointed to has any uninitalized bytes before any 0-terminator
4659 : - any of the reads aren't within the bounds of the underlying base region
4660 :
4661 : Otherwise, return a svalue for strlen of the buffer (*not* including
4662 : the null terminator).
4663 :
4664 : TODO: we should also complain if:
4665 : - the pointer is NULL (or could be). */
4666 :
4667 : const svalue *
4668 209 : region_model::check_for_null_terminated_string_arg (const call_details &cd,
4669 : unsigned arg_idx) const
4670 : {
4671 209 : return check_for_null_terminated_string_arg (cd,
4672 : arg_idx,
4673 : false, /* include_terminator */
4674 209 : nullptr); // out_sval
4675 : }
4676 :
4677 :
/* Check that argument ARG_IDX (0-based) to the call described by CD
   is a pointer to a valid null-terminated string.

   Simulate scanning through the buffer, reading until we find a 0 byte
   (equivalent to calling strlen).

   Complain and return nullptr if:
   - the buffer pointed to isn't null-terminated
   - the buffer pointed to has any uninitialized bytes before any 0-terminator
   - any of the reads aren't within the bounds of the underlying base region

   Otherwise, return a svalue.  This will be the number of bytes read
   (including the null terminator) if INCLUDE_TERMINATOR is true, or strlen
   of the buffer (not including the null terminator) if it is false.

   Also, when returning an svalue, if OUT_SVAL is non-nullptr, write to
   *OUT_SVAL with an svalue representing the content of the buffer up to
   and including the terminator.

   TODO: we should also complain if:
   - the pointer is NULL (or could be).  */

const svalue *
region_model::check_for_null_terminated_string_arg (const call_details &cd,
						    unsigned arg_idx,
						    bool include_terminator,
						    const svalue **out_sval) const
{
  /* Custom event added to diagnostic paths, describing that we were
     looking for a null terminator for the given argument when the
     problem occurred.  */
  class null_terminator_check_event : public custom_event
  {
  public:
    null_terminator_check_event (const event_loc_info &loc_info,
				 const call_arg_details &arg_details)
    : custom_event (loc_info),
      m_arg_details (arg_details)
    {
    }

    void print_desc (pretty_printer &pp) const final override
    {
      /* Two variants: with and without the argument expression,
	 depending on whether one is available.  */
      if (m_arg_details.m_arg_expr)
	pp_printf (&pp,
		   "while looking for null terminator"
		   " for argument %i (%qE) of %qD...",
		   m_arg_details.m_arg_idx + 1,
		   m_arg_details.m_arg_expr,
		   m_arg_details.m_called_fndecl);
      else
	pp_printf (&pp,
		   "while looking for null terminator"
		   " for argument %i of %qD...",
		   m_arg_details.m_arg_idx + 1,
		   m_arg_details.m_called_fndecl);
    }

  private:
    const call_arg_details m_arg_details;
  };

  /* Note added to diagnostics, informing the user that the callee
     expected a null-terminated string for this argument
     (via inform_about_expected_null_terminated_string_arg).  */
  class null_terminator_check_decl_note
    : public pending_note_subclass<null_terminator_check_decl_note>
  {
  public:
    null_terminator_check_decl_note (const call_arg_details &arg_details)
    : m_arg_details (arg_details)
    {
    }

    const char *get_kind () const final override
    {
      return "null_terminator_check_decl_note";
    }

    void emit () const final override
    {
      inform_about_expected_null_terminated_string_arg (m_arg_details);
    }

    bool operator== (const null_terminator_check_decl_note &other) const
    {
      return m_arg_details == other.m_arg_details;
    }

  private:
    const call_arg_details m_arg_details;
  };

  /* Subclass of decorated_region_model_context that
     adds the above event and note to any saved diagnostics.  */
  class annotating_ctxt : public annotating_context
  {
  public:
    annotating_ctxt (const call_details &cd,
		     unsigned arg_idx)
    : annotating_context (cd.get_ctxt ()),
      m_cd (cd),
      m_arg_idx (arg_idx)
    {
    }
    void add_annotations () final override
    {
      call_arg_details arg_details (m_cd, m_arg_idx);
      event_loc_info loc_info (m_cd.get_location (),
			       m_cd.get_model ()->get_current_function ()->decl,
			       m_cd.get_model ()->get_stack_depth ());

      add_event
	(std::make_unique<null_terminator_check_event> (loc_info,
							arg_details));
      add_note
	(std::make_unique <null_terminator_check_decl_note> (arg_details));
    }
  private:
    const call_details &m_cd;
    unsigned m_arg_idx;
  };

  /* Use this ctxt below so that any diagnostics that get added
     get annotated.  */
  annotating_ctxt my_ctxt (cd, arg_idx);

  /* Dereference the argument to get the buffer to be scanned.  */
  const svalue *arg_sval = cd.get_arg_svalue (arg_idx);
  const region *buf_reg
    = deref_rvalue (arg_sval, cd.get_arg_tree (arg_idx), &my_ctxt);

  if (const svalue *num_bytes_read_sval
	= scan_for_null_terminator (buf_reg,
				    cd.get_arg_tree (arg_idx),
				    out_sval,
				    &my_ctxt))
    {
      if (out_sval)
	gcc_assert (*out_sval);
      if (include_terminator)
	return num_bytes_read_sval;
      else
	{
	  /* strlen is (bytes_read - 1).  */
	  const svalue *one = m_mgr->get_or_create_int_cst (size_type_node, 1);
	  return m_mgr->get_or_create_binop (size_type_node,
					     MINUS_EXPR,
					     num_bytes_read_sval,
					     one);
	}
    }
  else
    return nullptr;
}
4826 :
4827 : /* Remove all bindings overlapping REG within the store. */
4828 :
4829 : void
4830 6656 : region_model::clobber_region (const region *reg)
4831 : {
4832 6656 : m_store.clobber_region (m_mgr->get_store_manager(), reg);
4833 6656 : }
4834 :
4835 : /* Remove any bindings for REG within the store. */
4836 :
4837 : void
4838 217114 : region_model::purge_region (const region *reg)
4839 : {
4840 217114 : m_store.purge_region (m_mgr->get_store_manager(), reg);
4841 217114 : }
4842 :
4843 : /* Fill REG with SVAL.
4844 : Use CTXT to report any warnings associated with the write
4845 : (e.g. out-of-bounds). */
4846 :
4847 : void
4848 742 : region_model::fill_region (const region *reg,
4849 : const svalue *sval,
4850 : region_model_context *ctxt)
4851 : {
4852 742 : check_region_for_write (reg, nullptr, ctxt);
4853 742 : m_store.fill_region (m_mgr->get_store_manager(), reg, sval);
4854 742 : }
4855 :
4856 : /* Zero-fill REG.
4857 : Use CTXT to report any warnings associated with the write
4858 : (e.g. out-of-bounds). */
4859 :
4860 : void
4861 846 : region_model::zero_fill_region (const region *reg,
4862 : region_model_context *ctxt)
4863 : {
4864 846 : check_region_for_write (reg, nullptr, ctxt);
4865 846 : m_store.zero_fill_region (m_mgr->get_store_manager(), reg);
4866 846 : }
4867 :
4868 : /* Copy NUM_BYTES_SVAL of SVAL to DEST_REG.
4869 : Use CTXT to report any warnings associated with the copy
4870 : (e.g. out-of-bounds writes). */
4871 :
4872 : void
4873 2218 : region_model::write_bytes (const region *dest_reg,
4874 : const svalue *num_bytes_sval,
4875 : const svalue *sval,
4876 : region_model_context *ctxt)
4877 : {
4878 2218 : const region *sized_dest_reg
4879 2218 : = m_mgr->get_sized_region (dest_reg, NULL_TREE, num_bytes_sval);
4880 2218 : set_value (sized_dest_reg, sval, ctxt);
4881 2218 : }
4882 :
4883 : /* Read NUM_BYTES_SVAL from SRC_REG.
4884 : Use CTXT to report any warnings associated with the copy
4885 : (e.g. out-of-bounds reads, copying of uninitialized values, etc). */
4886 :
4887 : const svalue *
4888 1188 : region_model::read_bytes (const region *src_reg,
4889 : tree src_ptr_expr,
4890 : const svalue *num_bytes_sval,
4891 : region_model_context *ctxt) const
4892 : {
4893 1188 : if (num_bytes_sval->get_kind () == SK_UNKNOWN)
4894 223 : return m_mgr->get_or_create_unknown_svalue (NULL_TREE);
4895 965 : const region *sized_src_reg
4896 965 : = m_mgr->get_sized_region (src_reg, NULL_TREE, num_bytes_sval);
4897 965 : const svalue *src_contents_sval = get_store_value (sized_src_reg, ctxt);
4898 965 : check_for_poison (src_contents_sval, src_ptr_expr,
4899 : sized_src_reg, ctxt);
4900 965 : return src_contents_sval;
4901 : }
4902 :
4903 : /* Copy NUM_BYTES_SVAL bytes from SRC_REG to DEST_REG.
4904 : Use CTXT to report any warnings associated with the copy
4905 : (e.g. out-of-bounds reads/writes, copying of uninitialized values,
4906 : etc). */
4907 :
4908 : void
4909 564 : region_model::copy_bytes (const region *dest_reg,
4910 : const region *src_reg,
4911 : tree src_ptr_expr,
4912 : const svalue *num_bytes_sval,
4913 : region_model_context *ctxt)
4914 : {
4915 564 : const svalue *data_sval
4916 564 : = read_bytes (src_reg, src_ptr_expr, num_bytes_sval, ctxt);
4917 564 : write_bytes (dest_reg, num_bytes_sval, data_sval, ctxt);
4918 564 : }
4919 :
4920 : /* Mark REG as having unknown content. */
4921 :
4922 : void
4923 309 : region_model::mark_region_as_unknown (const region *reg,
4924 : uncertainty_t *uncertainty)
4925 : {
4926 309 : svalue_set maybe_live_values;
4927 309 : m_store.mark_region_as_unknown (m_mgr->get_store_manager(), reg,
4928 : uncertainty, &maybe_live_values);
4929 309 : m_store.on_maybe_live_values (*m_mgr->get_store_manager (),
4930 : maybe_live_values);
4931 309 : }
4932 :
/* Determine what is known about the condition "LHS_SVAL OP RHS_SVAL" within
   this model.  */

tristate
region_model::eval_condition (const svalue *lhs,
			      enum tree_code op,
			      const svalue *rhs) const
{
  gcc_assert (lhs);
  gcc_assert (rhs);

  /* For now, make no attempt to capture constraints on floating-point
     values.  */
  if ((lhs->get_type () && FLOAT_TYPE_P (lhs->get_type ()))
      || (rhs->get_type () && FLOAT_TYPE_P (rhs->get_type ())))
    return tristate::unknown ();

  /* See what we know based on the values.  */

  /* Unwrap any unmergeable values.  */
  lhs = lhs->unwrap_any_unmergeable ();
  rhs = rhs->unwrap_any_unmergeable ();

  if (lhs == rhs)
    {
      /* If we have the same svalue, then we have equality
	 (apart from NaN-handling).
	 TODO: should this definitely be the case for poisoned values?  */
      /* Poisoned and unknown values are "unknowable".  */
      if (lhs->get_kind () == SK_POISONED
	  || lhs->get_kind () == SK_UNKNOWN)
	return tristate::TS_UNKNOWN;

      switch (op)
	{
	case EQ_EXPR:
	case GE_EXPR:
	case LE_EXPR:
	  return tristate::TS_TRUE;

	case NE_EXPR:
	case GT_EXPR:
	case LT_EXPR:
	  return tristate::TS_FALSE;

	default:
	  /* For other ops, use the logic below.  */
	  break;
	}
    }

  /* If we have a pair of region_svalues, compare them.  */
  if (const region_svalue *lhs_ptr = lhs->dyn_cast_region_svalue ())
    if (const region_svalue *rhs_ptr = rhs->dyn_cast_region_svalue ())
      {
	tristate res = region_svalue::eval_condition (lhs_ptr, op, rhs_ptr);
	if (res.is_known ())
	  return res;
	/* Otherwise, only known through constraints.  */
      }

  if (const constant_svalue *cst_lhs = lhs->dyn_cast_constant_svalue ())
    {
      /* If we have a pair of constants, compare them.  */
      if (const constant_svalue *cst_rhs = rhs->dyn_cast_constant_svalue ())
	return constant_svalue::eval_condition (cst_lhs, op, cst_rhs);
      else
	{
	  /* When we have one constant, put it on the RHS.  */
	  std::swap (lhs, rhs);
	  op = swap_tree_comparison (op);
	}
    }
  /* After the swap above, any constant is on the RHS.  */
  gcc_assert (lhs->get_kind () != SK_CONSTANT);

  /* Handle comparison against zero.  */
  if (const constant_svalue *cst_rhs = rhs->dyn_cast_constant_svalue ())
    if (zerop (cst_rhs->get_constant ()))
      {
	if (const region_svalue *ptr = lhs->dyn_cast_region_svalue ())
	  {
	    /* A region_svalue is a non-NULL pointer, except in certain
	       special cases (see the comment for region::non_null_p).  */
	    const region *pointee = ptr->get_pointee ();
	    if (pointee->non_null_p ())
	      {
		switch (op)
		  {
		  default:
		    gcc_unreachable ();

		  case EQ_EXPR:
		  case GE_EXPR:
		  case LE_EXPR:
		    return tristate::TS_FALSE;

		  case NE_EXPR:
		  case GT_EXPR:
		  case LT_EXPR:
		    return tristate::TS_TRUE;
		  }
	      }
	  }
	else if (const binop_svalue *binop = lhs->dyn_cast_binop_svalue ())
	  {
	    /* Treat offsets from a non-NULL pointer as being non-NULL.  This
	       isn't strictly true, in that eventually ptr++ will wrap
	       around and be NULL, but it won't occur in practise and thus
	       can be used to suppress effectively false positives that we
	       shouldn't warn for.  */
	    if (binop->get_op () == POINTER_PLUS_EXPR)
	      {
		tristate lhs_ts = eval_condition (binop->get_arg0 (), op, rhs);
		if (lhs_ts.is_known ())
		  return lhs_ts;
	      }
	  }
	else if (const unaryop_svalue *unaryop
		   = lhs->dyn_cast_unaryop_svalue ())
	  {
	    if (unaryop->get_op () == NEGATE_EXPR)
	      {
		/* e.g. "-X <= 0" is equivalent to X >= 0".  */
		tristate lhs_ts = eval_condition (unaryop->get_arg (),
						  swap_tree_comparison (op),
						  rhs);
		if (lhs_ts.is_known ())
		  return lhs_ts;
	      }
	  }
      }

  /* Handle rejection of equality for comparisons of the initial values of
     "external" values (such as params) with the address of locals.  */
  if (const initial_svalue *init_lhs = lhs->dyn_cast_initial_svalue ())
    if (const region_svalue *rhs_ptr = rhs->dyn_cast_region_svalue ())
      {
	tristate res = compare_initial_and_pointer (init_lhs, rhs_ptr);
	if (res.is_known ())
	  return res;
      }
  /* Likewise for the symmetric case (initial value on the RHS).  */
  if (const initial_svalue *init_rhs = rhs->dyn_cast_initial_svalue ())
    if (const region_svalue *lhs_ptr = lhs->dyn_cast_region_svalue ())
      {
	tristate res = compare_initial_and_pointer (init_rhs, lhs_ptr);
	if (res.is_known ())
	  return res;
      }

  /* Try to evaluate a widened value against a constant, without
     consulting the constraint manager.  */
  if (const widening_svalue *widen_lhs = lhs->dyn_cast_widening_svalue ())
    if (tree rhs_cst = rhs->maybe_get_constant ())
      {
	tristate res = widen_lhs->eval_condition_without_cm (op, rhs_cst);
	if (res.is_known ())
	  return res;
      }

  /* Handle comparisons between two svalues with more than one operand.  */
  if (const binop_svalue *binop = lhs->dyn_cast_binop_svalue ())
    {
      switch (op)
	{
	default:
	  break;
	case EQ_EXPR:
	  {
	    /* TODO: binops can be equal even if they are not structurally
	       equal in case of commutative operators.  */
	    tristate res = structural_equality (lhs, rhs);
	    if (res.is_true ())
	      return res;
	  }
	  break;
	case LE_EXPR:
	  {
	    tristate res = structural_equality (lhs, rhs);
	    if (res.is_true ())
	      return res;
	  }
	  break;
	case GE_EXPR:
	  {
	    tristate res = structural_equality (lhs, rhs);
	    if (res.is_true ())
	      return res;
	    res = symbolic_greater_than (binop, rhs);
	    if (res.is_true ())
	      return res;
	  }
	  break;
	case GT_EXPR:
	  {
	    tristate res = symbolic_greater_than (binop, rhs);
	    if (res.is_true ())
	      return res;
	  }
	  break;
	}
    }

  /* Try range_op, but avoid cases where we have been sloppy about types.  */
  if (lhs->get_type ()
      && rhs->get_type ()
      && range_compatible_p (lhs->get_type (), rhs->get_type ()))
    {
      value_range lhs_vr, rhs_vr;
      if (lhs->maybe_get_value_range (lhs_vr))
	if (rhs->maybe_get_value_range (rhs_vr))
	  {
	    range_op_handler handler (op);
	    if (handler)
	      {
		int_range_max out;
		if (handler.fold_range (out, boolean_type_node, lhs_vr, rhs_vr))
		  {
		    /* A range of all-zero/all-nonzero booleans gives a
		       definite answer.  */
		    if (out.zero_p ())
		      return tristate::TS_FALSE;
		    if (out.nonzero_p ())
		      return tristate::TS_TRUE;
		  }
	      }
	  }
    }

  /* Attempt to unwrap cast if there is one, and the types match.  */
  tree lhs_type = lhs->get_type ();
  tree rhs_type = rhs->get_type ();
  if (lhs_type && rhs_type)
    {
      const unaryop_svalue *lhs_un_op = dyn_cast <const unaryop_svalue *> (lhs);
      const unaryop_svalue *rhs_un_op = dyn_cast <const unaryop_svalue *> (rhs);
      if (lhs_un_op && CONVERT_EXPR_CODE_P (lhs_un_op->get_op ())
	  && rhs_un_op && CONVERT_EXPR_CODE_P (rhs_un_op->get_op ())
	  && lhs_type == rhs_type)
	{
	  /* Both sides are casts: compare their operands.  */
	  tristate res = eval_condition (lhs_un_op->get_arg (),
					 op,
					 rhs_un_op->get_arg ());
	  if (res.is_known ())
	    return res;
	}
      else if (lhs_un_op && CONVERT_EXPR_CODE_P (lhs_un_op->get_op ())
	       && lhs_type == rhs_type)
	{
	  /* Only the LHS is a cast.  */
	  tristate res = eval_condition (lhs_un_op->get_arg (), op, rhs);
	  if (res.is_known ())
	    return res;
	}
      else if (rhs_un_op && CONVERT_EXPR_CODE_P (rhs_un_op->get_op ())
	       && lhs_type == rhs_type)
	{
	  /* Only the RHS is a cast.  */
	  tristate res = eval_condition (lhs, op, rhs_un_op->get_arg ());
	  if (res.is_known ())
	    return res;
	}
    }

  /* Otherwise, try constraints.
     Cast to const to ensure we don't change the constraint_manager as we
     do this (e.g. by creating equivalence classes).  */
  const constraint_manager *constraints = m_constraints;
  return constraints->eval_condition (lhs, op, rhs);
}
5196 :
5197 : /* Subroutine of region_model::eval_condition, for rejecting
5198 : equality of INIT_VAL(PARM) with &LOCAL. */
5199 :
5200 : tristate
5201 259 : region_model::compare_initial_and_pointer (const initial_svalue *init,
5202 : const region_svalue *ptr) const
5203 : {
5204 259 : const region *pointee = ptr->get_pointee ();
5205 :
5206 : /* If we have a pointer to something within a stack frame, it can't be the
5207 : initial value of a param. */
5208 259 : if (pointee->maybe_get_frame_region ())
5209 32 : if (init->initial_value_of_param_p ())
5210 32 : return tristate::TS_FALSE;
5211 :
5212 227 : return tristate::TS_UNKNOWN;
5213 : }
5214 :
5215 : /* Return true if SVAL is definitely positive. */
5216 :
5217 : static bool
5218 14313 : is_positive_svalue (const svalue *sval)
5219 : {
5220 14313 : if (tree cst = sval->maybe_get_constant ())
5221 14037 : return !zerop (cst) && get_range_pos_neg (cst) == 1;
5222 276 : tree type = sval->get_type ();
5223 276 : if (!type)
5224 : return false;
5225 : /* Consider a binary operation size_t + int. The analyzer wraps the int in
5226 : an unaryop_svalue, converting it to a size_t, but in the dynamic execution
5227 : the result is smaller than the first operand. Thus, we have to look if
5228 : the argument of the unaryop_svalue is also positive. */
5229 217 : if (const unaryop_svalue *un_op = dyn_cast <const unaryop_svalue *> (sval))
5230 10 : return CONVERT_EXPR_CODE_P (un_op->get_op ()) && TYPE_UNSIGNED (type)
5231 18 : && is_positive_svalue (un_op->get_arg ());
5232 207 : return TYPE_UNSIGNED (type);
5233 : }
5234 :
5235 : /* Return true if A is definitely larger than B.
5236 :
5237 : Limitation: does not account for integer overflows and does not try to
5238 : return false, so it can not be used negated. */
5239 :
5240 : tristate
5241 15744 : region_model::symbolic_greater_than (const binop_svalue *bin_a,
5242 : const svalue *b) const
5243 : {
5244 15744 : if (bin_a->get_op () == PLUS_EXPR || bin_a->get_op () == MULT_EXPR)
5245 : {
5246 : /* Eliminate the right-hand side of both svalues. */
5247 14345 : if (const binop_svalue *bin_b = dyn_cast <const binop_svalue *> (b))
5248 2544 : if (bin_a->get_op () == bin_b->get_op ()
5249 1412 : && eval_condition (bin_a->get_arg1 (),
5250 : GT_EXPR,
5251 1412 : bin_b->get_arg1 ()).is_true ()
5252 3956 : && eval_condition (bin_a->get_arg0 (),
5253 : GE_EXPR,
5254 63 : bin_b->get_arg0 ()).is_true ())
5255 40 : return tristate (tristate::TS_TRUE);
5256 :
5257 : /* Otherwise, try to remove a positive offset or factor from BIN_A. */
5258 14305 : if (is_positive_svalue (bin_a->get_arg1 ())
5259 14305 : && eval_condition (bin_a->get_arg0 (),
5260 13587 : GE_EXPR, b).is_true ())
5261 154 : return tristate (tristate::TS_TRUE);
5262 : }
5263 15550 : return tristate::unknown ();
5264 : }
5265 :
5266 : /* Return true if A and B are equal structurally.
5267 :
5268 : Structural equality means that A and B are equal if the svalues A and B have
5269 : the same nodes at the same positions in the tree and the leafs are equal.
5270 : Equality for conjured_svalues and initial_svalues is determined by comparing
5271 : the pointers while constants are compared by value. That behavior is useful
5272 : to check for binaryop_svlaues that evaluate to the same concrete value but
5273 : might use one operand with a different type but the same constant value.
5274 :
5275 : For example,
5276 : binop_svalue (mult_expr,
5277 : initial_svalue (‘size_t’, decl_region (..., 'some_var')),
5278 : constant_svalue (‘size_t’, 4))
5279 : and
5280 : binop_svalue (mult_expr,
5281 : initial_svalue (‘size_t’, decl_region (..., 'some_var'),
5282 : constant_svalue (‘sizetype’, 4))
5283 : are structurally equal. A concrete C code example, where this occurs, can
5284 : be found in test7 of out-of-bounds-5.c. */
5285 :
5286 : tristate
5287 14864 : region_model::structural_equality (const svalue *a, const svalue *b) const
5288 : {
5289 : /* If A and B are referentially equal, they are also structurally equal. */
5290 14864 : if (a == b)
5291 431 : return tristate (tristate::TS_TRUE);
5292 :
5293 14433 : switch (a->get_kind ())
5294 : {
5295 1227 : default:
5296 1227 : return tristate::unknown ();
5297 : /* SK_CONJURED and SK_INITIAL are already handled
5298 : by the referential equality above. */
5299 1057 : case SK_CONSTANT:
5300 1057 : {
5301 1057 : tree a_cst = a->maybe_get_constant ();
5302 1057 : tree b_cst = b->maybe_get_constant ();
5303 1057 : if (a_cst && b_cst)
5304 1813 : return tristate (tree_int_cst_equal (a_cst, b_cst));
5305 : }
5306 126 : return tristate (tristate::TS_FALSE);
5307 9 : case SK_UNARYOP:
5308 9 : {
5309 9 : const unaryop_svalue *un_a = as_a <const unaryop_svalue *> (a);
5310 9 : if (const unaryop_svalue *un_b = dyn_cast <const unaryop_svalue *> (b))
5311 8 : return tristate (pending_diagnostic::same_tree_p (un_a->get_type (),
5312 : un_b->get_type ())
5313 8 : && un_a->get_op () == un_b->get_op ()
5314 : && structural_equality (un_a->get_arg (),
5315 16 : un_b->get_arg ()));
5316 : }
5317 1 : return tristate (tristate::TS_FALSE);
5318 12140 : case SK_BINOP:
5319 12140 : {
5320 12140 : const binop_svalue *bin_a = as_a <const binop_svalue *> (a);
5321 12140 : if (const binop_svalue *bin_b = dyn_cast <const binop_svalue *> (b))
5322 2349 : return tristate (bin_a->get_op () == bin_b->get_op ()
5323 : && structural_equality (bin_a->get_arg0 (),
5324 2834 : bin_b->get_arg0 ())
5325 : && structural_equality (bin_a->get_arg1 (),
5326 2834 : bin_b->get_arg1 ()));
5327 : }
5328 10723 : return tristate (tristate::TS_FALSE);
5329 : }
5330 : }
5331 :
/* Handle various constraints of the form:
     LHS: ((bool)INNER_LHS INNER_OP INNER_RHS))
     OP : == or !=
     RHS: zero
   and (with a cast):
     LHS: CAST([long]int, ((bool)INNER_LHS INNER_OP INNER_RHS))
     OP : == or !=
     RHS: zero
   by adding constraints for INNER_LHS INNEROP INNER_RHS.

   Return true if this function can fully handle the constraint; if
   so, add the implied constraint(s) and write true to *OUT if they
   are consistent with existing constraints, or write false to *OUT
   if they contradicts existing constraints.

   Return false for cases that this function doesn't know how to handle.

   For example, if we're checking a stored conditional, we'll have
   something like:
     LHS: CAST(long int, (&HEAP_ALLOCATED_REGION(8)!=(int *)0B))
     OP : NE_EXPR
     RHS: zero
   which this function can turn into an add_constraint of:
     (&HEAP_ALLOCATED_REGION(8) != (int *)0B)

   Similarly, optimized && and || conditionals lead to e.g.
     if (p && q)
   becoming gimple like this:
     _1 = p_6 == 0B;
     _2 = q_8 == 0B
     _3 = _1 | _2
   On the "_3 is false" branch we can have constraints of the form:
     ((&HEAP_ALLOCATED_REGION(8)!=(int *)0B)
      | (&HEAP_ALLOCATED_REGION(10)!=(int *)0B))
     == 0
   which implies that both _1 and _2 are false,
   which this function can turn into a pair of add_constraints of
     (&HEAP_ALLOCATED_REGION(8)!=(int *)0B)
   and:
     (&HEAP_ALLOCATED_REGION(10)!=(int *)0B).  */

bool
region_model::add_constraints_from_binop (const svalue *outer_lhs,
					  enum tree_code outer_op,
					  const svalue *outer_rhs,
					  bool *out,
					  region_model_context *ctxt)
{
  /* Strip away any casts wrapping OUTER_LHS (e.g. the "CAST(long int,
     ...)" in the example above).  */
  while (const svalue *cast = outer_lhs->maybe_undo_cast ())
    outer_lhs = cast;
  /* Only binop LHS values compared against all-zeroes are handled
     here.  */
  const binop_svalue *binop_sval = outer_lhs->dyn_cast_binop_svalue ();
  if (!binop_sval)
    return false;
  if (!outer_rhs->all_zeroes_p ())
    return false;

  const svalue *inner_lhs = binop_sval->get_arg0 ();
  enum tree_code inner_op = binop_sval->get_op ();
  const svalue *inner_rhs = binop_sval->get_arg1 ();

  if (outer_op != NE_EXPR && outer_op != EQ_EXPR)
    return false;

  /* We have either
     - "OUTER_LHS != false" (i.e. OUTER is true), or
     - "OUTER_LHS == false" (i.e. OUTER is false).  */
  bool is_true = outer_op == NE_EXPR;

  switch (inner_op)
    {
    default:
      return false;

    case EQ_EXPR:
    case NE_EXPR:
    case GE_EXPR:
    case GT_EXPR:
    case LE_EXPR:
    case LT_EXPR:
      {
	/* ...and "(inner_lhs OP inner_rhs) == 0"
	   then (inner_lhs OP inner_rhs) must have the same
	   logical value as LHS.  */
	if (!is_true)
	  inner_op = invert_tree_comparison (inner_op, false /* honor_nans */);
	*out = add_constraint (inner_lhs, inner_op, inner_rhs, ctxt);
	return true;
      }
      break;

    case BIT_AND_EXPR:
      if (is_true)
	{
	  /* ...and "(inner_lhs & inner_rhs) != 0"
	     then both inner_lhs and inner_rhs must be true.  */
	  const svalue *false_sval
	    = m_mgr->get_or_create_constant_svalue (boolean_false_node);
	  bool sat1 = add_constraint (inner_lhs, NE_EXPR, false_sval, ctxt);
	  bool sat2 = add_constraint (inner_rhs, NE_EXPR, false_sval, ctxt);
	  *out = sat1 && sat2;
	  return true;
	}
      return false;

    case BIT_IOR_EXPR:
      if (!is_true)
	{
	  /* ...and "(inner_lhs | inner_rhs) == 0"
	     i.e. "(inner_lhs | inner_rhs)" is false
	     then both inner_lhs and inner_rhs must be false.  */
	  const svalue *false_sval
	    = m_mgr->get_or_create_constant_svalue (boolean_false_node);
	  bool sat1 = add_constraint (inner_lhs, EQ_EXPR, false_sval, ctxt);
	  bool sat2 = add_constraint (inner_rhs, EQ_EXPR, false_sval, ctxt);
	  *out = sat1 && sat2;
	  return true;
	}
      return false;
    }
}
5452 :
5453 : /* Attempt to add the constraint "LHS OP RHS" to this region_model.
5454 : If it is consistent with existing constraints, add it, and return true.
5455 : Return false if it contradicts existing constraints.
5456 : Use CTXT for reporting any diagnostics associated with the accesses. */
5457 :
5458 : bool
5459 77208 : region_model::add_constraint (tree lhs, enum tree_code op, tree rhs,
5460 : region_model_context *ctxt)
5461 : {
5462 : /* For now, make no attempt to capture constraints on floating-point
5463 : values. */
5464 77208 : if (FLOAT_TYPE_P (TREE_TYPE (lhs)) || FLOAT_TYPE_P (TREE_TYPE (rhs)))
5465 : return true;
5466 :
5467 76838 : const svalue *lhs_sval = get_rvalue (lhs, ctxt);
5468 76838 : const svalue *rhs_sval = get_rvalue (rhs, ctxt);
5469 :
5470 76838 : return add_constraint (lhs_sval, op, rhs_sval, ctxt);
5471 : }
5472 :
5473 : static bool
5474 17090 : unusable_in_infinite_loop_constraint_p (const svalue *sval)
5475 : {
5476 17090 : if (sval->get_kind () == SK_WIDENING)
5477 0 : return true;
5478 : return false;
5479 : }
5480 :
5481 : /* Attempt to add the constraint "LHS OP RHS" to this region_model.
5482 : If it is consistent with existing constraints, add it, and return true.
5483 : Return false if it contradicts existing constraints.
5484 : Use CTXT for reporting any diagnostics associated with the accesses. */
5485 :
bool
region_model::add_constraint (const svalue *lhs,
			      enum tree_code op,
			      const svalue *rhs,
			      region_model_context *ctxt)
{
  /* When exploring whether a loop is infinite, only rely on
     conditions that are stable from iteration to iteration.  */
  const bool checking_for_infinite_loop
    = ctxt ? ctxt->checking_for_infinite_loop_p () : false;

  if (checking_for_infinite_loop)
    {
      /* Reject constraints involving svalues (e.g. widening svalues)
	 that can't meaningfully participate in such a check.  */
      if (unusable_in_infinite_loop_constraint_p (lhs)
	  || unusable_in_infinite_loop_constraint_p (rhs))
	{
	  gcc_assert (ctxt);
	  ctxt->on_unusable_in_infinite_loop ();
	  return false;
	}
    }

  /* Determine what is already known about "LHS OP RHS".  */
  tristate t_cond = eval_condition (lhs, op, rhs);

  /* If we already have the condition, do nothing.  */
  if (t_cond.is_true ())
    return true;

  /* Reject a constraint that would contradict existing knowledge, as
     unsatisfiable.  */
  if (t_cond.is_false ())
    return false;

  if (checking_for_infinite_loop)
    {
      /* Here, we don't have a definite true/false value, so bail out
	 when checking for infinite loops.  */
      gcc_assert (ctxt);
      ctxt->on_unusable_in_infinite_loop ();
      return false;
    }

  /* Try to decompose e.g. "(A && B) != 0" into constraints on A and B.  */
  bool out;
  if (add_constraints_from_binop (lhs, op, rhs, &out, ctxt))
    return out;

  /* Attempt to store the constraint.  */
  if (!m_constraints->add_constraint (lhs, op, rhs))
    return false;

  /* Notify the context, if any.  This exists so that the state machines
     in a program_state can be notified about the condition, and so can
     set sm-state for e.g. unchecked->checked, both for cfg-edges, and
     when synthesizing constraints as above.  */
  if (ctxt)
    ctxt->on_condition (lhs, op, rhs);

  /* If we have "&REGION == NULL", then drop dynamic extents for REGION (for
     the case where REGION is heap-allocated and thus could be NULL).  */
  if (tree rhs_cst = rhs->maybe_get_constant ())
    if (op == EQ_EXPR && zerop (rhs_cst))
      if (const region_svalue *region_sval = lhs->dyn_cast_region_svalue ())
	unset_dynamic_extents (region_sval->get_pointee ());

  return true;
}
5550 :
5551 : /* As above, but when returning false, if OUT is non-NULL, write a
5552 : new rejected_constraint to *OUT. */
5553 :
5554 : bool
5555 76064 : region_model::add_constraint (tree lhs, enum tree_code op, tree rhs,
5556 : region_model_context *ctxt,
5557 : std::unique_ptr<rejected_constraint> *out)
5558 : {
5559 76064 : bool sat = add_constraint (lhs, op, rhs, ctxt);
5560 76064 : if (!sat && out)
5561 : {
5562 2213 : const svalue *lhs_sval = get_rvalue (lhs, nullptr);
5563 2213 : const svalue *rhs_sval = get_rvalue (rhs, nullptr);
5564 4426 : *out = std::make_unique <rejected_op_constraint> (*this,
5565 2213 : lhs_sval, op, rhs_sval);
5566 : }
5567 76064 : return sat;
5568 : }
5569 :
5570 : /* Determine what is known about the condition "LHS OP RHS" within
5571 : this model.
5572 : Use CTXT for reporting any diagnostics associated with the accesses. */
5573 :
5574 : tristate
5575 33345 : region_model::eval_condition (tree lhs,
5576 : enum tree_code op,
5577 : tree rhs,
5578 : region_model_context *ctxt) const
5579 : {
5580 : /* For now, make no attempt to model constraints on floating-point
5581 : values. */
5582 33345 : if (FLOAT_TYPE_P (TREE_TYPE (lhs)) || FLOAT_TYPE_P (TREE_TYPE (rhs)))
5583 16 : return tristate::unknown ();
5584 :
5585 33329 : return eval_condition (get_rvalue (lhs, ctxt), op, get_rvalue (rhs, ctxt));
5586 : }
5587 :
5588 : /* Implementation of region_model::get_representative_path_var.
5589 : Attempt to return a path_var that represents SVAL, or return NULL_TREE.
5590 : Use VISITED to prevent infinite mutual recursion with the overload for
5591 : regions. */
5592 :
path_var
region_model::get_representative_path_var_1 (const svalue *sval,
					     svalue_set *visited,
					     logger *logger) const
{
  gcc_assert (sval);

  /* Prevent infinite recursion.  Constants are safe to re-represent
     directly; anything else that's already in flight yields NULL.  */
  if (visited->contains (sval))
    {
      if (sval->get_kind () == SK_CONSTANT)
	return path_var (sval->maybe_get_constant (), 0);
      else
	return path_var (NULL_TREE, 0);
    }
  visited->add (sval);

  /* Handle casts by recursion into get_representative_path_var.  */
  if (const svalue *cast_sval = sval->maybe_undo_cast ())
    {
      path_var result = get_representative_path_var (cast_sval, visited,
						     logger);
      tree orig_type = sval->get_type ();
      /* If necessary, wrap the result in a cast.  */
      if (result.m_tree && orig_type)
	result.m_tree = build1 (NOP_EXPR, orig_type, result.m_tree);
      return result;
    }

  /* Collect candidate path_vars from the store's bindings for SVAL.  */
  auto_vec<path_var> pvs;
  m_store.get_representative_path_vars (this, visited, sval, logger, &pvs);

  /* A constant svalue can always be represented by its own tree.  */
  if (tree cst = sval->maybe_get_constant ())
    pvs.safe_push (path_var (cst, 0));

  /* Handle string literals and various other pointers.  */
  if (const region_svalue *ptr_sval = sval->dyn_cast_region_svalue ())
    {
      const region *reg = ptr_sval->get_pointee ();
      if (path_var pv = get_representative_path_var (reg, visited, logger))
	return path_var (build1 (ADDR_EXPR,
				 sval->get_type (),
				 pv.m_tree),
			 pv.m_stack_depth);
    }

  /* If we have a sub_svalue, look for ways to represent the parent.  */
  if (const sub_svalue *sub_sval = sval->dyn_cast_sub_svalue ())
    {
      const svalue *parent_sval = sub_sval->get_parent ();
      const region *subreg = sub_sval->get_subregion ();
      /* Only field accesses are expressible here (as COMPONENT_REFs).  */
      if (path_var parent_pv
	    = get_representative_path_var (parent_sval, visited, logger))
	if (const field_region *field_reg = subreg->dyn_cast_field_region ())
	  return path_var (build3 (COMPONENT_REF,
				   sval->get_type (),
				   parent_pv.m_tree,
				   field_reg->get_field (),
				   NULL_TREE),
			   parent_pv.m_stack_depth);
    }

  /* Handle binops: build a tree combining representatives of both
     operands, if both can be represented.  */
  if (const binop_svalue *binop_sval = sval->dyn_cast_binop_svalue ())
    if (path_var lhs_pv
	  = get_representative_path_var (binop_sval->get_arg0 (), visited,
					 logger))
      if (path_var rhs_pv
	    = get_representative_path_var (binop_sval->get_arg1 (), visited,
					   logger))
	return path_var (build2 (binop_sval->get_op (),
				 sval->get_type (),
				 lhs_pv.m_tree, rhs_pv.m_tree),
			 lhs_pv.m_stack_depth);

  /* No candidates at all: give up.  */
  if (pvs.length () < 1)
    return path_var (NULL_TREE, 0);

  /* Prefer the most human-readable candidate.  */
  pvs.qsort (readability_comparator);
  return pvs[0];
}
5674 :
5675 : /* Attempt to return a path_var that represents SVAL, or return NULL_TREE.
5676 : Use VISITED to prevent infinite mutual recursion with the overload for
5677 : regions
5678 :
5679 : This function defers to get_representative_path_var_1 to do the work;
5680 : it adds verification that get_representative_path_var_1 returned a tree
5681 : of the correct type. */
5682 :
path_var
region_model::get_representative_path_var (const svalue *sval,
					   svalue_set *visited,
					   logger *logger) const
{
  /* A null svalue has no representative.  */
  if (sval == nullptr)
    return path_var (NULL_TREE, 0);

  /* Log the input svalue, when logging is enabled.  */
  LOG_SCOPE (logger);
  if (logger)
    {
      logger->start_log_line ();
      logger->log_partial ("sval: ");
      sval->dump_to_pp (logger->get_printer (), true);
      logger->end_log_line ();
    }

  tree orig_type = sval->get_type ();

  /* Defer to the worker function for the actual lookup.  */
  path_var result = get_representative_path_var_1 (sval, visited, logger);

  /* Verify that the result has the same type as SVAL, if any.  */
  if (result.m_tree && orig_type)
    gcc_assert (TREE_TYPE (result.m_tree) == orig_type);

  /* Log the input and the outcome, when logging is enabled.  */
  if (logger)
    {
      logger->start_log_line ();
      logger->log_partial ("sval: ");
      sval->dump_to_pp (logger->get_printer (), true);
      logger->end_log_line ();

      if (result.m_tree)
	logger->log ("tree: %qE", result.m_tree);
      else
	logger->log ("tree: NULL");
    }

  return result;
}
5723 :
5724 : /* Attempt to return a tree that represents SVAL, or return NULL_TREE.
5725 :
5726 : Strip off any top-level cast, to avoid messages like
5727 : double-free of '(void *)ptr'
5728 : from analyzer diagnostics. */
5729 :
5730 : tree
5731 14527 : region_model::get_representative_tree (const svalue *sval, logger *logger) const
5732 : {
5733 14527 : svalue_set visited;
5734 14527 : tree expr = get_representative_path_var (sval, &visited, logger).m_tree;
5735 :
5736 : /* Strip off any top-level cast. */
5737 14527 : if (expr && TREE_CODE (expr) == NOP_EXPR)
5738 459 : expr = TREE_OPERAND (expr, 0);
5739 :
5740 14527 : return fixup_tree_for_diagnostic (expr);
5741 14527 : }
5742 :
5743 : tree
5744 838 : region_model::get_representative_tree (const region *reg, logger *logger) const
5745 : {
5746 838 : svalue_set visited;
5747 838 : tree expr = get_representative_path_var (reg, &visited, logger).m_tree;
5748 :
5749 : /* Strip off any top-level cast. */
5750 838 : if (expr && TREE_CODE (expr) == NOP_EXPR)
5751 1 : expr = TREE_OPERAND (expr, 0);
5752 :
5753 838 : return fixup_tree_for_diagnostic (expr);
5754 838 : }
5755 :
5756 : /* Implementation of region_model::get_representative_path_var.
5757 :
5758 : Attempt to return a path_var that represents REG, or return
5759 : the NULL path_var.
5760 : For example, a region for a field of a local would be a path_var
5761 : wrapping a COMPONENT_REF.
5762 : Use VISITED to prevent infinite mutual recursion with the overload for
5763 : svalues. */
5764 :
path_var
region_model::get_representative_path_var_1 (const region *reg,
					     svalue_set *visited,
					     logger *logger) const
{
  /* Dispatch on the kind of region; each case either builds a tree
     expressing the region, or returns the NULL path_var when the
     region has no tree-level representation.  */
  switch (reg->get_kind ())
    {
    default:
      gcc_unreachable ();

    case RK_FRAME:
    case RK_GLOBALS:
    case RK_CODE:
    case RK_HEAP:
    case RK_STACK:
    case RK_THREAD_LOCAL:
    case RK_ROOT:
      /* Regions that represent memory spaces are not expressible as trees.  */
      return path_var (NULL_TREE, 0);

    case RK_FUNCTION:
      {
	/* A function is represented by its FUNCTION_DECL.  */
	const function_region *function_reg
	  = as_a <const function_region *> (reg);
	return path_var (function_reg->get_fndecl (), 0);
      }
    case RK_LABEL:
      {
	/* A label is represented by its LABEL_DECL.  */
	const label_region *label_reg = as_a <const label_region *> (reg);
	return path_var (label_reg->get_label (), 0);
      }

    case RK_SYMBOLIC:
      {
	/* "*PTR": build a MEM_REF from a representative of the pointer,
	   with a zero offset.  */
	const symbolic_region *symbolic_reg
	  = as_a <const symbolic_region *> (reg);
	const svalue *pointer = symbolic_reg->get_pointer ();
	path_var pointer_pv = get_representative_path_var (pointer, visited,
							   logger);
	if (!pointer_pv)
	  return path_var (NULL_TREE, 0);
	tree offset = build_int_cst (pointer->get_type (), 0);
	return path_var (build2 (MEM_REF,
				 reg->get_type (),
				 pointer_pv.m_tree,
				 offset),
			 pointer_pv.m_stack_depth);
      }
    case RK_DECL:
      {
	/* A declaration is represented by its decl, at the region's
	   stack depth.  */
	const decl_region *decl_reg = as_a <const decl_region *> (reg);
	return path_var (decl_reg->get_decl (), decl_reg->get_stack_depth ());
      }
    case RK_FIELD:
      {
	/* "PARENT.FIELD": a COMPONENT_REF on a representative of the
	   parent region.  */
	const field_region *field_reg = as_a <const field_region *> (reg);
	path_var parent_pv
	  = get_representative_path_var (reg->get_parent_region (), visited,
					 logger);
	if (!parent_pv)
	  return path_var (NULL_TREE, 0);
	return path_var (build3 (COMPONENT_REF,
				 reg->get_type (),
				 parent_pv.m_tree,
				 field_reg->get_field (),
				 NULL_TREE),
			 parent_pv.m_stack_depth);
      }

    case RK_ELEMENT:
      {
	/* "PARENT[INDEX]": an ARRAY_REF, requiring representatives for
	   both the parent region and the index svalue.  */
	const element_region *element_reg
	  = as_a <const element_region *> (reg);
	path_var parent_pv
	  = get_representative_path_var (reg->get_parent_region (), visited,
					 logger);
	if (!parent_pv)
	  return path_var (NULL_TREE, 0);
	path_var index_pv
	  = get_representative_path_var (element_reg->get_index (), visited,
					 logger);
	if (!index_pv)
	  return path_var (NULL_TREE, 0);
	return path_var (build4 (ARRAY_REF,
				 reg->get_type (),
				 parent_pv.m_tree, index_pv.m_tree,
				 NULL_TREE, NULL_TREE),
			 parent_pv.m_stack_depth);
      }

    case RK_OFFSET:
      {
	/* "*(&PARENT + OFFSET)": only constant offsets are handled;
	   a symbolic offset yields the NULL path_var.  */
	const offset_region *offset_reg
	  = as_a <const offset_region *> (reg);
	path_var parent_pv
	  = get_representative_path_var (reg->get_parent_region (), visited,
					 logger);
	if (!parent_pv)
	  return path_var (NULL_TREE, 0);
	path_var offset_pv
	  = get_representative_path_var (offset_reg->get_byte_offset (),
					 visited, logger);
	if (!offset_pv || TREE_CODE (offset_pv.m_tree) != INTEGER_CST)
	  return path_var (NULL_TREE, 0);
	tree addr_parent = build1 (ADDR_EXPR,
				   build_pointer_type (reg->get_type ()),
				   parent_pv.m_tree);
	tree ptype = build_pointer_type_for_mode (char_type_node, ptr_mode,
						  true);
	return path_var (build2 (MEM_REF, reg->get_type (), addr_parent,
				 fold_convert (ptype, offset_pv.m_tree)),
			 parent_pv.m_stack_depth);
      }

    case RK_SIZED:
      /* No tree representation for a sized view of another region.  */
      return path_var (NULL_TREE, 0);

    case RK_CAST:
      {
	/* Wrap a representative of the parent region in a NOP_EXPR cast
	   to this region's type.  */
	path_var parent_pv
	  = get_representative_path_var (reg->get_parent_region (), visited,
					 logger);
	if (!parent_pv)
	  return path_var (NULL_TREE, 0);
	return path_var (build1 (NOP_EXPR,
				 reg->get_type (),
				 parent_pv.m_tree),
			 parent_pv.m_stack_depth);
      }

    case RK_HEAP_ALLOCATED:
    case RK_ALLOCA:
      /* No good way to express heap-allocated/alloca regions as trees.  */
      return path_var (NULL_TREE, 0);

    case RK_STRING:
      {
	/* A string literal is represented by its STRING_CST.  */
	const string_region *string_reg = as_a <const string_region *> (reg);
	return path_var (string_reg->get_string_cst (), 0);
      }

    case RK_VAR_ARG:
    case RK_ERRNO:
    case RK_UNKNOWN:
    case RK_PRIVATE:
      /* Internal/synthetic regions have no tree representation.  */
      return path_var (NULL_TREE, 0);
    }
}
5913 :
5914 : /* Attempt to return a path_var that represents REG, or return
5915 : the NULL path_var.
5916 : For example, a region for a field of a local would be a path_var
5917 : wrapping a COMPONENT_REF.
5918 : Use VISITED to prevent infinite mutual recursion with the overload for
5919 : svalues.
5920 :
5921 : This function defers to get_representative_path_var_1 to do the work;
5922 : it adds verification that get_representative_path_var_1 returned a tree
5923 : of the correct type. */
5924 :
path_var
region_model::get_representative_path_var (const region *reg,
					   svalue_set *visited,
					   logger *logger) const
{
  /* Log the input region, when logging is enabled.  */
  LOG_SCOPE (logger);
  if (logger)
    {
      logger->start_log_line ();
      logger->log_partial ("reg: ");
      reg->dump_to_pp (logger->get_printer (), true);
      logger->end_log_line ();
    }

  /* Defer to the worker function for the actual lookup.  */
  path_var result = get_representative_path_var_1 (reg, visited, logger);

  /* Verify that the result has the same type as REG, if any.  */
  if (result.m_tree && reg->get_type ())
    gcc_assert (TREE_TYPE (result.m_tree) == reg->get_type ());

  /* Log the input and the outcome, when logging is enabled.  */
  if (logger)
    {
      logger->start_log_line ();
      logger->log_partial ("reg: ");
      reg->dump_to_pp (logger->get_printer (), true);
      logger->end_log_line ();

      if (result.m_tree)
	logger->log ("tree: %qE", result.m_tree);
      else
	logger->log ("tree: NULL");
    }

  return result;
}
5960 :
5961 : /* Push a new frame_region on to the stack region.
5962 : Populate the frame_region with child regions for the function call's
5963 : parameters, using values from the arguments at the callsite in the
5964 : caller's frame. */
5965 :
5966 : void
5967 12747 : region_model::update_for_gcall (const gcall &call_stmt,
5968 : region_model_context *ctxt,
5969 : function *callee)
5970 : {
5971 : /* Build a vec of argument svalues, using the current top
5972 : frame for resolving tree expressions. */
5973 12747 : auto_vec<const svalue *> arg_svals (gimple_call_num_args (&call_stmt));
5974 :
5975 26943 : for (unsigned i = 0; i < gimple_call_num_args (&call_stmt); i++)
5976 : {
5977 14196 : tree arg = gimple_call_arg (&call_stmt, i);
5978 14196 : arg_svals.quick_push (get_rvalue (arg, ctxt));
5979 : }
5980 :
5981 12747 : if(!callee)
5982 : {
5983 : /* Get the function * from the gcall. */
5984 0 : tree fn_decl = get_fndecl_for_call (call_stmt, ctxt);
5985 0 : callee = DECL_STRUCT_FUNCTION (fn_decl);
5986 : }
5987 :
5988 0 : gcc_assert (callee);
5989 12747 : push_frame (*callee, &call_stmt, &arg_svals, ctxt);
5990 12747 : }
5991 :
5992 : /* Pop the top-most frame_region from the stack, and copy the return
5993 : region's values (if any) into the region for the lvalue of the LHS of
5994 : the call (if any). */
5995 :
5996 : void
5997 9042 : region_model::update_for_return_gcall (const gcall &call_stmt,
5998 : region_model_context *ctxt)
5999 : {
6000 : /* Get the lvalue for the result of the call, passing it to pop_frame,
6001 : so that pop_frame can determine the region with respect to the
6002 : *caller* frame. */
6003 9042 : tree lhs = gimple_call_lhs (&call_stmt);
6004 9042 : pop_frame (lhs, nullptr, ctxt, &call_stmt);
6005 9042 : }
6006 :
6007 : /* Attempt to use R to replay SUMMARY into this object.
6008 : Return true if it is possible. */
6009 :
bool
region_model::replay_call_summary (call_summary_replay &r,
				   const region_model &summary)
{
  /* Summaries are recorded for a single frame.  */
  gcc_assert (summary.get_stack_depth () == 1);

  /* Replay the summary's store bindings into this model's store.  */
  m_store.replay_call_summary (r, summary.m_store);

  if (r.get_ctxt ())
    r.get_ctxt ()->maybe_did_work ();

  /* Fail if the summary's constraints can't be applied here.  */
  if (!m_constraints->replay_call_summary (r, *summary.m_constraints))
    return false;

  /* Copy over the summary's dynamic extents, converting each
     region/svalue from the summary's terms into the caller's terms;
     skip any that have no counterpart in the caller.  */
  for (auto kv : summary.m_dynamic_extents)
    {
      const region *summary_reg = kv.first;
      const region *caller_reg = r.convert_region_from_summary (summary_reg);
      if (!caller_reg)
	continue;
      const svalue *summary_sval = kv.second;
      const svalue *caller_sval = r.convert_svalue_from_summary (summary_sval);
      if (!caller_sval)
	continue;
      m_dynamic_extents.put (caller_reg, caller_sval);
    }

  return true;
}
6039 :
6040 : /* For use with push_frame when handling a top-level call within the analysis.
6041 : PARAM has a defined but unknown initial value.
6042 : Anything it points to has escaped, since the calling context "knows"
6043 : the pointer, and thus calls to unknown functions could read/write into
6044 : the region.
6045 : If NONNULL is true, then assume that PARAM must be non-NULL. */
6046 :
6047 : void
6048 22094 : region_model::on_top_level_param (tree param,
6049 : bool nonnull,
6050 : region_model_context *ctxt)
6051 : {
6052 22094 : if (POINTER_TYPE_P (TREE_TYPE (param)))
6053 : {
6054 10406 : const region *param_reg = get_lvalue (param, ctxt);
6055 10406 : const svalue *init_ptr_sval
6056 10406 : = m_mgr->get_or_create_initial_value (param_reg);
6057 10406 : const region *pointee_reg = m_mgr->get_symbolic_region (init_ptr_sval);
6058 10406 : store_manager *store_mgr = m_mgr->get_store_manager ();
6059 10406 : m_store.mark_as_escaped (*store_mgr, pointee_reg);
6060 10406 : if (nonnull)
6061 : {
6062 479 : const svalue *null_ptr_sval
6063 479 : = m_mgr->get_or_create_null_ptr (TREE_TYPE (param));
6064 479 : add_constraint (init_ptr_sval, NE_EXPR, null_ptr_sval, ctxt);
6065 : }
6066 : }
6067 22094 : }
6068 :
6069 : /* Update this region_model to reflect pushing a frame onto the stack
6070 : for a call to FUN.
6071 :
6072 : If CALL_STMT is non-NULL, this is for the interprocedural case where
6073 : we already have an execution path into the caller. It can be NULL for
6074 : top-level entrypoints into the analysis, or in selftests.
6075 :
6076 : If ARG_SVALS is non-NULL, use it to populate the parameters
6077 : in the new frame.
6078 : Otherwise, the params have their initial_svalues.
6079 :
6080 : Return the frame_region for the new frame. */
6081 :
const region *
region_model::push_frame (const function &fun,
			  const gcall *call_stmt,
			  const vec<const svalue *> *arg_svals,
			  region_model_context *ctxt)
{
  tree fndecl = fun.decl;
  if (arg_svals)
    {
      /* Interprocedural case: initialize the new frame's params from
	 the values computed at the callsite.  */

      /* If the result of the callee is DECL_BY_REFERENCE, then
	 we'll need to store a reference to the caller's lhs of
	 CALL_STMT within callee's result.
	 If so, determine the region of CALL_STMT's lhs within
	 the caller's frame before updating m_current_frame.  */
      const region *caller_return_by_reference_reg = nullptr;
      if (tree result = DECL_RESULT (fndecl))
	if (DECL_BY_REFERENCE (result))
	  {
	    gcc_assert (call_stmt);
	    tree lhs = gimple_call_lhs (call_stmt);
	    gcc_assert (lhs);
	    caller_return_by_reference_reg = get_lvalue (lhs, ctxt);
	  }

      /* Update m_current_frame.  */
      m_current_frame = m_mgr->get_frame_region (m_current_frame, fun);

      /* Arguments supplied from a caller frame.  */
      unsigned idx = 0;
      for (tree iter_parm = DECL_ARGUMENTS (fndecl); iter_parm;
	   iter_parm = DECL_CHAIN (iter_parm), ++idx)
	{
	  /* If there's a mismatching declaration, the call stmt might
	     not have enough args.  Handle this case by leaving the
	     rest of the params as uninitialized.  */
	  if (idx >= arg_svals->length ())
	    break;
	  /* Prefer binding the SSA default-def for the param, when
	     one exists, over the param decl itself.  */
	  tree parm_lval = iter_parm;
	  if (tree parm_default_ssa = get_ssa_default_def (fun, iter_parm))
	    parm_lval = parm_default_ssa;
	  const region *parm_reg = get_lvalue (parm_lval, ctxt);
	  const svalue *arg_sval = (*arg_svals)[idx];
	  set_value (parm_reg, arg_sval, ctxt);
	}

      /* Handle any variadic args.  */
      unsigned va_arg_idx = 0;
      for (; idx < arg_svals->length (); idx++, va_arg_idx++)
	{
	  const svalue *arg_sval = (*arg_svals)[idx];
	  const region *var_arg_reg
	    = m_mgr->get_var_arg_region (m_current_frame,
					 va_arg_idx);
	  set_value (var_arg_reg, arg_sval, ctxt);
	}

      /* If the result of the callee is DECL_BY_REFERENCE, then above
	 we should have determined the region within the
	 caller's frame that the callee will be writing back to.
	 Use this now to initialize the reference in callee's frame.  */
      if (tree result = DECL_RESULT (fndecl))
	if (DECL_BY_REFERENCE (result))
	  {
	    /* Get reference to the caller lhs.  */
	    gcc_assert (caller_return_by_reference_reg);
	    const svalue *ref_sval
	      = m_mgr->get_ptr_svalue (TREE_TYPE (result),
				       caller_return_by_reference_reg);

	    /* Get region for default val of DECL_RESULT within the
	       callee.  */
	    if (tree result_default_ssa = get_ssa_default_def (fun, result))
	      {
		const region *callee_result_reg
		  = get_lvalue (result_default_ssa, ctxt);

		/* Set the callee's reference to refer to the caller's lhs.  */
		set_value (callee_result_reg, ref_sval, ctxt);
	      }
	  }
    }
  else
    {
      /* Otherwise we have a top-level call within the analysis.  The params
	 have defined but unknown initial values.
	 Anything they point to has escaped.  */

      /* Update m_current_frame.  */
      m_current_frame = m_mgr->get_frame_region (m_current_frame, fun);

      /* Handle "__attribute__((nonnull))".  */
      tree fntype = TREE_TYPE (fndecl);
      bitmap nonnull_args = get_nonnull_args (fntype);

      unsigned parm_idx = 0;
      for (tree iter_parm = DECL_ARGUMENTS (fndecl); iter_parm;
	   iter_parm = DECL_CHAIN (iter_parm))
	{
	  /* An empty nonnull_args bitmap means every pointer param is
	     nonnull; otherwise test the param's bit.  */
	  bool non_null = (nonnull_args
			   ? (bitmap_empty_p (nonnull_args)
			      || bitmap_bit_p (nonnull_args, parm_idx))
			   : false);
	  /* Prefer the SSA default-def for the param, when one exists.  */
	  if (tree parm_default_ssa = get_ssa_default_def (fun, iter_parm))
	    on_top_level_param (parm_default_ssa, non_null, ctxt);
	  else
	    on_top_level_param (iter_parm, non_null, ctxt);
	  parm_idx++;
	}

      BITMAP_FREE (nonnull_args);
    }

  return m_current_frame;
}
6196 :
6197 : /* Get the function of the top-most frame in this region_model's stack.
6198 : There must be such a frame. */
6199 :
6200 : const function *
6201 966 : region_model::get_current_function () const
6202 : {
6203 966 : const frame_region *frame = get_current_frame ();
6204 966 : gcc_assert (frame);
6205 966 : return &frame->get_function ();
6206 : }
6207 :
6208 : /* Custom region_model_context for the assignment to the result
6209 : at a call statement when popping a frame (PR analyzer/106203). */
6210 :
class caller_context : public region_model_context_decorator
{
public:
  /* INNER is the context being decorated; CALL_STMT and CALLER_FRAME
     identify the call site within the caller.  */
  caller_context (region_model_context *inner,
		  const gcall *call_stmt,
		  const frame_region &caller_frame)
  : region_model_context_decorator (inner),
    m_call_stmt (call_stmt),
    m_caller_frame (caller_frame)
  {}

  /* Override the location for diagnostics so that they are reported
     at the call statement, within the caller's frame.  */
  pending_location
  get_pending_location_for_diag () const override
  {
    pending_location ploc
      = region_model_context_decorator::get_pending_location_for_diag ();

    ploc.m_event_loc_info
      = event_loc_info (m_call_stmt->location,
			m_caller_frame.get_fndecl (),
			m_caller_frame.get_stack_depth ());

    return ploc;
  }

  /* Report the call statement as the current statement.  */
  const gimple *get_stmt () const override
  {
    return m_call_stmt;
  };

  private:
  const gcall *m_call_stmt;
  const frame_region &m_caller_frame;
};
6245 :
6246 :
6247 : /* Pop the topmost frame_region from this region_model's stack;
6248 :
6249 : If RESULT_LVALUE is non-null, copy any return value from the frame
6250 : into the corresponding region (evaluated with respect to the *caller*
6251 : frame, rather than the called frame).
6252 : If OUT_RESULT is non-null, copy any return value from the frame
6253 : into *OUT_RESULT.
6254 :
6255 : If non-null, use CALL_STMT as the location when complaining about
6256 : assignment of the return value to RESULT_LVALUE.
6257 :
6258 : If EVAL_RETURN_SVALUE is false, then don't evaluate the return value.
6259 : This is for use when unwinding frames e.g. due to longjmp, to suppress
6260 : erroneously reporting uninitialized return values.
6261 :
6262 : Purge the frame region and all its descendent regions.
6263 : Convert any pointers that point into such regions into
6264 : poison_kind::popped_stack svalues. */
6265 :
void
region_model::pop_frame (tree result_lvalue,
			 const svalue **out_result,
			 region_model_context *ctxt,
			 const gcall *call_stmt,
			 bool eval_return_svalue)
{
  gcc_assert (m_current_frame);

  /* Snapshot the model before popping, for use in the frame-popped
     event published below.  */
  const region_model pre_popped_model = *this;
  const frame_region *frame_reg = m_current_frame;

  /* Notify state machines.  */
  if (ctxt)
    ctxt->on_pop_frame (frame_reg);

  /* Evaluate the result, within the callee frame.  */
  tree fndecl = m_current_frame->get_function ().decl;
  tree result = DECL_RESULT (fndecl);
  const svalue *retval = nullptr;
  if (result
      && TREE_TYPE (result) != void_type_node
      && eval_return_svalue)
    {
      retval = get_rvalue (result, ctxt);
      if (out_result)
	*out_result = retval;
    }

  /* Pop the frame.  */
  m_current_frame = m_current_frame->get_calling_frame ();

  if (result_lvalue
      && retval
      /* Don't write back for DECL_BY_REFERENCE; the writes
	 should have happened within the callee already.  */
      && !DECL_BY_REFERENCE (result))
    {
      gcc_assert (eval_return_svalue);

      /* Compute result_dst_reg using RESULT_LVALUE *after* popping
	 the frame, but before poisoning pointers into the old frame.  */
      const region *result_dst_reg = get_lvalue (result_lvalue, ctxt);

      /* Assign retval to result_dst_reg, using caller_context
	 to set the call_stmt and the popped_frame for any diagnostics
	 due to the assignment.  */
      gcc_assert (m_current_frame);
      caller_context caller_ctxt (ctxt, call_stmt, *m_current_frame);
      set_value (result_dst_reg, retval, call_stmt ? &caller_ctxt : ctxt);
    }

  /* Purge the popped frame's bindings; any remaining pointers into it
     become poison_kind::popped_stack svalues.  */
  unbind_region_and_descendents (frame_reg,poison_kind::popped_stack);

  /* Publish a frame-popped event to any active listeners.  */
  if (auto chan = g->get_channels ().analyzer_events_channel.get_if_active ())
    {
      gcc::topics::analyzer_events::on_frame_popped msg
	{this, &pre_popped_model, retval, ctxt};
      chan->publish (msg);
    }
}
6327 :
6328 : /* Get the number of frames in this region_model's stack. */
6329 :
6330 : int
6331 5548871 : region_model::get_stack_depth () const
6332 : {
6333 5548871 : const frame_region *frame = get_current_frame ();
6334 5548871 : if (frame)
6335 5531929 : return frame->get_stack_depth ();
6336 : else
6337 : return 0;
6338 : }
6339 :
6340 : /* Get the frame_region with the given index within the stack.
6341 : The frame_region must exist. */
6342 :
6343 : const frame_region *
6344 1675061 : region_model::get_frame_at_index (int index) const
6345 : {
6346 1675061 : const frame_region *frame = get_current_frame ();
6347 1675061 : gcc_assert (frame);
6348 1675061 : gcc_assert (index >= 0);
6349 1675061 : gcc_assert (index <= frame->get_index ());
6350 1901402 : while (index != frame->get_index ())
6351 : {
6352 226341 : frame = frame->get_calling_frame ();
6353 226341 : gcc_assert (frame);
6354 : }
6355 1675061 : return frame;
6356 : }
6357 :
6358 : /* Unbind svalues for any regions in REG and below.
6359 : Find any pointers to such regions; convert them to
6360 : poisoned values of kind PKIND.
6361 : Also purge any dynamic extents. */
6362 :
void
region_model::unbind_region_and_descendents (const region *reg,
					     enum poison_kind pkind)
{
  /* Gather a set of base regions to be unbound.
     Done as a separate pass so that we don't purge clusters from
     the store while still iterating over it.  */
  hash_set<const region *> base_regs;
  for (store::cluster_map_t::iterator iter = m_store.begin ();
       iter != m_store.end (); ++iter)
    {
      const region *iter_base_reg = (*iter).first;
      if (iter_base_reg->descendent_of_p (reg))
	base_regs.add (iter_base_reg);
    }
  for (hash_set<const region *>::iterator iter = base_regs.begin ();
       iter != base_regs.end (); ++iter)
    m_store.purge_cluster (*iter);

  /* Find any pointers to REG or its descendents; convert to poisoned.  */
  poison_any_pointers_to_descendents (reg, pkind);

  /* Purge dynamic extents of any base regions in REG and below
     (e.g. VLAs and alloca stack regions).
     NOTE(review): unset_dynamic_extents is called while iterating over
     m_dynamic_extents; presumably removal doesn't invalidate this
     iteration — confirm against the map type's erase semantics.  */
  for (auto iter : m_dynamic_extents)
    {
      const region *iter_reg = iter.first;
      if (iter_reg->descendent_of_p (reg))
	unset_dynamic_extents (iter_reg);
    }
}
6392 :
6393 : /* Find any pointers to REG or its descendents; convert them to
6394 : poisoned values of kind PKIND. */
6395 :
void
region_model::poison_any_pointers_to_descendents (const region *reg,
						  enum poison_kind pkind)
{
  /* Walk every binding of every cluster in the store, looking for
     pointer svalues whose pointee lies within REG.  */
  for (const auto &cluster_iter : m_store)
    {
      binding_cluster *cluster = cluster_iter.second;
      for (auto iter = cluster->begin ();
	   iter != cluster->end ();
	   ++iter)
	{
	  auto bp = *iter;
	  const svalue *sval = bp.m_sval;
	  if (const region_svalue *ptr_sval = sval->dyn_cast_region_svalue ())
	    {
	      const region *ptr_dst = ptr_sval->get_pointee ();
	      /* Poison ptrs to descendents of REG, but not to REG itself,
		 otherwise double-free detection doesn't work (since sm-state
		 for "free" is stored on the original ptr svalue).  */
	      if (ptr_dst->descendent_of_p (reg)
		  && ptr_dst != reg)
		{
		  const svalue *new_sval
		    = m_mgr->get_or_create_poisoned_svalue (pkind,
							    sval->get_type ());
		  /* Replace the bound value in place, keeping the key.  */
		  cluster->get_map ().overwrite (iter, new_sval);
		}
	    }
	}
    }
}
6427 :
6428 : /* Attempt to merge THIS with OTHER_MODEL, writing the result
6429 : to OUT_MODEL. Use POINT to distinguish values created as a
6430 : result of merging. */
6431 :
bool
region_model::can_merge_with_p (const region_model &other_model,
				const program_point &point,
				region_model *out_model,
				const extrinsic_state *ext_state,
				const program_state *state_a,
				const program_state *state_b) const
{
  gcc_assert (out_model);
  gcc_assert (m_mgr == other_model.m_mgr);
  gcc_assert (m_mgr == out_model->m_mgr);

  /* Models in different frames can never be merged.  */
  if (m_current_frame != other_model.m_current_frame)
    return false;
  out_model->m_current_frame = m_current_frame;

  model_merger m (this, &other_model, point, out_model,
		  ext_state, state_a, state_b);

  /* Merge the stores first; this can fail (e.g. for svalues with
     non-purgable sm-state; see model_merger::mergeable_svalue_p),
     making the models unmergeable.  */
  if (!store::can_merge_p (&m_store, &other_model.m_store,
			   &out_model->m_store, m_mgr->get_store_manager (),
			   &m))
    return false;

  if (!m_dynamic_extents.can_merge_with_p (other_model.m_dynamic_extents,
					   &out_model->m_dynamic_extents))
    return false;

  /* Merge constraints.  */
  constraint_manager::merge (*m_constraints,
			     *other_model.m_constraints,
			     out_model->m_constraints);

  /* Drop constraints involving svalues whose meaning changed during
     the merge (e.g. reused widening svalues; see on_widening_reuse).  */
  for (auto iter : m.m_svals_changing_meaning)
    out_model->m_constraints->purge_state_involving (iter);

  /* The exception stacks must match exactly for the models to merge.  */
  if (m_thrown_exceptions_stack != other_model.m_thrown_exceptions_stack)
    return false;
  out_model->m_thrown_exceptions_stack = m_thrown_exceptions_stack;

  if (m_caught_exceptions_stack != other_model.m_caught_exceptions_stack)
    return false;
  out_model->m_caught_exceptions_stack = m_caught_exceptions_stack;

  return true;
}
6478 :
6479 : /* Attempt to get the fndecl used at CALL, if known, or NULL_TREE
6480 : otherwise. */
6481 :
6482 : tree
6483 946660 : region_model::get_fndecl_for_call (const gcall &call,
6484 : region_model_context *ctxt)
6485 : {
6486 946660 : tree fn_ptr = gimple_call_fn (&call);
6487 946660 : if (fn_ptr == NULL_TREE)
6488 : return NULL_TREE;
6489 903788 : const svalue *fn_ptr_sval = get_rvalue (fn_ptr, ctxt);
6490 1807576 : if (const region_svalue *fn_ptr_ptr
6491 903788 : = fn_ptr_sval->dyn_cast_region_svalue ())
6492 : {
6493 897540 : const region *reg = fn_ptr_ptr->get_pointee ();
6494 897540 : if (const function_region *fn_reg = reg->dyn_cast_function_region ())
6495 : {
6496 897484 : tree fn_decl = fn_reg->get_fndecl ();
6497 897484 : cgraph_node *node = cgraph_node::get (fn_decl);
6498 897484 : if (!node)
6499 : return NULL_TREE;
6500 897484 : const cgraph_node *ultimate_node = node->ultimate_alias_target ();
6501 897484 : if (ultimate_node)
6502 897484 : return ultimate_node->decl;
6503 : }
6504 : }
6505 :
6506 : return NULL_TREE;
6507 : }
6508 :
6509 : /* Would be much simpler to use a lambda here, if it were supported. */
6510 :
struct append_regions_cb_data
{
  /* The model whose current frame is used as the filter.  */
  const region_model *model;
  /* Where matching decl_regions are accumulated.  */
  auto_vec<const decl_region *> *out;
};
6516 :
6517 : /* Populate *OUT with all decl_regions in the current
6518 : frame that have clusters within the store. */
6519 :
6520 : void
6521 395906 : region_model::
6522 : get_regions_for_current_frame (auto_vec<const decl_region *> *out) const
6523 : {
6524 395906 : append_regions_cb_data data;
6525 395906 : data.model = this;
6526 395906 : data.out = out;
6527 395906 : m_store.for_each_cluster (append_regions_cb, &data);
6528 395906 : }
6529 :
6530 : /* Implementation detail of get_regions_for_current_frame. */
6531 :
6532 : void
6533 3101278 : region_model::append_regions_cb (const region *base_reg,
6534 : append_regions_cb_data *cb_data)
6535 : {
6536 3101278 : if (base_reg->get_parent_region () != cb_data->model->m_current_frame)
6537 : return;
6538 1762544 : if (const decl_region *decl_reg = base_reg->dyn_cast_decl_region ())
6539 1744211 : cb_data->out->safe_push (decl_reg);
6540 : }
6541 :
6542 :
6543 : /* Abstract class for diagnostics related to the use of
6544 : floating-point arithmetic where precision is needed. */
6545 :
class imprecise_floating_point_arithmetic : public pending_diagnostic
{
public:
  /* All such diagnostics are controlled by
     -Wanalyzer-imprecise-fp-arithmetic.  */
  int get_controlling_option () const final override
  {
    return OPT_Wanalyzer_imprecise_fp_arithmetic;
  }
};
6554 :
6555 : /* Concrete diagnostic to complain about uses of floating-point arithmetic
6556 : in the size argument of malloc etc. */
6557 :
class float_as_size_arg : public imprecise_floating_point_arithmetic
{
public:
  /* ARG is the floating-point operand to name in the diagnostic;
     may be NULL_TREE if no representative tree was found.  */
  float_as_size_arg (tree arg) : m_arg (arg)
  {}

  const char *get_kind () const final override
  {
    return "float_as_size_arg_diagnostic";
  }

  /* Deduplicate diagnostics that name the same operand.  */
  bool subclass_equal_p (const pending_diagnostic &other) const final override
  {
    return same_tree_p (m_arg, ((const float_as_size_arg &) other).m_arg);
  }

  bool emit (diagnostic_emission_context &ctxt) final override
  {
    bool warned = ctxt.warn ("use of floating-point arithmetic here might"
			     " yield unexpected results");
    if (warned)
      inform (ctxt.get_location (),
	      "only use operands of an integer type"
	      " inside the size argument");
    return warned;
  }

  bool
  describe_final_event (pretty_printer &pp,
			const evdesc::final_event &) final override
  {
    /* Name the offending operand and its type when one is known.  */
    if (m_arg)
      pp_printf (&pp,
		 "operand %qE is of type %qT",
		 m_arg, TREE_TYPE (m_arg));
    else
      pp_printf (&pp,
		 "at least one operand of the size argument is"
		 " of a floating-point type");
    return true;
  }

private:
  tree m_arg;
};
6603 :
6604 : /* Visitor to find uses of floating-point variables/constants in an svalue. */
6605 :
class contains_floating_point_visitor : public visitor
{
public:
  /* Visit all of ROOT_SVAL's descendents at construction time.  */
  contains_floating_point_visitor (const svalue *root_sval) : m_result (nullptr)
  {
    root_sval->accept (this);
  }

  /* Return a float-typed svalue found within the visited value,
     or nullptr if none was found.  */
  const svalue *get_svalue_to_report ()
  {
    return m_result;
  }

  void visit_constant_svalue (const constant_svalue *sval) final override
  {
    /* At the point the analyzer runs, constant integer operands in a floating
       point expression are already implictly converted to floating-points.
       Thus, we do prefer to report non-constants such that the diagnostic
       always reports a floating-point operand.  */
    tree type = sval->get_type ();
    if (type && FLOAT_TYPE_P (type) && !m_result)
      m_result = sval;
  }

  /* Unlike the constant case above, non-constant operands overwrite
     m_result unconditionally, so they take precedence.  */
  void visit_conjured_svalue (const conjured_svalue *sval) final override
  {
    tree type = sval->get_type ();
    if (type && FLOAT_TYPE_P (type))
      m_result = sval;
  }

  void visit_initial_svalue (const initial_svalue *sval) final override
  {
    tree type = sval->get_type ();
    if (type && FLOAT_TYPE_P (type))
      m_result = sval;
  }

private:
  /* Non-null if at least one floating-point operand was found.  */
  const svalue *m_result;
};
6648 :
6649 : /* May complain about uses of floating-point operands in SIZE_IN_BYTES. */
6650 :
6651 : void
6652 7818 : region_model::check_dynamic_size_for_floats (const svalue *size_in_bytes,
6653 : region_model_context *ctxt) const
6654 : {
6655 7818 : gcc_assert (ctxt);
6656 :
6657 7818 : contains_floating_point_visitor v (size_in_bytes);
6658 7818 : if (const svalue *float_sval = v.get_svalue_to_report ())
6659 : {
6660 25 : tree diag_arg = get_representative_tree (float_sval);
6661 25 : ctxt->warn (std::make_unique<float_as_size_arg> (diag_arg));
6662 : }
6663 7818 : }
6664 :
6665 : /* Return a region describing a heap-allocated block of memory.
6666 : Use CTXT to complain about tainted sizes.
6667 :
6668 : Reuse an existing heap_allocated_region if it's not being referenced by
6669 : this region_model; otherwise create a new one.
6670 :
6671 : Optionally (update_state_machine) transitions the pointer pointing to the
6672 : heap_allocated_region from start to assumed non-null. */
6673 :
const region *
region_model::get_or_create_region_for_heap_alloc (const svalue *size_in_bytes,
						   region_model_context *ctxt,
						   bool update_state_machine,
						   const call_details *cd)
{
  /* Determine which regions are referenced in this region_model, so that
     we can reuse an existing heap_allocated_region if it's not in use on
     this path.  */
  auto_bitmap base_regs_in_use;
  get_referenced_base_regions (base_regs_in_use);

  /* Don't reuse regions that are marked as TOUCHED.  */
  for (store::cluster_map_t::iterator iter = m_store.begin ();
       iter != m_store.end (); ++iter)
    if ((*iter).second->touched_p ())
      {
	const region *base_reg = (*iter).first;
	bitmap_set_bit (base_regs_in_use, base_reg->get_id ());
      }

  const region *reg
    = m_mgr->get_or_create_region_for_heap_alloc (base_regs_in_use);
  /* Record the allocation size, but only when SIZE_IN_BYTES is known
     and its type is compatible with size_t.  */
  if (size_in_bytes)
    if (compat_types_p (size_in_bytes->get_type (), size_type_node))
      set_dynamic_extents (reg, size_in_bytes, ctxt);

  /* Optionally transition the sm-state of the pointer to the new
     region from "start" to assumed-non-null.  */
  if (update_state_machine && cd)
    {
      const svalue *ptr_sval
	= m_mgr->get_ptr_svalue (cd->get_lhs_type (), reg);
      transition_ptr_sval_non_null (ctxt, ptr_sval);
    }

  return reg;
}
6710 :
6711 : /* Populate OUT_IDS with the set of IDs of those base regions which are
6712 : reachable in this region_model. */
6713 :
void
region_model::get_referenced_base_regions (auto_bitmap &out_ids) const
{
  /* Seed the reachability analysis from every cluster in the store.  */
  reachable_regions reachable_regs (const_cast<region_model *> (this));
  m_store.for_each_cluster (reachable_regions::init_cluster_cb,
			    &reachable_regs);
  /* Get regions for locals that have explicitly bound values.  */
  for (store::cluster_map_t::iterator iter = m_store.begin ();
       iter != m_store.end (); ++iter)
    {
      const region *base_reg = (*iter).first;
      if (const region *parent = base_reg->get_parent_region ())
	if (parent->get_kind () == RK_FRAME)
	  reachable_regs.add (base_reg, false);
    }

  /* In-flight exceptions also keep regions reachable.  */
  for (auto &eh_node : m_thrown_exceptions_stack)
    eh_node.add_to_reachable_regions (reachable_regs);
  for (auto &eh_node : m_caught_exceptions_stack)
    eh_node.add_to_reachable_regions (reachable_regs);


  /* Emit the IDs of everything found to be reachable.  */
  bitmap_clear (out_ids);
  for (auto iter_reg : reachable_regs)
    bitmap_set_bit (out_ids, iter_reg->get_id ());
}
6740 :
6741 : /* Return a new region describing a block of memory allocated within the
6742 : current frame.
6743 : Use CTXT to complain about tainted sizes. */
6744 :
6745 : const region *
6746 531 : region_model::create_region_for_alloca (const svalue *size_in_bytes,
6747 : region_model_context *ctxt)
6748 : {
6749 531 : const region *reg = m_mgr->create_region_for_alloca (m_current_frame);
6750 531 : if (compat_types_p (size_in_bytes->get_type (), size_type_node))
6751 530 : set_dynamic_extents (reg, size_in_bytes, ctxt);
6752 531 : return reg;
6753 : }
6754 :
6755 : /* Record that the size of REG is SIZE_IN_BYTES.
6756 : Use CTXT to complain about tainted sizes. */
6757 :
6758 : void
6759 14350 : region_model::set_dynamic_extents (const region *reg,
6760 : const svalue *size_in_bytes,
6761 : region_model_context *ctxt)
6762 : {
6763 14350 : assert_compat_types (size_in_bytes->get_type (), size_type_node);
6764 14350 : if (ctxt)
6765 : {
6766 7818 : check_dynamic_size_for_taint (reg->get_memory_space (), size_in_bytes,
6767 : ctxt);
6768 7818 : check_dynamic_size_for_floats (size_in_bytes, ctxt);
6769 : }
6770 14350 : m_dynamic_extents.put (reg, size_in_bytes);
6771 14350 : }
6772 :
6773 : /* Get the recording of REG in bytes, or nullptr if no dynamic size was
6774 : recorded. */
6775 :
6776 : const svalue *
6777 62597 : region_model::get_dynamic_extents (const region *reg) const
6778 : {
6779 62597 : if (const svalue * const *slot = m_dynamic_extents.get (reg))
6780 13004 : return *slot;
6781 : return nullptr;
6782 : }
6783 :
6784 : /* Unset any recorded dynamic size of REG. */
6785 :
void
region_model::unset_dynamic_extents (const region *reg)
{
  /* After this, get_dynamic_extents (REG) will return nullptr.  */
  m_dynamic_extents.remove (reg);
}
6791 :
6792 : /* A subclass of pending_diagnostic for complaining about uninitialized data
6793 : being copied across a trust boundary to an untrusted output
6794 : (e.g. copy_to_user infoleaks in the Linux kernel). */
6795 :
class exposure_through_uninit_copy
  : public pending_diagnostic_subclass<exposure_through_uninit_copy>
{
public:
  /* SRC_REGION is a hint as to where the data came from and may be null;
     DEST_REGION is the untrusted destination; COPIED_SVAL is the copied
     value, which must be (at least partially) uninitialized.  */
  exposure_through_uninit_copy (const region *src_region,
				const region *dest_region,
				const svalue *copied_sval)
  : m_src_region (src_region),
    m_dest_region (dest_region),
    m_copied_sval (copied_sval)
  {
    gcc_assert (m_copied_sval->get_kind () == SK_POISONED
		|| m_copied_sval->get_kind () == SK_COMPOUND);
  }

  const char *get_kind () const final override
  {
    return "exposure_through_uninit_copy";
  }

  /* Deduplicate diagnostics describing the same copy.  */
  bool operator== (const exposure_through_uninit_copy &other) const
  {
    return (m_src_region == other.m_src_region
	    && m_dest_region == other.m_dest_region
	    && m_copied_sval == other.m_copied_sval);
  }

  int get_controlling_option () const final override
  {
    return OPT_Wanalyzer_exposure_through_uninit_copy;
  }

  /* Emit the warning, tailoring the message to the source memory space,
     then add notes quantifying and locating the uninitialized bits.  */
  bool emit (diagnostic_emission_context &ctxt) final override
  {
    /* CWE-200: Exposure of Sensitive Information to an Unauthorized Actor. */
    ctxt.add_cwe (200);
    enum memory_space mem_space = get_src_memory_space ();
    bool warned;
    switch (mem_space)
      {
      default:
	warned = ctxt.warn ("potential exposure of sensitive information"
			    " by copying uninitialized data"
			    " across trust boundary");
	break;
      case MEMSPACE_STACK:
	warned = ctxt.warn ("potential exposure of sensitive information"
			    " by copying uninitialized data from stack"
			    " across trust boundary");
	break;
      case MEMSPACE_HEAP:
	warned = ctxt.warn ("potential exposure of sensitive information"
			    " by copying uninitialized data from heap"
			    " across trust boundary");
	break;
      }
    if (warned)
      {
	const location_t loc = ctxt.get_location ();
	inform_number_of_uninit_bits (loc);
	complain_about_uninit_ranges (loc);

	/* Only stack variables get the "= {0}" fix-it below.  */
	if (mem_space == MEMSPACE_STACK)
	  maybe_emit_fixit_hint ();
      }
    return warned;
  }

  bool
  describe_final_event (pretty_printer &pp,
			const evdesc::final_event &) final override
  {
    enum memory_space mem_space = get_src_memory_space ();
    switch (mem_space)
      {
      default:
	pp_string (&pp, "uninitialized data copied here");
	return true;

      case MEMSPACE_STACK:
	pp_string (&pp, "uninitialized data copied from stack here");
	return true;

      case MEMSPACE_HEAP:
	pp_string (&pp, "uninitialized data copied from heap here");
	return true;
      }
  }

  void mark_interesting_stuff (interesting_t *interest) final override
  {
    /* Highlight where the (possibly null) source region was created.  */
    if (m_src_region)
      interest->add_region_creation (m_src_region);
  }

  /* Record machine-readable details of the leak in SARIF output.  */
  void
  maybe_add_sarif_properties (diagnostics::sarif_object &result_obj)
    const final override
  {
    auto &props = result_obj.get_or_create_properties ();
#define PROPERTY_PREFIX "gcc/-Wanalyzer-exposure-through-uninit-copy/"
    props.set (PROPERTY_PREFIX "src_region", m_src_region->to_json ());
    props.set (PROPERTY_PREFIX "dest_region", m_dest_region->to_json ());
    props.set (PROPERTY_PREFIX "copied_sval", m_copied_sval->to_json ());
#undef PROPERTY_PREFIX
  }

private:
  enum memory_space get_src_memory_space () const
  {
    return m_src_region ? m_src_region->get_memory_space () : MEMSPACE_UNKNOWN;
  }

  /* Count how many bits of M_COPIED_SVAL are uninitialized,
     returning 0 when the size can't be determined.  Every reachable
     case returns, so the "break"s below are unreachable.  */
  bit_size_t calc_num_uninit_bits () const
  {
    switch (m_copied_sval->get_kind ())
      {
      default:
	gcc_unreachable ();
	break;
      case SK_POISONED:
	{
	  const poisoned_svalue *poisoned_sval
	    = as_a <const poisoned_svalue *> (m_copied_sval);
	  gcc_assert (poisoned_sval->get_poison_kind () == poison_kind::uninit);

	  /* Give up if don't have type information.  */
	  if (m_copied_sval->get_type () == NULL_TREE)
	    return 0;

	  bit_size_t size_in_bits;
	  if (int_size_in_bits (m_copied_sval->get_type (), &size_in_bits))
	    return size_in_bits;

	  /* Give up if we can't get the size of the type.  */
	  return 0;
	}
	break;
      case SK_COMPOUND:
	{
	  const compound_svalue *compound_sval
	    = as_a <const compound_svalue *> (m_copied_sval);
	  bit_size_t result = 0;
	  /* Find keys for uninit svals.  */
	  for (auto iter : compound_sval->get_concrete_bindings ())
	    {
	      const svalue *sval = iter.second;
	      if (const poisoned_svalue *psval
		  = sval->dyn_cast_poisoned_svalue ())
		if (psval->get_poison_kind () == poison_kind::uninit)
		  {
		    const bit_range &bits = iter.first;
		    result += bits.m_size_in_bits;
		  }
	    }
	  return result;
	}
      }
  }

  /* Note how much of the copy is uninitialized, in bytes where the
     count is byte-aligned, otherwise in bits.  */
  void inform_number_of_uninit_bits (location_t loc) const
  {
    bit_size_t num_uninit_bits = calc_num_uninit_bits ();
    if (num_uninit_bits <= 0)
      return;
    if (num_uninit_bits % BITS_PER_UNIT == 0)
      {
	/* Express in bytes.  */
	byte_size_t num_uninit_bytes = num_uninit_bits / BITS_PER_UNIT;
	if (num_uninit_bytes == 1)
	  inform (loc, "1 byte is uninitialized");
	else
	  inform (loc,
		  "%wu bytes are uninitialized", num_uninit_bytes.to_uhwi ());
      }
    else
      {
	/* Express in bits.  */
	if (num_uninit_bits == 1)
	  inform (loc, "1 bit is uninitialized");
	else
	  inform (loc,
		  "%wu bits are uninitialized", num_uninit_bits.to_uhwi ());
      }
  }

  /* For compound values, report each uninitialized bit-range,
     using the record layout (when the type is a RECORD_TYPE) to
     describe ranges in terms of fields and padding.  */
  void complain_about_uninit_ranges (location_t loc) const
  {
    if (const compound_svalue *compound_sval
	= m_copied_sval->dyn_cast_compound_svalue ())
      {
	/* Find keys for uninit svals.  */
	auto_vec<bit_range> uninit_bit_ranges;
	for (auto iter : compound_sval->get_concrete_bindings ())
	  {
	    const svalue *sval = iter.second;
	    if (const poisoned_svalue *psval
		= sval->dyn_cast_poisoned_svalue ())
	      if (psval->get_poison_kind () == poison_kind::uninit)
		uninit_bit_ranges.safe_push (iter.first);
	  }

	std::unique_ptr<record_layout> layout;

	tree type = m_copied_sval->get_type ();
	if (type && TREE_CODE (type) == RECORD_TYPE)
	  {
	    layout = std::make_unique<record_layout> (type);

	    if (0)
	      layout->dump ();
	  }

	unsigned i;
	bit_range *bits;
	FOR_EACH_VEC_ELT (uninit_bit_ranges, i, bits)
	  {
	    bit_offset_t start_bit = bits->get_start_bit_offset ();
	    bit_offset_t next_bit = bits->get_next_bit_offset ();
	    complain_about_uninit_range (loc, start_bit, next_bit,
					 layout.get ());
	  }
      }
  }

  /* Describe the half-open bit-range [START_BIT, NEXT_BIT).  First
     consume as much of the range as LAYOUT (which may be null) can
     attribute to fields/padding; any residue is reported as raw
     byte or bit offsets.  */
  void complain_about_uninit_range (location_t loc,
				    bit_offset_t start_bit,
				    bit_offset_t next_bit,
				    const record_layout *layout) const
  {
    if (layout)
      {
	while (start_bit < next_bit)
	  {
	    if (const record_layout::item *item
		= layout->get_item_at (start_bit))
	      {
		gcc_assert (start_bit >= item->get_start_bit_offset ());
		gcc_assert (start_bit < item->get_next_bit_offset ());
		if (item->get_start_bit_offset () == start_bit
		    && item->get_next_bit_offset () <= next_bit)
		  complain_about_fully_uninit_item (*item);
		else
		  complain_about_partially_uninit_item (*item);
		start_bit = item->get_next_bit_offset ();
		continue;
	      }
	    else
	      break;
	  }
      }

    if (start_bit >= next_bit)
      return;

    if (start_bit % 8 == 0 && next_bit % 8 == 0)
      {
	/* Express in bytes.  */
	byte_offset_t start_byte = start_bit / 8;
	byte_offset_t last_byte = (next_bit / 8) - 1;
	if (last_byte == start_byte)
	  inform (loc,
		  "byte %wu is uninitialized",
		  start_byte.to_uhwi ());
	else
	  inform (loc,
		  "bytes %wu - %wu are uninitialized",
		  start_byte.to_uhwi (),
		  last_byte.to_uhwi ());
      }
    else
      {
	/* Express in bits.  */
	bit_offset_t last_bit = next_bit - 1;
	if (last_bit == start_bit)
	  inform (loc,
		  "bit %wu is uninitialized",
		  start_bit.to_uhwi ());
	else
	  inform (loc,
		  "bits %wu - %wu are uninitialized",
		  start_bit.to_uhwi (),
		  last_bit.to_uhwi ());
      }
  }

  /* Report a field (or its trailing padding) that is entirely
     uninitialized, at the field's declaration.  */
  static void
  complain_about_fully_uninit_item (const record_layout::item &item)
  {
    const_tree field = item.m_field;
    bit_size_t num_bits = item.m_bit_range.m_size_in_bits;
    if (item.m_is_padding)
      {
	if (num_bits % 8 == 0)
	  {
	    /* Express in bytes.  */
	    byte_size_t num_bytes = num_bits / BITS_PER_UNIT;
	    if (num_bytes == 1)
	      inform (DECL_SOURCE_LOCATION (field),
		      "padding after field %qD is uninitialized (1 byte)",
		      field);
	    else
	      inform (DECL_SOURCE_LOCATION (field),
		      "padding after field %qD is uninitialized (%wu bytes)",
		      field, num_bytes.to_uhwi ());
	  }
	else
	  {
	    /* Express in bits.  */
	    if (num_bits == 1)
	      inform (DECL_SOURCE_LOCATION (field),
		      "padding after field %qD is uninitialized (1 bit)",
		      field);
	    else
	      inform (DECL_SOURCE_LOCATION (field),
		      "padding after field %qD is uninitialized (%wu bits)",
		      field, num_bits.to_uhwi ());
	  }
      }
    else
      {
	if (num_bits % 8 == 0)
	  {
	    /* Express in bytes.  */
	    byte_size_t num_bytes = num_bits / BITS_PER_UNIT;
	    if (num_bytes == 1)
	      inform (DECL_SOURCE_LOCATION (field),
		      "field %qD is uninitialized (1 byte)", field);
	    else
	      inform (DECL_SOURCE_LOCATION (field),
		      "field %qD is uninitialized (%wu bytes)",
		      field, num_bytes.to_uhwi ());
	  }
	else
	  {
	    /* Express in bits.  */
	    if (num_bits == 1)
	      inform (DECL_SOURCE_LOCATION (field),
		      "field %qD is uninitialized (1 bit)", field);
	    else
	      inform (DECL_SOURCE_LOCATION (field),
		      "field %qD is uninitialized (%wu bits)",
		      field, num_bits.to_uhwi ());
	  }
      }
  }

  /* Report a field (or its trailing padding) that is only partially
     uninitialized.  */
  static void
  complain_about_partially_uninit_item (const record_layout::item &item)
  {
    const_tree field = item.m_field;
    if (item.m_is_padding)
      inform (DECL_SOURCE_LOCATION (field),
	      "padding after field %qD is partially uninitialized",
	      field);
    else
      inform (DECL_SOURCE_LOCATION (field),
	      "field %qD is partially uninitialized",
	      field);
    /* TODO: ideally we'd describe what parts are uninitialized.  */
  }

  /* If the source has a decl, suggest zero-initializing it.  */
  void maybe_emit_fixit_hint () const
  {
    if (tree decl = m_src_region->maybe_get_decl ())
      {
	gcc_rich_location hint_richloc (DECL_SOURCE_LOCATION (decl));
	hint_richloc.add_fixit_insert_after (" = {0}");
	inform (&hint_richloc,
		"suggest forcing zero-initialization by"
		" providing a %<{0}%> initializer");
      }
  }

private:
  const region *m_src_region;
  const region *m_dest_region;
  const svalue *m_copied_sval;
};
7175 :
7176 : /* Return true if any part of SVAL is uninitialized. */
7177 :
7178 : static bool
7179 80 : contains_uninit_p (const svalue *sval)
7180 : {
7181 80 : switch (sval->get_kind ())
7182 : {
7183 : default:
7184 : return false;
7185 4 : case SK_POISONED:
7186 4 : {
7187 4 : const poisoned_svalue *psval
7188 4 : = as_a <const poisoned_svalue *> (sval);
7189 4 : return psval->get_poison_kind () == poison_kind::uninit;
7190 : }
7191 43 : case SK_COMPOUND:
7192 43 : {
7193 43 : const compound_svalue *compound_sval
7194 43 : = as_a <const compound_svalue *> (sval);
7195 :
7196 141 : for (auto iter = compound_sval->begin ();
7197 141 : iter != compound_sval->end (); ++iter)
7198 : {
7199 119 : const svalue *inner_sval = iter->second;
7200 238 : if (const poisoned_svalue *psval
7201 119 : = inner_sval->dyn_cast_poisoned_svalue ())
7202 21 : if (psval->get_poison_kind () == poison_kind::uninit)
7203 80 : return true;
7204 : }
7205 :
7206 : return false;
7207 : }
7208 : }
7209 : }
7210 :
7211 : /* Function for use by plugins when simulating writing data through a
7212 : pointer to an "untrusted" region DST_REG (and thus crossing a security
7213 : boundary), such as copying data to user space in an OS kernel.
7214 :
7215 : Check that COPIED_SVAL is fully initialized. If not, complain about
7216 : an infoleak to CTXT.
7217 :
7218 : SRC_REG can be nullptr; if non-NULL it is used as a hint in the diagnostic
7219 : as to where COPIED_SVAL came from. */
7220 :
7221 : void
7222 80 : region_model::maybe_complain_about_infoleak (const region *dst_reg,
7223 : const svalue *copied_sval,
7224 : const region *src_reg,
7225 : region_model_context *ctxt)
7226 : {
7227 : /* Check for exposure. */
7228 80 : if (contains_uninit_p (copied_sval))
7229 25 : ctxt->warn
7230 25 : (std::make_unique<exposure_through_uninit_copy> (src_reg,
7231 : dst_reg,
7232 : copied_sval));
7233 80 : }
7234 :
7235 : /* Set errno to a positive symbolic int, as if some error has occurred. */
7236 :
7237 : void
7238 537 : region_model::set_errno (const call_details &cd)
7239 : {
7240 537 : const region *errno_reg = m_mgr->get_errno_region ();
7241 537 : conjured_purge p (this, cd.get_ctxt ());
7242 537 : const svalue *new_errno_sval
7243 537 : = m_mgr->get_or_create_conjured_svalue (integer_type_node,
7244 537 : &cd.get_call_stmt (),
7245 : errno_reg, p);
7246 537 : const svalue *zero
7247 537 : = m_mgr->get_or_create_int_cst (integer_type_node, 0);
7248 537 : add_constraint (new_errno_sval, GT_EXPR, zero, cd.get_ctxt ());
7249 537 : set_value (errno_reg, new_errno_sval, cd.get_ctxt ());
7250 537 : }
7251 :
7252 : // class region_model_context
7253 :
bool
region_model_context::
warn (std::unique_ptr<pending_diagnostic> d,
      std::unique_ptr<pending_location::fixer_for_epath> ploc_fixer)
{
  /* Convenience wrapper: build the pending_location via
     get_pending_location_for_diag, attach PLOC_FIXER, and delegate
     to warn_at.  */
  pending_location ploc (get_pending_location_for_diag ());
  ploc.m_fixer_for_epath = std::move (ploc_fixer);
  return warn_at (std::move (d), std::move (ploc));
}
7263 :
7264 : /* class noop_region_model_context : public region_model_context. */
7265 :
void
noop_region_model_context::add_note (std::unique_ptr<pending_note>)
{
  /* No-op: this context intentionally discards notes.  */
}
7270 :
void
noop_region_model_context::add_event (std::unique_ptr<checker_event>)
{
  /* No-op: this context intentionally discards events.  */
}
7275 :
void
noop_region_model_context::bifurcate (std::unique_ptr<custom_edge_info>)
{
  /* No-op: requests to bifurcate state are intentionally ignored.  */
}
7280 :
void
noop_region_model_context::terminate_path ()
{
  /* No-op: requests to terminate the path are intentionally ignored.  */
}
7285 :
7286 : /* class region_model_context_decorator : public region_model_context. */
7287 :
7288 : void
7289 172 : region_model_context_decorator::add_event (std::unique_ptr<checker_event> event)
7290 : {
7291 172 : if (m_inner)
7292 172 : m_inner->add_event (std::move (event));
7293 172 : }
7294 :
7295 : /* struct model_merger. */
7296 :
7297 : /* Dump a multiline representation of this merger to PP. */
7298 :
7299 : void
7300 0 : model_merger::dump_to_pp (pretty_printer *pp, bool simple) const
7301 : {
7302 0 : pp_string (pp, "model A:");
7303 0 : pp_newline (pp);
7304 0 : m_model_a->dump_to_pp (pp, simple, true);
7305 0 : pp_newline (pp);
7306 :
7307 0 : pp_string (pp, "model B:");
7308 0 : pp_newline (pp);
7309 0 : m_model_b->dump_to_pp (pp, simple, true);
7310 0 : pp_newline (pp);
7311 :
7312 0 : pp_string (pp, "merged model:");
7313 0 : pp_newline (pp);
7314 0 : m_merged_model->dump_to_pp (pp, simple, true);
7315 0 : pp_newline (pp);
7316 0 : }
7317 :
7318 : /* Dump a multiline representation of this merger to FILE. */
7319 :
/* Dump a multiline representation of this merger to FP,
   using a tree-aware pretty-printer.  */

void
model_merger::dump (FILE *fp, bool simple) const
{
  tree_dump_pretty_printer pp (fp);
  dump_to_pp (&pp, simple);
}
7326 :
7327 : /* Dump a multiline representation of this merger to stderr. */
7328 :
/* Dump a multiline representation of this merger to stderr
   (convenience entrypoint for use from a debugger).  */

DEBUG_FUNCTION void
model_merger::dump (bool simple) const
{
  dump (stderr, simple);
}
7334 :
7335 : /* Return true if it's OK to merge SVAL with other svalues. */
7336 :
7337 : bool
7338 548994 : model_merger::mergeable_svalue_p (const svalue *sval) const
7339 : {
7340 548994 : if (m_ext_state)
7341 : {
7342 : /* Reject merging svalues that have non-purgable sm-state,
7343 : to avoid falsely reporting memory leaks by merging them
7344 : with something else. For example, given a local var "p",
7345 : reject the merger of a:
7346 : store_a mapping "p" to a malloc-ed ptr
7347 : with:
7348 : store_b mapping "p" to a NULL ptr. */
7349 548946 : if (m_state_a)
7350 548946 : if (!m_state_a->can_purge_p (*m_ext_state, sval))
7351 : return false;
7352 547131 : if (m_state_b)
7353 547131 : if (!m_state_b->can_purge_p (*m_ext_state, sval))
7354 : return false;
7355 : }
7356 : return true;
7357 : }
7358 :
7359 : /* Mark WIDENING_SVAL as changing meaning during the merge. */
7360 :
/* Mark WIDENING_SVAL as changing meaning during the merge, by
   recording it in m_svals_changing_meaning.  */

void
model_merger::on_widening_reuse (const widening_svalue *widening_sval)
{
  m_svals_changing_meaning.add (widening_sval);
}
7366 :
7367 : } // namespace ana
7368 :
7369 : /* Dump RMODEL fully to stderr (i.e. without summarization). */
7370 :
/* Dump RMODEL fully to stderr (i.e. without summarization);
   convenience entrypoint for use from a debugger.  */

DEBUG_FUNCTION void
debug (const region_model &rmodel)
{
  rmodel.dump (false);
}
7376 :
7377 : /* class rejected_op_constraint : public rejected_constraint. */
7378 :
7379 : void
7380 4 : rejected_op_constraint::dump_to_pp (pretty_printer *pp) const
7381 : {
7382 4 : region_model m (m_model);
7383 4 : m_lhs->dump_to_pp (pp, true);
7384 4 : pp_printf (pp, " %s ", op_symbol_code (m_op));
7385 4 : m_rhs->dump_to_pp (pp, true);
7386 4 : }
7387 :
7388 : /* class rejected_default_case : public rejected_constraint. */
7389 :
/* Describe this rejected constraint to PP; there is no lhs/op/rhs
   to print, just a fixed description.  */

void
rejected_default_case::dump_to_pp (pretty_printer *pp) const
{
  pp_string (pp, "implicit default for enum");
}
7395 :
7396 : /* class rejected_ranges_constraint : public rejected_constraint. */
7397 :
/* Print "EXPR in RANGES" to PP, using a scratch copy of the model
   to obtain the svalue for m_expr.  */

void
rejected_ranges_constraint::dump_to_pp (pretty_printer *pp) const
{
  region_model m (m_model);
  const svalue *sval = m.get_rvalue (m_expr, nullptr);
  sval->dump_to_pp (pp, true);
  pp_string (pp, " in ");
  m_ranges->dump_to_pp (pp, true);
}
7407 :
7408 : /* class engine. */
7409 :
7410 : /* engine's ctor. */
7411 :
7412 3443 : engine::engine (region_model_manager &mgr,
7413 3443 : const supergraph *sg)
7414 3443 : : m_mgr (mgr),
7415 3443 : m_sg (sg)
7416 : {
7417 3443 : }
7418 :
7419 : /* Dump the managed objects by class to LOGGER, and the per-class totals. */
7420 :
/* Dump the managed objects by class to LOGGER, and the per-class
   totals, by delegating to the region_model_manager.  */

void
engine::log_stats (logger *logger) const
{
  m_mgr.log_stats (logger, true);
}
7426 :
7427 : namespace ana {
7428 :
7429 : #if CHECKING_P
7430 :
7431 : namespace selftest {
7432 :
7433 : /* Build a constant tree of the given type from STR. */
7434 :
7435 : static tree
7436 64 : build_real_cst_from_string (tree type, const char *str)
7437 : {
7438 64 : REAL_VALUE_TYPE real;
7439 64 : real_from_string (&real, str);
7440 64 : return build_real (type, real);
7441 : }
7442 :
7443 : /* Append various "interesting" constants to OUT (e.g. NaN). */
7444 :
7445 : static void
7446 8 : append_interesting_constants (auto_vec<tree> *out)
7447 : {
7448 8 : out->safe_push (integer_zero_node);
7449 8 : out->safe_push (build_int_cst (integer_type_node, 42));
7450 8 : out->safe_push (build_int_cst (unsigned_type_node, 0));
7451 8 : out->safe_push (build_int_cst (unsigned_type_node, 42));
7452 8 : out->safe_push (build_real_cst_from_string (float_type_node, "QNaN"));
7453 8 : out->safe_push (build_real_cst_from_string (float_type_node, "-QNaN"));
7454 8 : out->safe_push (build_real_cst_from_string (float_type_node, "SNaN"));
7455 8 : out->safe_push (build_real_cst_from_string (float_type_node, "-SNaN"));
7456 8 : out->safe_push (build_real_cst_from_string (float_type_node, "0.0"));
7457 8 : out->safe_push (build_real_cst_from_string (float_type_node, "-0.0"));
7458 8 : out->safe_push (build_real_cst_from_string (float_type_node, "Inf"));
7459 8 : out->safe_push (build_real_cst_from_string (float_type_node, "-Inf"));
7460 8 : }
7461 :
7462 : /* Verify that tree_cmp is a well-behaved comparator for qsort, even
7463 : if the underlying constants aren't comparable. */
7464 :
7465 : static void
7466 4 : test_tree_cmp_on_constants ()
7467 : {
7468 4 : auto_vec<tree> csts;
7469 4 : append_interesting_constants (&csts);
7470 :
7471 : /* Try sorting every triple. */
7472 4 : const unsigned num = csts.length ();
7473 52 : for (unsigned i = 0; i < num; i++)
7474 624 : for (unsigned j = 0; j < num; j++)
7475 7488 : for (unsigned k = 0; k < num; k++)
7476 : {
7477 6912 : auto_vec<tree> v (3);
7478 6912 : v.quick_push (csts[i]);
7479 6912 : v.quick_push (csts[j]);
7480 6912 : v.quick_push (csts[k]);
7481 6912 : v.qsort (tree_cmp);
7482 6912 : }
7483 4 : }
7484 :
7485 : /* Implementation detail of the ASSERT_CONDITION_* macros. */
7486 :
/* Implementation detail of the ASSERT_CONDITION_* macros.
   Evaluate "LHS OP RHS" within MODEL and assert at LOC that the
   resulting tristate is EXPECTED.  */

void
assert_condition (const location &loc,
		  region_model &model,
		  const svalue *lhs, tree_code op, const svalue *rhs,
		  tristate expected)
{
  tristate actual = model.eval_condition (lhs, op, rhs);
  ASSERT_EQ_AT (loc, actual, expected);
}
7496 :
7497 : /* Implementation detail of the ASSERT_CONDITION_* macros. */
7498 :
/* Implementation detail of the ASSERT_CONDITION_* macros.
   As above, but LHS and RHS are trees; evaluated with a null
   context.  */

void
assert_condition (const location &loc,
		  region_model &model,
		  tree lhs, tree_code op, tree rhs,
		  tristate expected)
{
  tristate actual = model.eval_condition (lhs, op, rhs, nullptr);
  ASSERT_EQ_AT (loc, actual, expected);
}
7508 :
7509 : /* Implementation detail of ASSERT_DUMP_TREE_EQ. */
7510 :
/* Implementation detail of ASSERT_DUMP_TREE_EQ.
   Dump T to a pretty-printer and assert at LOC that the resulting
   text equals EXPECTED.  */

static void
assert_dump_tree_eq (const location &loc, tree t, const char *expected)
{
  /* Normalize quote characters so the expected strings are
     locale-independent.  */
  auto_fix_quotes sentinel;
  pretty_printer pp;
  pp_format_decoder (&pp) = default_tree_printer;
  dump_tree (&pp, t);
  ASSERT_STREQ_AT (loc, pp_formatted_text (&pp), expected);
}
7520 :
7521 : /* Assert that dump_tree (T) is EXPECTED. */
7522 :
7523 : #define ASSERT_DUMP_TREE_EQ(T, EXPECTED) \
7524 : SELFTEST_BEGIN_STMT \
7525 : assert_dump_tree_eq ((SELFTEST_LOCATION), (T), (EXPECTED)); \
7526 : SELFTEST_END_STMT
7527 :
7528 : /* Implementation detail of ASSERT_DUMP_EQ. */
7529 :
/* Implementation detail of ASSERT_DUMP_EQ.
   Dump MODEL (with the given SUMMARIZE flag) and assert at LOC that
   the resulting text equals EXPECTED.  */

static void
assert_dump_eq (const location &loc,
		const region_model &model,
		bool summarize,
		const char *expected)
{
  /* Normalize quote characters so the expected strings are
     locale-independent.  */
  auto_fix_quotes sentinel;
  pretty_printer pp;
  pp_format_decoder (&pp) = default_tree_printer;

  model.dump_to_pp (&pp, summarize, true);
  ASSERT_STREQ_AT (loc, pp_formatted_text (&pp), expected);
}
7543 :
7544 : /* Assert that MODEL.dump_to_pp (SUMMARIZE) is EXPECTED. */
7545 :
7546 : #define ASSERT_DUMP_EQ(MODEL, SUMMARIZE, EXPECTED) \
7547 : SELFTEST_BEGIN_STMT \
7548 : assert_dump_eq ((SELFTEST_LOCATION), (MODEL), (SUMMARIZE), (EXPECTED)); \
7549 : SELFTEST_END_STMT
7550 :
7551 : /* Smoketest for region_model::dump_to_pp. */
7552 :
/* Smoketest for region_model::dump_to_pp: check the text dump of an
   empty model (both full and summarized), and its tree-widget dump.  */

static void
test_dump ()
{
  region_model_manager mgr;
  region_model model (&mgr);

  /* An empty model should dump the same way whether summarized
     or not.  */
  ASSERT_DUMP_EQ (model, false,
		  "stack depth: 0\n"
		  "m_called_unknown_fn: FALSE\n"
		  "constraint_manager:\n"
		  "  equiv classes:\n"
		  "  constraints:\n");
  ASSERT_DUMP_EQ (model, true,
		  "stack depth: 0\n"
		  "m_called_unknown_fn: FALSE\n"
		  "constraint_manager:\n"
		  "  equiv classes:\n"
		  "  constraints:\n");

  /* Check the text-art tree-widget rendering of the model.  */
  text_art::ascii_theme theme;
  pretty_printer pp;
  dump_to_pp (model, &theme, &pp);
  ASSERT_STREQ ("Region Model\n"
		"`- Store\n"
		"   `- m_called_unknown_fn: false\n",
		pp_formatted_text (&pp));
}
7580 :
7581 : /* Helper function for selftests. Create a struct or union type named NAME,
7582 : with the fields given by the FIELD_DECLS in FIELDS.
7583 : If IS_STRUCT is true create a RECORD_TYPE (aka a struct), otherwise
7584 : create a UNION_TYPE. */
7585 :
/* Helper function for selftests.  Create a struct or union type named NAME,
   with the fields given by the FIELD_DECLS in FIELDS.
   If IS_STRUCT is true create a RECORD_TYPE (aka a struct), otherwise
   create a UNION_TYPE.  */

static tree
make_test_compound_type (const char *name, bool is_struct,
			 const auto_vec<tree> *fields)
{
  tree t = make_node (is_struct ? RECORD_TYPE : UNION_TYPE);
  TYPE_NAME (t) = get_identifier (name);
  TYPE_SIZE (t) = 0;

  /* Chain the fields together, preserving their order in FIELDS.
     chainon prepends here, so the chain is reversed afterwards.  */
  tree fieldlist = NULL_TREE;
  int i;
  tree field;
  FOR_EACH_VEC_ELT (*fields, i, field)
    {
      gcc_assert (TREE_CODE (field) == FIELD_DECL);
      DECL_CONTEXT (field) = t;
      fieldlist = chainon (field, fieldlist);
    }
  fieldlist = nreverse (fieldlist);
  TYPE_FIELDS (t) = fieldlist;

  /* Compute size/layout for the new type.  */
  layout_type (t);
  return t;
}
7609 :
7610 : /* Selftest fixture for creating the type "struct coord {int x; int y; };". */
7611 :
struct coord_test
{
  /* Build the FIELD_DECLs for "x" and "y" (both int), then the
     enclosing "struct coord" type.  */
  coord_test ()
  {
    auto_vec<tree> fields;
    m_x_field = build_decl (UNKNOWN_LOCATION, FIELD_DECL,
			    get_identifier ("x"), integer_type_node);
    fields.safe_push (m_x_field);
    m_y_field = build_decl (UNKNOWN_LOCATION, FIELD_DECL,
			    get_identifier ("y"), integer_type_node);
    fields.safe_push (m_y_field);
    m_coord_type = make_test_compound_type ("coord", true, &fields);
  }

  /* FIELD_DECL for "x".  */
  tree m_x_field;
  /* FIELD_DECL for "y".  */
  tree m_y_field;
  /* The RECORD_TYPE for "struct coord".  */
  tree m_coord_type;
};
7630 :
7631 : /* Verify usage of a struct. */
7632 :
/* Verify usage of a struct: setting fields, the bit offsets the
   field regions report, and the iteration order of the resulting
   binding_cluster.  */

static void
test_struct ()
{
  coord_test ct;

  /* Build a global "struct coord c" and COMPONENT_REFs for
     "c.x" and "c.y".  */
  tree c = build_global_decl ("c", ct.m_coord_type);
  tree c_x = build3 (COMPONENT_REF, TREE_TYPE (ct.m_x_field),
		     c, ct.m_x_field, NULL_TREE);
  tree c_y = build3 (COMPONENT_REF, TREE_TYPE (ct.m_y_field),
		     c, ct.m_y_field, NULL_TREE);

  tree int_17 = build_int_cst (integer_type_node, 17);
  tree int_m3 = build_int_cst (integer_type_node, -3);

  region_model_manager mgr;
  region_model model (&mgr);
  /* Set fields in order y, then x.  */
  model.set_value (c_y, int_m3, nullptr);
  model.set_value (c_x, int_17, nullptr);

  /* Verify get_offset for "c.x".  */
  {
    const region *c_x_reg = model.get_lvalue (c_x, nullptr);
    region_offset offset = c_x_reg->get_offset (&mgr);
    ASSERT_EQ (offset.get_base_region (), model.get_lvalue (c, nullptr));
    ASSERT_EQ (offset.get_bit_offset (), 0);
  }

  /* Verify get_offset for "c.y".  */
  {
    const region *c_y_reg = model.get_lvalue (c_y, nullptr);
    region_offset offset = c_y_reg->get_offset (&mgr);
    ASSERT_EQ (offset.get_base_region (), model.get_lvalue (c, nullptr));
    ASSERT_EQ (offset.get_bit_offset (), INT_TYPE_SIZE);
  }

  /* Check iteration order of binding_cluster (and thus of binding_map).  */
  {
    std::vector<binding_map::binding_pair> vec;
    auto cluster
      = model.get_store ()->get_cluster (model.get_lvalue (c, nullptr));
    for (auto iter : *cluster)
      vec.push_back (iter);
    ASSERT_EQ (vec.size (), 2);
    /* we should get them back in ascending order in memory (x then y),
       despite having been set in the order y then x above.  */
    /* x */
    ASSERT_EQ (vec[0].m_key->dyn_cast_concrete_binding ()->get_bit_range (),
	       bit_range (0, INT_TYPE_SIZE));
    ASSERT_TRUE (tree_int_cst_equal(vec[0].m_sval->maybe_get_constant (),
				    int_17));
    /* y */
    ASSERT_EQ (vec[1].m_key->dyn_cast_concrete_binding ()->get_bit_range (),
	       bit_range (INT_TYPE_SIZE, INT_TYPE_SIZE));
    ASSERT_TRUE (tree_int_cst_equal(vec[1].m_sval->maybe_get_constant (),
				    int_m3));
  }
}
7690 :
7691 : /* Verify usage of an array element. */
7692 :
7693 : static void
7694 4 : test_array_1 ()
7695 : {
7696 4 : tree tlen = size_int (10);
7697 4 : tree arr_type = build_array_type (char_type_node, build_index_type (tlen));
7698 :
7699 4 : tree a = build_global_decl ("a", arr_type);
7700 :
7701 4 : region_model_manager mgr;
7702 4 : region_model model (&mgr);
7703 4 : tree int_0 = integer_zero_node;
7704 4 : tree a_0 = build4 (ARRAY_REF, char_type_node,
7705 : a, int_0, NULL_TREE, NULL_TREE);
7706 4 : tree char_A = build_int_cst (char_type_node, 'A');
7707 4 : model.set_value (a_0, char_A, nullptr);
7708 4 : }
7709 :
7710 : /* Verify that region_model::get_representative_tree works as expected. */
7711 :
/* Verify that region_model::get_representative_tree works as expected,
   for string constants, string literals, array elements, and struct
   fields.  */

static void
test_get_representative_tree ()
{
  region_model_manager mgr;

  /* STRING_CST.  */
  {
    tree string_cst = build_string (4, "foo");
    region_model m (&mgr);
    const svalue *str_sval = m.get_rvalue (string_cst, nullptr);
    tree rep = m.get_representative_tree (str_sval);
    ASSERT_EQ (rep, string_cst);
  }

  /* String literal.  */
  {
    tree string_cst_ptr = build_string_literal (4, "foo");
    region_model m (&mgr);
    const svalue *str_sval = m.get_rvalue (string_cst_ptr, nullptr);
    tree rep = m.get_representative_tree (str_sval);
    ASSERT_DUMP_TREE_EQ (rep, "&\"foo\"[0]");
  }

  /* Value of an element within an array.  */
  {
    tree tlen = size_int (10);
    tree arr_type = build_array_type (char_type_node, build_index_type (tlen));
    tree a = build_global_decl ("a", arr_type);
    /* A placeholder svalue stands in for "some tracked value" so we
       can query which tree the model uses to describe it.  */
    placeholder_svalue test_sval (mgr.alloc_symbol_id (),
				  char_type_node, "test value");

    /* Value of a[3].  */
    {
      test_region_model_context ctxt;
      region_model model (&mgr);
      tree int_3 = build_int_cst (integer_type_node, 3);
      tree a_3 = build4 (ARRAY_REF, char_type_node,
			 a, int_3, NULL_TREE, NULL_TREE);
      const region *a_3_reg = model.get_lvalue (a_3, &ctxt);
      model.set_value (a_3_reg, &test_sval, &ctxt);
      tree rep = model.get_representative_tree (&test_sval);
      ASSERT_DUMP_TREE_EQ (rep, "a[3]");
    }

    /* Value of a[0].  */
    {
      test_region_model_context ctxt;
      region_model model (&mgr);
      tree idx = integer_zero_node;
      tree a_0 = build4 (ARRAY_REF, char_type_node,
			 a, idx, NULL_TREE, NULL_TREE);
      const region *a_0_reg = model.get_lvalue (a_0, &ctxt);
      model.set_value (a_0_reg, &test_sval, &ctxt);
      tree rep = model.get_representative_tree (&test_sval);
      ASSERT_DUMP_TREE_EQ (rep, "a[0]");
    }
  }

  /* Value of a field within a struct.  */
  {
    coord_test ct;

    tree c = build_global_decl ("c", ct.m_coord_type);
    tree c_x = build3 (COMPONENT_REF, TREE_TYPE (ct.m_x_field),
		       c, ct.m_x_field, NULL_TREE);
    tree c_y = build3 (COMPONENT_REF, TREE_TYPE (ct.m_y_field),
		       c, ct.m_y_field, NULL_TREE);

    test_region_model_context ctxt;

    /* Value of initial field.  */
    {
      region_model m (&mgr);
      const region *c_x_reg = m.get_lvalue (c_x, &ctxt);
      placeholder_svalue test_sval_x (mgr.alloc_symbol_id (),
				      integer_type_node, "test x val");
      m.set_value (c_x_reg, &test_sval_x, &ctxt);
      tree rep = m.get_representative_tree (&test_sval_x);
      ASSERT_DUMP_TREE_EQ (rep, "c.x");
    }

    /* Value of non-initial field.  */
    {
      region_model m (&mgr);
      const region *c_y_reg = m.get_lvalue (c_y, &ctxt);
      placeholder_svalue test_sval_y (mgr.alloc_symbol_id (),
				      integer_type_node, "test y val");
      m.set_value (c_y_reg, &test_sval_y, &ctxt);
      tree rep = m.get_representative_tree (&test_sval_y);
      ASSERT_DUMP_TREE_EQ (rep, "c.y");
    }
  }
}
7805 :
7806 : /* Verify that calling region_model::get_rvalue repeatedly on the same
7807 : tree constant retrieves the same svalue *. */
7808 :
/* Verify that calling region_model::get_rvalue repeatedly on the same
   tree constant retrieves the same svalue *, and that constants of
   differently-qualified types get distinct svalues which nonetheless
   compare as equal.  */

static void
test_unique_constants ()
{
  tree int_0 = integer_zero_node;
  tree int_42 = build_int_cst (integer_type_node, 42);

  test_region_model_context ctxt;
  region_model_manager mgr;
  region_model model (&mgr);
  ASSERT_EQ (model.get_rvalue (int_0, &ctxt), model.get_rvalue (int_0, &ctxt));
  ASSERT_EQ (model.get_rvalue (int_42, &ctxt),
	     model.get_rvalue (int_42, &ctxt));
  ASSERT_NE (model.get_rvalue (int_0, &ctxt), model.get_rvalue (int_42, &ctxt));
  ASSERT_EQ (ctxt.get_num_diagnostics (), 0);

  /* A "(const int)42" will be a different tree from "(int)42)"...  */
  tree const_int_type_node
    = build_qualified_type (integer_type_node, TYPE_QUAL_CONST);
  tree const_int_42 = build_int_cst (const_int_type_node, 42);
  ASSERT_NE (int_42, const_int_42);
  /* It should have a different const_svalue.  */
  const svalue *int_42_sval = model.get_rvalue (int_42, &ctxt);
  const svalue *const_int_42_sval = model.get_rvalue (const_int_42, &ctxt);
  ASSERT_NE (int_42_sval, const_int_42_sval);
  /* But they should compare as equal.  */
  ASSERT_CONDITION_TRUE (model, int_42_sval, EQ_EXPR, const_int_42_sval);
  ASSERT_CONDITION_FALSE (model, int_42_sval, NE_EXPR, const_int_42_sval);
}
7837 :
7838 : /* Verify that each type gets its own singleton unknown_svalue within a
7839 : region_model_manager, and that NULL_TREE gets its own singleton. */
7840 :
7841 : static void
7842 4 : test_unique_unknowns ()
7843 : {
7844 4 : region_model_manager mgr;
7845 4 : const svalue *unknown_int
7846 4 : = mgr.get_or_create_unknown_svalue (integer_type_node);
7847 : /* Repeated calls with the same type should get the same "unknown"
7848 : svalue. */
7849 4 : const svalue *unknown_int_2
7850 4 : = mgr.get_or_create_unknown_svalue (integer_type_node);
7851 4 : ASSERT_EQ (unknown_int, unknown_int_2);
7852 :
7853 : /* Different types (or the NULL type) should have different
7854 : unknown_svalues. */
7855 4 : const svalue *unknown_NULL_type = mgr.get_or_create_unknown_svalue (nullptr);
7856 4 : ASSERT_NE (unknown_NULL_type, unknown_int);
7857 :
7858 : /* Repeated calls with NULL for the type should get the same "unknown"
7859 : svalue. */
7860 4 : const svalue *unknown_NULL_type_2 = mgr.get_or_create_unknown_svalue (nullptr);
7861 4 : ASSERT_EQ (unknown_NULL_type, unknown_NULL_type_2);
7862 4 : }
7863 :
7864 : /* Verify that initial_svalue are handled as expected. */
7865 :
7866 : static void
7867 4 : test_initial_svalue_folding ()
7868 : {
7869 4 : region_model_manager mgr;
7870 4 : tree x = build_global_decl ("x", integer_type_node);
7871 4 : tree y = build_global_decl ("y", integer_type_node);
7872 :
7873 4 : test_region_model_context ctxt;
7874 4 : region_model model (&mgr);
7875 4 : const svalue *x_init = model.get_rvalue (x, &ctxt);
7876 4 : const svalue *y_init = model.get_rvalue (y, &ctxt);
7877 4 : ASSERT_NE (x_init, y_init);
7878 4 : const region *x_reg = model.get_lvalue (x, &ctxt);
7879 4 : ASSERT_EQ (x_init, mgr.get_or_create_initial_value (x_reg));
7880 :
7881 4 : }
7882 :
7883 : /* Verify that unary ops are folded as expected. */
7884 :
/* Verify that unary ops are folded as expected: identity casts fold
   away, pointer casts don't, and TRUTH_NOT_EXPR of a comparison folds
   to the inverted comparison.  */

static void
test_unaryop_svalue_folding ()
{
  region_model_manager mgr;
  tree x = build_global_decl ("x", integer_type_node);
  tree y = build_global_decl ("y", integer_type_node);

  test_region_model_context ctxt;
  region_model model (&mgr);
  const svalue *x_init = model.get_rvalue (x, &ctxt);
  const svalue *y_init = model.get_rvalue (y, &ctxt);
  const region *x_reg = model.get_lvalue (x, &ctxt);
  ASSERT_EQ (x_init, mgr.get_or_create_initial_value (x_reg));

  /* "(int)x" -> "x".  */
  ASSERT_EQ (x_init, mgr.get_or_create_cast (integer_type_node, x_init));

  /* "(void *)x" -> something other than "x".  */
  ASSERT_NE (x_init, mgr.get_or_create_cast (ptr_type_node, x_init));

  /* "!(x == y)" -> "x != y".  */
  ASSERT_EQ (mgr.get_or_create_unaryop
	       (boolean_type_node, TRUTH_NOT_EXPR,
		mgr.get_or_create_binop (boolean_type_node, EQ_EXPR,
					 x_init, y_init)),
	     mgr.get_or_create_binop (boolean_type_node, NE_EXPR,
				      x_init, y_init));
  /* "!(x > y)" -> "x <= y".  */
  ASSERT_EQ (mgr.get_or_create_unaryop
	       (boolean_type_node, TRUTH_NOT_EXPR,
		mgr.get_or_create_binop (boolean_type_node, GT_EXPR,
					 x_init, y_init)),
	     mgr.get_or_create_binop (boolean_type_node, LE_EXPR,
				      x_init, y_init));
}
7920 :
7921 : /* Verify that binops on constant svalues are folded. */
7922 :
/* Verify that binops on constant svalues are folded: constant
   arithmetic, identity/annihilator elements, commutative
   normalization (constants on the RHS), reassociation of constants,
   and short-circuitable boolean binops.  */

static void
test_binop_svalue_folding ()
{
#define NUM_CSTS 10
  tree cst_int[NUM_CSTS];
  region_model_manager mgr;
  const svalue *cst_sval[NUM_CSTS];
  /* Build constant svalues for 0..9.  */
  for (int i = 0; i < NUM_CSTS; i++)
    {
      cst_int[i] = build_int_cst (integer_type_node, i);
      cst_sval[i] = mgr.get_or_create_constant_svalue (cst_int[i]);
      ASSERT_EQ (cst_sval[i]->get_kind (), SK_CONSTANT);
      ASSERT_EQ (cst_sval[i]->maybe_get_constant (), cst_int[i]);
    }

  /* Constant +, -, *, ==, != should all fold to the constant result
     (when it's within range of the table).  */
  for (int i = 0; i < NUM_CSTS; i++)
    for (int j = 0; j < NUM_CSTS; j++)
      {
	if (i != j)
	  ASSERT_NE (cst_sval[i], cst_sval[j]);
	if (i + j < NUM_CSTS)
	  {
	    const svalue *sum
	      = mgr.get_or_create_binop (integer_type_node, PLUS_EXPR,
					 cst_sval[i], cst_sval[j]);
	    ASSERT_EQ (sum, cst_sval[i + j]);
	  }
	if (i - j >= 0)
	  {
	    const svalue *difference
	      = mgr.get_or_create_binop (integer_type_node, MINUS_EXPR,
					 cst_sval[i], cst_sval[j]);
	    ASSERT_EQ (difference, cst_sval[i - j]);
	  }
	if (i * j < NUM_CSTS)
	  {
	    const svalue *product
	      = mgr.get_or_create_binop (integer_type_node, MULT_EXPR,
					 cst_sval[i], cst_sval[j]);
	    ASSERT_EQ (product, cst_sval[i * j]);
	  }
	const svalue *eq = mgr.get_or_create_binop (integer_type_node, EQ_EXPR,
						    cst_sval[i], cst_sval[j]);
	ASSERT_EQ (eq, i == j ? cst_sval[1] : cst_sval [0]);
	const svalue *neq = mgr.get_or_create_binop (integer_type_node, NE_EXPR,
						     cst_sval[i], cst_sval[j]);
	ASSERT_EQ (neq, i != j ? cst_sval[1] : cst_sval [0]);
	// etc
      }

  tree x = build_global_decl ("x", integer_type_node);

  test_region_model_context ctxt;
  region_model model (&mgr);
  const svalue *x_init = model.get_rvalue (x, &ctxt);

  /* PLUS_EXPR folding: adding zero (on either side) is the identity.  */
  const svalue *x_init_plus_zero
    = mgr.get_or_create_binop (integer_type_node, PLUS_EXPR,
			       x_init, cst_sval[0]);
  ASSERT_EQ (x_init_plus_zero, x_init);
  const svalue *zero_plus_x_init
    = mgr.get_or_create_binop (integer_type_node, PLUS_EXPR,
			       cst_sval[0], x_init);
  ASSERT_EQ (zero_plus_x_init, x_init);

  /* MULT_EXPR folding: multiplying by zero annihilates; by one is
     the identity.  */
  const svalue *x_init_times_zero
    = mgr.get_or_create_binop (integer_type_node, MULT_EXPR,
			       x_init, cst_sval[0]);
  ASSERT_EQ (x_init_times_zero, cst_sval[0]);
  const svalue *zero_times_x_init
    = mgr.get_or_create_binop (integer_type_node, MULT_EXPR,
			       cst_sval[0], x_init);
  ASSERT_EQ (zero_times_x_init, cst_sval[0]);

  const svalue *x_init_times_one
    = mgr.get_or_create_binop (integer_type_node, MULT_EXPR,
			       x_init, cst_sval[1]);
  ASSERT_EQ (x_init_times_one, x_init);
  const svalue *one_times_x_init
    = mgr.get_or_create_binop (integer_type_node, MULT_EXPR,
			       cst_sval[1], x_init);
  ASSERT_EQ (one_times_x_init, x_init);

  // etc
  // TODO: do we want to use the match-and-simplify DSL for this?

  /* Verify that binops put any constants on the RHS.  */
  const svalue *four_times_x_init
    = mgr.get_or_create_binop (integer_type_node, MULT_EXPR,
			       cst_sval[4], x_init);
  const svalue *x_init_times_four
    = mgr.get_or_create_binop (integer_type_node, MULT_EXPR,
			       x_init, cst_sval[4]);
  ASSERT_EQ (four_times_x_init, x_init_times_four);
  const binop_svalue *binop = four_times_x_init->dyn_cast_binop_svalue ();
  ASSERT_EQ (binop->get_op (), MULT_EXPR);
  ASSERT_EQ (binop->get_arg0 (), x_init);
  ASSERT_EQ (binop->get_arg1 (), cst_sval[4]);

  /* Verify that ((x + 1) + 1) == (x + 2).  */
  const svalue *x_init_plus_one
    = mgr.get_or_create_binop (integer_type_node, PLUS_EXPR,
			       x_init, cst_sval[1]);
  const svalue *x_init_plus_two
    = mgr.get_or_create_binop (integer_type_node, PLUS_EXPR,
			       x_init, cst_sval[2]);
  const svalue *x_init_plus_one_plus_one
    = mgr.get_or_create_binop (integer_type_node, PLUS_EXPR,
			       x_init_plus_one, cst_sval[1]);
  ASSERT_EQ (x_init_plus_one_plus_one, x_init_plus_two);

  /* Verify various binops on booleans: OR with true / AND with false
     fold to the constant, even against unknown or placeholder
     operands.  */
  {
    const svalue *sval_true = mgr.get_or_create_int_cst (boolean_type_node, 1);
    const svalue *sval_false = mgr.get_or_create_int_cst (boolean_type_node, 0);
    const svalue *sval_unknown
      = mgr.get_or_create_unknown_svalue (boolean_type_node);
    const placeholder_svalue sval_placeholder (mgr.alloc_symbol_id (),
					       boolean_type_node, "v");
    for (auto op : {BIT_IOR_EXPR, TRUTH_OR_EXPR})
      {
	ASSERT_EQ (mgr.get_or_create_binop (boolean_type_node, op,
					    sval_true, sval_unknown),
		   sval_true);
	ASSERT_EQ (mgr.get_or_create_binop (boolean_type_node, op,
					    sval_false, sval_unknown),
		   sval_unknown);
	ASSERT_EQ (mgr.get_or_create_binop (boolean_type_node, op,
					    sval_false, &sval_placeholder),
		   &sval_placeholder);
      }
    for (auto op : {BIT_AND_EXPR, TRUTH_AND_EXPR})
      {
	ASSERT_EQ (mgr.get_or_create_binop (boolean_type_node, op,
					    sval_false, sval_unknown),
		   sval_false);
	ASSERT_EQ (mgr.get_or_create_binop (boolean_type_node, op,
					    sval_true, sval_unknown),
		   sval_unknown);
	ASSERT_EQ (mgr.get_or_create_binop (boolean_type_node, op,
					    sval_true, &sval_placeholder),
		   &sval_placeholder);
      }
  }
}
8070 :
8071 : /* Verify that sub_svalues are folded as expected. */
8072 :
/* Verify that sub_svalues are folded as expected.  */

static void
test_sub_svalue_folding ()
{
  coord_test ct;
  tree c = build_global_decl ("c", ct.m_coord_type);
  tree c_x = build3 (COMPONENT_REF, TREE_TYPE (ct.m_x_field),
		     c, ct.m_x_field, NULL_TREE);

  region_model_manager mgr;
  region_model model (&mgr);
  test_region_model_context ctxt;
  const region *c_x_reg = model.get_lvalue (c_x, &ctxt);

  /* Verify that sub_svalue of "unknown" simply
     yields an unknown (of the field's type).  */

  const svalue *unknown = mgr.get_or_create_unknown_svalue (ct.m_coord_type);
  const svalue *sub = mgr.get_or_create_sub_svalue (TREE_TYPE (ct.m_x_field),
						    unknown, c_x_reg);
  ASSERT_EQ (sub->get_kind (), SK_UNKNOWN);
  ASSERT_EQ (sub->get_type (), TREE_TYPE (ct.m_x_field));
}
8095 :
8096 : /* Get BIT within VAL as a symbolic value within MGR. */
8097 :
8098 : static const svalue *
8099 256 : get_bit (region_model_manager *mgr,
8100 : bit_offset_t bit,
8101 : unsigned HOST_WIDE_INT val)
8102 : {
8103 256 : const svalue *inner_svalue
8104 256 : = mgr->get_or_create_int_cst (unsigned_type_node, val);
8105 256 : return mgr->get_or_create_bits_within (boolean_type_node,
8106 256 : bit_range (bit, 1),
8107 256 : inner_svalue);
8108 : }
8109 :
8110 : /* Verify that bits_within_svalues are folded as expected. */
8111 :
8112 : static void
8113 4 : test_bits_within_svalue_folding ()
8114 : {
8115 4 : region_model_manager mgr;
8116 :
8117 4 : const svalue *zero = mgr.get_or_create_int_cst (boolean_type_node, 0);
8118 4 : const svalue *one = mgr.get_or_create_int_cst (boolean_type_node, 1);
8119 :
8120 4 : {
8121 4 : const unsigned val = 0x0000;
8122 68 : for (unsigned bit = 0; bit < 16; bit++)
8123 64 : ASSERT_EQ (get_bit (&mgr, bit, val), zero);
8124 : }
8125 :
8126 4 : {
8127 4 : const unsigned val = 0x0001;
8128 4 : ASSERT_EQ (get_bit (&mgr, 0, val), one);
8129 64 : for (unsigned bit = 1; bit < 16; bit++)
8130 60 : ASSERT_EQ (get_bit (&mgr, bit, val), zero);
8131 : }
8132 :
8133 4 : {
8134 4 : const unsigned val = 0x8000;
8135 64 : for (unsigned bit = 0; bit < 15; bit++)
8136 60 : ASSERT_EQ (get_bit (&mgr, bit, val), zero);
8137 4 : ASSERT_EQ (get_bit (&mgr, 15, val), one);
8138 : }
8139 :
8140 4 : {
8141 4 : const unsigned val = 0xFFFF;
8142 68 : for (unsigned bit = 0; bit < 16; bit++)
8143 64 : ASSERT_EQ (get_bit (&mgr, bit, val), one);
8144 : }
8145 4 : }
8146 :
8147 : /* Test that region::descendent_of_p works as expected. */
8148 :
/* Test that region::descendent_of_p works as expected, for the
   top-level memory-space regions, a global decl, and a cast region.  */

static void
test_descendent_of_p ()
{
  region_model_manager mgr;
  const region *stack = mgr.get_stack_region ();
  const region *heap = mgr.get_heap_region ();
  const region *code = mgr.get_code_region ();
  const region *globals = mgr.get_globals_region ();

  /* descendent_of_p should return true when used on the region itself.  */
  ASSERT_TRUE (stack->descendent_of_p (stack));
  ASSERT_FALSE (stack->descendent_of_p (heap));
  ASSERT_FALSE (stack->descendent_of_p (code));
  ASSERT_FALSE (stack->descendent_of_p (globals));

  /* A global var's region is a descendent of the globals region.  */
  tree x = build_global_decl ("x", integer_type_node);
  const region *x_reg = mgr.get_region_for_global (x);
  ASSERT_TRUE (x_reg->descendent_of_p (globals));

  /* A cast_region should be a descendent of the original region.  */
  const region *cast_reg = mgr.get_cast_region (x_reg, ptr_type_node);
  ASSERT_TRUE (cast_reg->descendent_of_p (x_reg));
}
8172 :
8173 : /* Verify that bit_range_region works as expected. */
8174 :
8175 : static void
8176 4 : test_bit_range_regions ()
8177 : {
8178 4 : tree x = build_global_decl ("x", integer_type_node);
8179 4 : region_model_manager mgr;
8180 4 : const region *x_reg = mgr.get_region_for_global (x);
8181 4 : const region *byte0
8182 4 : = mgr.get_bit_range (x_reg, char_type_node, bit_range (0, 8));
8183 4 : const region *byte1
8184 4 : = mgr.get_bit_range (x_reg, char_type_node, bit_range (8, 8));
8185 4 : ASSERT_TRUE (byte0->descendent_of_p (x_reg));
8186 4 : ASSERT_TRUE (byte1->descendent_of_p (x_reg));
8187 4 : ASSERT_NE (byte0, byte1);
8188 4 : }
8189 :
8190 : /* Verify that simple assignments work as expected. */
8191 :
8192 : static void
8193 4 : test_assignment ()
8194 : {
8195 4 : tree int_0 = integer_zero_node;
8196 4 : tree x = build_global_decl ("x", integer_type_node);
8197 4 : tree y = build_global_decl ("y", integer_type_node);
8198 :
8199 : /* "x == 0", then use of y, then "y = 0;". */
8200 4 : region_model_manager mgr;
8201 4 : region_model model (&mgr);
8202 4 : ADD_SAT_CONSTRAINT (model, x, EQ_EXPR, int_0);
8203 4 : ASSERT_CONDITION_UNKNOWN (model, y, EQ_EXPR, int_0);
8204 4 : model.set_value (model.get_lvalue (y, nullptr),
8205 : model.get_rvalue (int_0, nullptr),
8206 : nullptr);
8207 4 : ASSERT_CONDITION_TRUE (model, y, EQ_EXPR, int_0);
8208 4 : ASSERT_CONDITION_TRUE (model, y, EQ_EXPR, x);
8209 4 : }
8210 :
8211 : /* Verify that compound assignments work as expected. */
8212 :
8213 : static void
8214 4 : test_compound_assignment ()
8215 : {
8216 4 : coord_test ct;
8217 :
8218 4 : tree c = build_global_decl ("c", ct.m_coord_type);
8219 4 : tree c_x = build3 (COMPONENT_REF, TREE_TYPE (ct.m_x_field),
8220 : c, ct.m_x_field, NULL_TREE);
8221 4 : tree c_y = build3 (COMPONENT_REF, TREE_TYPE (ct.m_y_field),
8222 : c, ct.m_y_field, NULL_TREE);
8223 4 : tree d = build_global_decl ("d", ct.m_coord_type);
8224 4 : tree d_x = build3 (COMPONENT_REF, TREE_TYPE (ct.m_x_field),
8225 : d, ct.m_x_field, NULL_TREE);
8226 4 : tree d_y = build3 (COMPONENT_REF, TREE_TYPE (ct.m_y_field),
8227 : d, ct.m_y_field, NULL_TREE);
8228 :
8229 4 : tree int_17 = build_int_cst (integer_type_node, 17);
8230 4 : tree int_m3 = build_int_cst (integer_type_node, -3);
8231 :
8232 4 : region_model_manager mgr;
8233 4 : region_model model (&mgr);
8234 4 : model.set_value (c_x, int_17, nullptr);
8235 4 : model.set_value (c_y, int_m3, nullptr);
8236 :
8237 : /* Copy c to d. */
8238 4 : const svalue *sval = model.get_rvalue (c, nullptr);
8239 4 : model.set_value (model.get_lvalue (d, nullptr), sval, nullptr);
8240 :
8241 : /* Check that the fields have the same svalues. */
8242 4 : ASSERT_EQ (model.get_rvalue (c_x, nullptr), model.get_rvalue (d_x, nullptr));
8243 4 : ASSERT_EQ (model.get_rvalue (c_y, nullptr), model.get_rvalue (d_y, nullptr));
8244 4 : }
8245 :
/* Verify the details of pushing and popping stack frames:
   frame regions, locals/params within frames, constraints surviving
   a pop, and poisoning of pointers into a popped frame.  */

static void
test_stack_frames ()
{
  tree int_42 = build_int_cst (integer_type_node, 42);
  tree int_10 = build_int_cst (integer_type_node, 10);
  tree int_5 = build_int_cst (integer_type_node, 5);
  tree int_0 = integer_zero_node;

  /* Create fndecls for a caller ("parent_fn") and callee ("child_fn"),
     both with no parameters.  */
  auto_vec <tree> param_types;
  tree parent_fndecl = make_fndecl (integer_type_node,
				    "parent_fn",
				    param_types);
  allocate_struct_function (parent_fndecl, true);

  tree child_fndecl = make_fndecl (integer_type_node,
				   "child_fn",
				   param_types);
  allocate_struct_function (child_fndecl, true);

  /* "a" and "b" in the parent frame.  */
  tree a = build_decl (UNKNOWN_LOCATION, PARM_DECL,
		       get_identifier ("a"),
		       integer_type_node);
  DECL_CONTEXT (a) = parent_fndecl;
  tree b = build_decl (UNKNOWN_LOCATION, PARM_DECL,
		       get_identifier ("b"),
		       integer_type_node);
  DECL_CONTEXT (b) = parent_fndecl;
  /* "x" and "y" in a child frame.  */
  tree x = build_decl (UNKNOWN_LOCATION, PARM_DECL,
		       get_identifier ("x"),
		       integer_type_node);
  DECL_CONTEXT (x) = child_fndecl;
  tree y = build_decl (UNKNOWN_LOCATION, PARM_DECL,
		       get_identifier ("y"),
		       integer_type_node);
  DECL_CONTEXT (y) = child_fndecl;

  /* "p" global.  */
  tree p = build_global_decl ("p", ptr_type_node);

  /* "q" global.  */
  tree q = build_global_decl ("q", ptr_type_node);

  region_model_manager mgr;
  test_region_model_context ctxt;
  region_model model (&mgr);

  /* Push stack frame for "parent_fn".  */
  const region *parent_frame_reg
    = model.push_frame (*DECL_STRUCT_FUNCTION (parent_fndecl),
			nullptr, nullptr, &ctxt);
  ASSERT_EQ (model.get_current_frame (), parent_frame_reg);
  ASSERT_TRUE (model.region_exists_p (parent_frame_reg));
  /* Set "a = 42" and check its region is within the parent frame.  */
  const region *a_in_parent_reg = model.get_lvalue (a, &ctxt);
  model.set_value (a_in_parent_reg,
		   model.get_rvalue (int_42, &ctxt),
		   &ctxt);
  ASSERT_EQ (a_in_parent_reg->maybe_get_frame_region (), parent_frame_reg);

  /* Impose "b < 10" within the parent frame.  */
  model.add_constraint (b, LT_EXPR, int_10, &ctxt);
  ASSERT_EQ (model.eval_condition (b, LT_EXPR, int_10, &ctxt),
	     tristate (tristate::TS_TRUE));

  /* Push stack frame for "child_fn".  */
  const region *child_frame_reg
    = model.push_frame (*DECL_STRUCT_FUNCTION (child_fndecl),
			nullptr, nullptr, &ctxt);
  ASSERT_EQ (model.get_current_frame (), child_frame_reg);
  ASSERT_TRUE (model.region_exists_p (child_frame_reg));
  /* Set "x = 0" and check its region is within the child frame.  */
  const region *x_in_child_reg = model.get_lvalue (x, &ctxt);
  model.set_value (x_in_child_reg,
		   model.get_rvalue (int_0, &ctxt),
		   &ctxt);
  ASSERT_EQ (x_in_child_reg->maybe_get_frame_region (), child_frame_reg);

  /* Impose "y != 5" within the child frame.  */
  model.add_constraint (y, NE_EXPR, int_5, &ctxt);
  ASSERT_EQ (model.eval_condition (y, NE_EXPR, int_5, &ctxt),
	     tristate (tristate::TS_TRUE));

  /* Point a global pointer at a local in the child frame:  p = &x.  */
  const region *p_in_globals_reg = model.get_lvalue (p, &ctxt);
  model.set_value (p_in_globals_reg,
		   mgr.get_ptr_svalue (ptr_type_node, x_in_child_reg),
		   &ctxt);
  /* A global's region is not within any frame.  */
  ASSERT_EQ (p_in_globals_reg->maybe_get_frame_region (), nullptr);

  /* Point another global pointer at p: q = &p.  */
  const region *q_in_globals_reg = model.get_lvalue (q, &ctxt);
  model.set_value (q_in_globals_reg,
		   mgr.get_ptr_svalue (ptr_type_node, p_in_globals_reg),
		   &ctxt);

  /* Test region::descendent_of_p.  */
  ASSERT_TRUE (child_frame_reg->descendent_of_p (child_frame_reg));
  ASSERT_TRUE (x_in_child_reg->descendent_of_p (child_frame_reg));
  ASSERT_FALSE (a_in_parent_reg->descendent_of_p (child_frame_reg));

  /* Pop the "child_fn" frame from the stack; only the parent frame
     should remain.  */
  model.pop_frame (nullptr, nullptr, &ctxt, nullptr);
  ASSERT_FALSE (model.region_exists_p (child_frame_reg));
  ASSERT_TRUE (model.region_exists_p (parent_frame_reg));

  /* Verify that p (which was pointing at the local "x" in the popped
     frame) has been poisoned.  */
  const svalue *new_p_sval = model.get_rvalue (p, nullptr);
  ASSERT_EQ (new_p_sval->get_kind (), SK_POISONED);
  ASSERT_EQ (new_p_sval->dyn_cast_poisoned_svalue ()->get_poison_kind (),
	     poison_kind::popped_stack);

  /* Verify that q still points to p, in spite of the region
     renumbering.  */
  const svalue *new_q_sval = model.get_rvalue (q, &ctxt);
  ASSERT_EQ (new_q_sval->get_kind (), SK_REGION);
  ASSERT_EQ (new_q_sval->maybe_get_region (),
	     model.get_lvalue (p, &ctxt));

  /* Verify that top of stack has been updated.  */
  ASSERT_EQ (model.get_current_frame (), parent_frame_reg);

  /* Verify locals in parent frame.  */
  /* Verify "a" still has its value (42, set above).  */
  const svalue *new_a_sval = model.get_rvalue (a, &ctxt);
  ASSERT_EQ (new_a_sval->get_kind (), SK_CONSTANT);
  ASSERT_EQ (new_a_sval->dyn_cast_constant_svalue ()->get_constant (),
	     int_42);
  /* Verify "b" still has its constraint ("b < 10", added above).  */
  ASSERT_EQ (model.eval_condition (b, LT_EXPR, int_10, &ctxt),
	     tristate (tristate::TS_TRUE));
}
8378 :
/* Verify that get_representative_path_var works as expected, that
   we can map from regions to parms and back within a recursive call
   stack.  */

static void
test_get_representative_path_var ()
{
  /* Create fndecl for a function "factorial" with no declared params.  */
  auto_vec <tree> param_types;
  tree fndecl = make_fndecl (integer_type_node,
			     "factorial",
			     param_types);
  allocate_struct_function (fndecl, true);

  /* Parm "n".  */
  tree n = build_decl (UNKNOWN_LOCATION, PARM_DECL,
		       get_identifier ("n"),
		       integer_type_node);
  DECL_CONTEXT (n) = fndecl;

  region_model_manager mgr;
  test_region_model_context ctxt;
  region_model model (&mgr);

  /* Push 5 stack frames for "factorial", each with a param.
     Record the region and initial svalue of parm "n" per frame.  */
  auto_vec<const region *> parm_regs;
  auto_vec<const svalue *> parm_svals;
  for (int depth = 0; depth < 5; depth++)
    {
      const region *frame_n_reg
	= model.push_frame (*DECL_STRUCT_FUNCTION (fndecl),
			    nullptr, nullptr, &ctxt);
      const region *parm_n_reg = model.get_lvalue (path_var (n, depth), &ctxt);
      parm_regs.safe_push (parm_n_reg);

      /* The parm's region should be directly within its frame.  */
      ASSERT_EQ (parm_n_reg->get_parent_region (), frame_n_reg);
      const svalue *sval_n = mgr.get_or_create_initial_value (parm_n_reg);
      parm_svals.safe_push (sval_n);
    }

  /* Verify that we can recognize that the regions are the parms,
     at every depth.
     NOTE(review): the expected path_vars below use "depth + 1" whereas
     the lookups above used "depth"; presumably the representative
     path_var's stack depth is one greater than the 0-based loop index —
     confirm against path_var's documentation.  */
  for (int depth = 0; depth < 5; depth++)
    {
      {
	svalue_set visited;
	ASSERT_EQ (model.get_representative_path_var (parm_regs[depth],
						      &visited,
						      nullptr),
		   path_var (n, depth + 1));
      }
      /* ...and that we can lookup lvalues for locals for all frames,
	 not just the top.  */
      ASSERT_EQ (model.get_lvalue (path_var (n, depth), nullptr),
		 parm_regs[depth]);
      /* ...and that we can locate the svalues.  */
      {
	svalue_set visited;
	ASSERT_EQ (model.get_representative_path_var (parm_svals[depth],
						      &visited,
						      nullptr),
		   path_var (n, depth + 1));
      }
    }
}
8443 :
8444 : /* Ensure that region_model::operator== works as expected. */
8445 :
8446 : static void
8447 4 : test_equality_1 ()
8448 : {
8449 4 : tree int_42 = build_int_cst (integer_type_node, 42);
8450 4 : tree int_17 = build_int_cst (integer_type_node, 17);
8451 :
8452 : /* Verify that "empty" region_model instances are equal to each other. */
8453 4 : region_model_manager mgr;
8454 4 : region_model model0 (&mgr);
8455 4 : region_model model1 (&mgr);
8456 4 : ASSERT_EQ (model0, model1);
8457 :
8458 : /* Verify that setting state in model1 makes the models non-equal. */
8459 4 : tree x = build_global_decl ("x", integer_type_node);
8460 4 : model0.set_value (x, int_42, nullptr);
8461 4 : ASSERT_EQ (model0.get_rvalue (x, nullptr)->maybe_get_constant (), int_42);
8462 4 : ASSERT_NE (model0, model1);
8463 :
8464 : /* Verify the copy-ctor. */
8465 4 : region_model model2 (model0);
8466 4 : ASSERT_EQ (model0, model2);
8467 4 : ASSERT_EQ (model2.get_rvalue (x, nullptr)->maybe_get_constant (), int_42);
8468 4 : ASSERT_NE (model1, model2);
8469 :
8470 : /* Verify that models obtained from copy-ctor are independently editable
8471 : w/o affecting the original model. */
8472 4 : model2.set_value (x, int_17, nullptr);
8473 4 : ASSERT_NE (model0, model2);
8474 4 : ASSERT_EQ (model2.get_rvalue (x, nullptr)->maybe_get_constant (), int_17);
8475 4 : ASSERT_EQ (model0.get_rvalue (x, nullptr)->maybe_get_constant (), int_42);
8476 4 : }
8477 :
8478 : /* Verify that region models for
8479 : x = 42; y = 113;
8480 : and
8481 : y = 113; x = 42;
8482 : are equal. */
8483 :
8484 : static void
8485 4 : test_canonicalization_2 ()
8486 : {
8487 4 : tree int_42 = build_int_cst (integer_type_node, 42);
8488 4 : tree int_113 = build_int_cst (integer_type_node, 113);
8489 4 : tree x = build_global_decl ("x", integer_type_node);
8490 4 : tree y = build_global_decl ("y", integer_type_node);
8491 :
8492 4 : region_model_manager mgr;
8493 4 : region_model model0 (&mgr);
8494 4 : model0.set_value (model0.get_lvalue (x, nullptr),
8495 : model0.get_rvalue (int_42, nullptr),
8496 : nullptr);
8497 4 : model0.set_value (model0.get_lvalue (y, nullptr),
8498 : model0.get_rvalue (int_113, nullptr),
8499 : nullptr);
8500 :
8501 4 : region_model model1 (&mgr);
8502 4 : model1.set_value (model1.get_lvalue (y, nullptr),
8503 : model1.get_rvalue (int_113, nullptr),
8504 : nullptr);
8505 4 : model1.set_value (model1.get_lvalue (x, nullptr),
8506 : model1.get_rvalue (int_42, nullptr),
8507 : nullptr);
8508 :
8509 4 : ASSERT_EQ (model0, model1);
8510 4 : }
8511 :
8512 : /* Verify that constraints for
8513 : x > 3 && y > 42
8514 : and
8515 : y > 42 && x > 3
8516 : are equal after canonicalization. */
8517 :
8518 : static void
8519 4 : test_canonicalization_3 ()
8520 : {
8521 4 : tree int_3 = build_int_cst (integer_type_node, 3);
8522 4 : tree int_42 = build_int_cst (integer_type_node, 42);
8523 4 : tree x = build_global_decl ("x", integer_type_node);
8524 4 : tree y = build_global_decl ("y", integer_type_node);
8525 :
8526 4 : region_model_manager mgr;
8527 4 : region_model model0 (&mgr);
8528 4 : model0.add_constraint (x, GT_EXPR, int_3, nullptr);
8529 4 : model0.add_constraint (y, GT_EXPR, int_42, nullptr);
8530 :
8531 4 : region_model model1 (&mgr);
8532 4 : model1.add_constraint (y, GT_EXPR, int_42, nullptr);
8533 4 : model1.add_constraint (x, GT_EXPR, int_3, nullptr);
8534 :
8535 4 : model0.canonicalize ();
8536 4 : model1.canonicalize ();
8537 4 : ASSERT_EQ (model0, model1);
8538 4 : }
8539 :
8540 : /* Verify that we can canonicalize a model containing NaN and other real
8541 : constants. */
8542 :
8543 : static void
8544 4 : test_canonicalization_4 ()
8545 : {
8546 4 : auto_vec<tree> csts;
8547 4 : append_interesting_constants (&csts);
8548 :
8549 4 : region_model_manager mgr;
8550 4 : region_model model (&mgr);
8551 :
8552 60 : for (tree cst : csts)
8553 48 : model.get_rvalue (cst, nullptr);
8554 :
8555 4 : model.canonicalize ();
8556 4 : }
8557 :
8558 : /* Assert that if we have two region_model instances
8559 : with values VAL_A and VAL_B for EXPR that they are
8560 : mergable. Write the merged model to *OUT_MERGED_MODEL,
8561 : and the merged svalue ptr to *OUT_MERGED_SVALUE.
   If VAL_A or VAL_B are NULL_TREE, don't populate EXPR
   for that region_model.  */
8564 :
8565 : static void
8566 20 : assert_region_models_merge (tree expr, tree val_a, tree val_b,
8567 : region_model *out_merged_model,
8568 : const svalue **out_merged_svalue)
8569 : {
8570 20 : region_model_manager *mgr = out_merged_model->get_manager ();
8571 20 : program_point point (program_point::origin (*mgr));
8572 20 : test_region_model_context ctxt;
8573 20 : region_model model0 (mgr);
8574 20 : region_model model1 (mgr);
8575 20 : if (val_a)
8576 16 : model0.set_value (model0.get_lvalue (expr, &ctxt),
8577 : model0.get_rvalue (val_a, &ctxt),
8578 : &ctxt);
8579 20 : if (val_b)
8580 16 : model1.set_value (model1.get_lvalue (expr, &ctxt),
8581 : model1.get_rvalue (val_b, &ctxt),
8582 : &ctxt);
8583 :
8584 : /* They should be mergeable. */
8585 20 : ASSERT_TRUE (model0.can_merge_with_p (model1, point, out_merged_model));
8586 20 : *out_merged_svalue = out_merged_model->get_rvalue (expr, &ctxt);
8587 20 : }
8588 :
/* Verify that we can merge region_model instances, covering constants,
   placeholders, params, globals, pointers (to locals, globals and heap
   allocations), views, and cross-frame pointers.  */

static void
test_state_merging ()
{
  /* Constants and global/param decls shared by the scenarios below.  */
  tree int_42 = build_int_cst (integer_type_node, 42);
  tree int_113 = build_int_cst (integer_type_node, 113);
  tree x = build_global_decl ("x", integer_type_node);
  tree y = build_global_decl ("y", integer_type_node);
  tree z = build_global_decl ("z", integer_type_node);
  tree p = build_global_decl ("p", ptr_type_node);

  tree addr_of_y = build1 (ADDR_EXPR, ptr_type_node, y);
  tree addr_of_z = build1 (ADDR_EXPR, ptr_type_node, z);

  /* A fndecl "test_fn" for the frame-based scenarios.  */
  auto_vec <tree> param_types;
  tree test_fndecl = make_fndecl (integer_type_node, "test_fn", param_types);
  allocate_struct_function (test_fndecl, true);

  /* Param "a".  */
  tree a = build_decl (UNKNOWN_LOCATION, PARM_DECL,
		       get_identifier ("a"),
		       integer_type_node);
  DECL_CONTEXT (a) = test_fndecl;
  tree addr_of_a = build1 (ADDR_EXPR, ptr_type_node, a);

  /* Param "q", a pointer.  */
  tree q = build_decl (UNKNOWN_LOCATION, PARM_DECL,
		       get_identifier ("q"),
		       ptr_type_node);
  DECL_CONTEXT (q) = test_fndecl;

  region_model_manager mgr;
  program_point point (program_point::origin (mgr));

  {
    region_model model0 (&mgr);
    region_model model1 (&mgr);
    region_model merged (&mgr);
    /* Verify empty models can be merged.  */
    ASSERT_TRUE (model0.can_merge_with_p (model1, point, &merged));
    ASSERT_EQ (model0, merged);
  }

  /* Verify that we can merge two contradictory constraints on the
     value for a global.  */
  /* TODO: verify that the merged model doesn't have a value for
     the global  */
  {
    region_model model0 (&mgr);
    region_model model1 (&mgr);
    region_model merged (&mgr);
    test_region_model_context ctxt;
    model0.add_constraint (x, EQ_EXPR, int_42, &ctxt);
    model1.add_constraint (x, EQ_EXPR, int_113, &ctxt);
    ASSERT_TRUE (model0.can_merge_with_p (model1, point, &merged));
    /* The merge should not be equal to either input.  */
    ASSERT_NE (model0, merged);
    ASSERT_NE (model1, merged);
  }

  /* Verify handling of a PARM_DECL.  */
  {
    test_region_model_context ctxt;
    region_model model0 (&mgr);
    region_model model1 (&mgr);
    ASSERT_EQ (model0.get_stack_depth (), 0);
    model0.push_frame (*DECL_STRUCT_FUNCTION (test_fndecl),
		       nullptr, nullptr, &ctxt);
    ASSERT_EQ (model0.get_stack_depth (), 1);
    model1.push_frame (*DECL_STRUCT_FUNCTION (test_fndecl),
		       nullptr, nullptr, &ctxt);

    /* Assign the same placeholder value to "a" in both models.  */
    placeholder_svalue test_sval (mgr.alloc_symbol_id (),
				  integer_type_node, "test sval");
    model0.set_value (model0.get_lvalue (a, &ctxt), &test_sval, &ctxt);
    model1.set_value (model1.get_lvalue (a, &ctxt), &test_sval, &ctxt);
    ASSERT_EQ (model0, model1);

    /* They should be mergeable, and the result should be the same.  */
    region_model merged (&mgr);
    ASSERT_TRUE (model0.can_merge_with_p (model1, point, &merged));
    ASSERT_EQ (model0, merged);
    /* In particular, "a" should have the placeholder value.  */
    ASSERT_EQ (merged.get_rvalue (a, &ctxt), &test_sval);
  }

  /* Verify handling of a global.  */
  {
    test_region_model_context ctxt;
    region_model model0 (&mgr);
    region_model model1 (&mgr);

    placeholder_svalue test_sval (mgr.alloc_symbol_id (),
				  integer_type_node, "test sval");
    model0.set_value (model0.get_lvalue (x, &ctxt), &test_sval, &ctxt);
    model1.set_value (model1.get_lvalue (x, &ctxt), &test_sval, &ctxt);
    ASSERT_EQ (model0, model1);

    /* They should be mergeable, and the result should be the same.  */
    region_model merged (&mgr);
    ASSERT_TRUE (model0.can_merge_with_p (model1, point, &merged));
    ASSERT_EQ (model0, merged);
    /* In particular, "x" should have the placeholder value.  */
    ASSERT_EQ (merged.get_rvalue (x, &ctxt), &test_sval);
  }

  /* Use global-handling to verify various combinations of values.  */

  /* Two equal constant values.  */
  {
    region_model merged (&mgr);
    const svalue *merged_x_sval;
    assert_region_models_merge (x, int_42, int_42, &merged, &merged_x_sval);

    /* In particular, there should be a constant value for "x".  */
    ASSERT_EQ (merged_x_sval->get_kind (), SK_CONSTANT);
    ASSERT_EQ (merged_x_sval->dyn_cast_constant_svalue ()->get_constant (),
	       int_42);
  }

  /* Two non-equal constant values.  */
  {
    region_model merged (&mgr);
    const svalue *merged_x_sval;
    assert_region_models_merge (x, int_42, int_113, &merged, &merged_x_sval);

    /* In particular, there should be a "widening" value for "x".  */
    ASSERT_EQ (merged_x_sval->get_kind (), SK_WIDENING);
  }

  /* Initial and constant.  */
  {
    region_model merged (&mgr);
    const svalue *merged_x_sval;
    assert_region_models_merge (x, NULL_TREE, int_113, &merged, &merged_x_sval);

    /* In particular, there should be an unknown value for "x".  */
    ASSERT_EQ (merged_x_sval->get_kind (), SK_UNKNOWN);
  }

  /* Constant and initial.  */
  {
    region_model merged (&mgr);
    const svalue *merged_x_sval;
    assert_region_models_merge (x, int_42, NULL_TREE, &merged, &merged_x_sval);

    /* In particular, there should be an unknown value for "x".  */
    ASSERT_EQ (merged_x_sval->get_kind (), SK_UNKNOWN);
  }

  /* Unknown and constant.  */
  // TODO

  /* Pointers: NULL and NULL.  */
  // TODO

  /* Pointers: NULL and non-NULL.  */
  // TODO

  /* Pointers: non-NULL and non-NULL: ptr to a local.  */
  {
    region_model model0 (&mgr);
    model0.push_frame (*DECL_STRUCT_FUNCTION (test_fndecl),
		       nullptr, nullptr, nullptr);
    /* p = &a (a local within the pushed frame).  */
    model0.set_value (model0.get_lvalue (p, nullptr),
		      model0.get_rvalue (addr_of_a, nullptr), nullptr);

    region_model model1 (model0);
    ASSERT_EQ (model0, model1);

    /* They should be mergeable, and the result should be the same.  */
    region_model merged (&mgr);
    ASSERT_TRUE (model0.can_merge_with_p (model1, point, &merged));
    ASSERT_EQ (model0, merged);
  }

  /* Pointers: non-NULL and non-NULL: ptr to a global.  */
  {
    region_model merged (&mgr);
    /* p == &y in both input models.  */
    const svalue *merged_p_sval;
    assert_region_models_merge (p, addr_of_y, addr_of_y, &merged,
				&merged_p_sval);

    /* We should get p == &y in the merged model.  */
    ASSERT_EQ (merged_p_sval->get_kind (), SK_REGION);
    const region_svalue *merged_p_ptr
      = merged_p_sval->dyn_cast_region_svalue ();
    const region *merged_p_star_reg = merged_p_ptr->get_pointee ();
    ASSERT_EQ (merged_p_star_reg, merged.get_lvalue (y, nullptr));
  }

  /* Pointers: non-NULL ptrs to different globals should not merge;
     see e.g. gcc.dg/analyzer/torture/uninit-pr108725.c  */
  {
    region_model merged_model (&mgr);
    program_point point (program_point::origin (mgr));
    test_region_model_context ctxt;
    /* x == &y vs x == &z in the input models; these are actually casts
       of the ptrs to "int".  */
    region_model model0 (&mgr);
    region_model model1 (&mgr);
    model0.set_value (model0.get_lvalue (x, &ctxt),
		      model0.get_rvalue (addr_of_y, &ctxt),
		      &ctxt);
    model1.set_value (model1.get_lvalue (x, &ctxt),
		      model1.get_rvalue (addr_of_z, &ctxt),
		      &ctxt);
    /* They should not be mergeable.  */
    ASSERT_FALSE (model0.can_merge_with_p (model1, point, &merged_model));
  }

  /* Pointers: non-NULL and non-NULL: ptr to a heap region.  */
  {
    test_region_model_context ctxt;
    region_model model0 (&mgr);
    /* Allocate a 1024-byte heap region and point "p" at it.  */
    tree size = build_int_cst (size_type_node, 1024);
    const svalue *size_sval = mgr.get_or_create_constant_svalue (size);
    const region *new_reg
      = model0.get_or_create_region_for_heap_alloc (size_sval, &ctxt);
    const svalue *ptr_sval = mgr.get_ptr_svalue (ptr_type_node, new_reg);
    model0.set_value (model0.get_lvalue (p, &ctxt),
		      ptr_sval, &ctxt);

    region_model model1 (model0);

    ASSERT_EQ (model0, model1);

    region_model merged (&mgr);
    ASSERT_TRUE (model0.can_merge_with_p (model1, point, &merged));

    /* The merged model ought to be identical.  */
    ASSERT_EQ (model0, merged);
  }

  /* Two regions sharing the same placeholder svalue should continue sharing
     it after self-merger.  */
  {
    test_region_model_context ctxt;
    region_model model0 (&mgr);
    placeholder_svalue placeholder_sval (mgr.alloc_symbol_id (),
					 integer_type_node, "test");
    model0.set_value (model0.get_lvalue (x, &ctxt),
		      &placeholder_sval, &ctxt);
    model0.set_value (model0.get_lvalue (y, &ctxt), &placeholder_sval, &ctxt);
    region_model model1 (model0);

    /* They should be mergeable, and the result should be the same.  */
    region_model merged (&mgr);
    ASSERT_TRUE (model0.can_merge_with_p (model1, point, &merged));
    ASSERT_EQ (model0, merged);

    /* In particular, we should have x == y.  */
    ASSERT_EQ (merged.eval_condition (x, EQ_EXPR, y, &ctxt),
	       tristate (tristate::TS_TRUE));
  }

  /* "x == 42" vs "x != 42" should be mergeable.  */
  {
    region_model model0 (&mgr);
    region_model model1 (&mgr);
    test_region_model_context ctxt;
    model0.add_constraint (x, EQ_EXPR, int_42, &ctxt);
    model1.add_constraint (x, NE_EXPR, int_42, &ctxt);
    region_model merged (&mgr);
    ASSERT_TRUE (model0.can_merge_with_p (model1, point, &merged));
  }

  /* Likewise when one side additionally has "x == 113".  */
  {
    region_model model0 (&mgr);
    region_model model1 (&mgr);
    test_region_model_context ctxt;
    model0.add_constraint (x, EQ_EXPR, int_42, &ctxt);
    model1.add_constraint (x, NE_EXPR, int_42, &ctxt);
    model1.add_constraint (x, EQ_EXPR, int_113, &ctxt);
    region_model merged (&mgr);
    ASSERT_TRUE (model0.can_merge_with_p (model1, point, &merged));
  }

  // TODO: what can't we merge? need at least one such test

  /* TODO: various things
     - heap regions
     - value merging:
       - every combination, but in particular
	 - pairs of regions
   */

  /* Views.  */
  {
    test_region_model_context ctxt;
    region_model model0 (&mgr);

    /* Store &y through a view of "x"'s region cast to pointer type.  */
    const region *x_reg = model0.get_lvalue (x, &ctxt);
    const region *x_as_ptr = mgr.get_cast_region (x_reg, ptr_type_node);
    model0.set_value (x_as_ptr, model0.get_rvalue (addr_of_y, &ctxt), &ctxt);

    region_model model1 (model0);
    ASSERT_EQ (model1, model0);

    /* They should be mergeable, and the result should be the same.  */
    region_model merged (&mgr);
    ASSERT_TRUE (model0.can_merge_with_p (model1, point, &merged));
  }

  /* Verify that we can merge a model in which a local in an older stack
     frame points to a local in a more recent stack frame.  */
  {
    region_model model0 (&mgr);
    model0.push_frame (*DECL_STRUCT_FUNCTION (test_fndecl),
		       nullptr, nullptr, nullptr);
    const region *q_in_first_frame = model0.get_lvalue (q, nullptr);

    /* Push a second frame.  */
    const region *reg_2nd_frame
      = model0.push_frame (*DECL_STRUCT_FUNCTION (test_fndecl),
			   nullptr, nullptr, nullptr);

    /* Have a pointer in the older frame point to a local in the
       more recent frame.  */
    const svalue *sval_ptr = model0.get_rvalue (addr_of_a, nullptr);
    model0.set_value (q_in_first_frame, sval_ptr, nullptr);

    /* Verify that it's pointing at the newer frame.  */
    const region *reg_pointee = sval_ptr->maybe_get_region ();
    ASSERT_EQ (reg_pointee->get_parent_region (), reg_2nd_frame);

    model0.canonicalize ();

    region_model model1 (model0);
    ASSERT_EQ (model0, model1);

    /* They should be mergeable, and the result should be the same
       (after canonicalization, at least).  */
    region_model merged (&mgr);
    ASSERT_TRUE (model0.can_merge_with_p (model1, point, &merged));
    merged.canonicalize ();
    ASSERT_EQ (model0, merged);
  }

  /* Verify that we can merge a model in which a local points to a global.  */
  {
    region_model model0 (&mgr);
    model0.push_frame (*DECL_STRUCT_FUNCTION (test_fndecl),
		       nullptr, nullptr, nullptr);
    model0.set_value (model0.get_lvalue (q, nullptr),
		      model0.get_rvalue (addr_of_y, nullptr), nullptr);

    region_model model1 (model0);
    ASSERT_EQ (model0, model1);

    /* They should be mergeable, and the result should be the same
       (after canonicalization, at least).  */
    region_model merged (&mgr);
    ASSERT_TRUE (model0.can_merge_with_p (model1, point, &merged));
    ASSERT_EQ (model0, merged);
  }
}
8946 :
8947 : /* Verify that constraints are correctly merged when merging region_model
8948 : instances. */
8949 :
8950 : static void
8951 4 : test_constraint_merging ()
8952 : {
8953 4 : tree int_0 = integer_zero_node;
8954 4 : tree int_5 = build_int_cst (integer_type_node, 5);
8955 4 : tree x = build_global_decl ("x", integer_type_node);
8956 4 : tree y = build_global_decl ("y", integer_type_node);
8957 4 : tree z = build_global_decl ("z", integer_type_node);
8958 4 : tree n = build_global_decl ("n", integer_type_node);
8959 :
8960 4 : region_model_manager mgr;
8961 4 : test_region_model_context ctxt;
8962 :
8963 : /* model0: 0 <= (x == y) < n. */
8964 4 : region_model model0 (&mgr);
8965 4 : model0.add_constraint (x, EQ_EXPR, y, &ctxt);
8966 4 : model0.add_constraint (x, GE_EXPR, int_0, nullptr);
8967 4 : model0.add_constraint (x, LT_EXPR, n, nullptr);
8968 :
8969 : /* model1: z != 5 && (0 <= x < n). */
8970 4 : region_model model1 (&mgr);
8971 4 : model1.add_constraint (z, NE_EXPR, int_5, nullptr);
8972 4 : model1.add_constraint (x, GE_EXPR, int_0, nullptr);
8973 4 : model1.add_constraint (x, LT_EXPR, n, nullptr);
8974 :
8975 : /* They should be mergeable; the merged constraints should
8976 : be: (0 <= x < n). */
8977 4 : program_point point (program_point::origin (mgr));
8978 4 : region_model merged (&mgr);
8979 4 : ASSERT_TRUE (model0.can_merge_with_p (model1, point, &merged));
8980 :
8981 4 : ASSERT_EQ (merged.eval_condition (x, GE_EXPR, int_0, &ctxt),
8982 : tristate (tristate::TS_TRUE));
8983 4 : ASSERT_EQ (merged.eval_condition (x, LT_EXPR, n, &ctxt),
8984 : tristate (tristate::TS_TRUE));
8985 :
8986 4 : ASSERT_EQ (merged.eval_condition (z, NE_EXPR, int_5, &ctxt),
8987 : tristate (tristate::TS_UNKNOWN));
8988 4 : ASSERT_EQ (merged.eval_condition (x, LT_EXPR, y, &ctxt),
8989 : tristate (tristate::TS_UNKNOWN));
8990 4 : }
8991 :
8992 : /* Verify that widening_svalue::eval_condition_without_cm works as
8993 : expected. */
8994 :
8995 : static void
8996 4 : test_widening_constraints ()
8997 : {
8998 4 : region_model_manager mgr;
8999 4 : const supernode *snode = nullptr;
9000 4 : tree int_0 = integer_zero_node;
9001 4 : tree int_m1 = build_int_cst (integer_type_node, -1);
9002 4 : tree int_1 = integer_one_node;
9003 4 : tree int_256 = build_int_cst (integer_type_node, 256);
9004 4 : test_region_model_context ctxt;
9005 4 : const svalue *int_0_sval = mgr.get_or_create_constant_svalue (int_0);
9006 4 : const svalue *int_1_sval = mgr.get_or_create_constant_svalue (int_1);
9007 4 : const svalue *w_zero_then_one_sval
9008 4 : = mgr.get_or_create_widening_svalue (integer_type_node, snode,
9009 : int_0_sval, int_1_sval);
9010 4 : const widening_svalue *w_zero_then_one
9011 4 : = w_zero_then_one_sval->dyn_cast_widening_svalue ();
9012 4 : ASSERT_EQ (w_zero_then_one->get_direction (),
9013 : widening_svalue::DIR_ASCENDING);
9014 4 : ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (LT_EXPR, int_m1),
9015 : tristate::TS_FALSE);
9016 4 : ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (LT_EXPR, int_0),
9017 : tristate::TS_FALSE);
9018 4 : ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (LT_EXPR, int_1),
9019 : tristate::TS_UNKNOWN);
9020 4 : ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (LT_EXPR, int_256),
9021 : tristate::TS_UNKNOWN);
9022 :
9023 4 : ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (LE_EXPR, int_m1),
9024 : tristate::TS_FALSE);
9025 4 : ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (LE_EXPR, int_0),
9026 : tristate::TS_UNKNOWN);
9027 4 : ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (LE_EXPR, int_1),
9028 : tristate::TS_UNKNOWN);
9029 4 : ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (LE_EXPR, int_256),
9030 : tristate::TS_UNKNOWN);
9031 :
9032 4 : ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (GT_EXPR, int_m1),
9033 : tristate::TS_TRUE);
9034 4 : ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (GT_EXPR, int_0),
9035 : tristate::TS_UNKNOWN);
9036 4 : ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (GT_EXPR, int_1),
9037 : tristate::TS_UNKNOWN);
9038 4 : ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (GT_EXPR, int_256),
9039 : tristate::TS_UNKNOWN);
9040 :
9041 4 : ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (GE_EXPR, int_m1),
9042 : tristate::TS_TRUE);
9043 4 : ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (GE_EXPR, int_0),
9044 : tristate::TS_TRUE);
9045 4 : ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (GE_EXPR, int_1),
9046 : tristate::TS_UNKNOWN);
9047 4 : ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (GE_EXPR, int_256),
9048 : tristate::TS_UNKNOWN);
9049 :
9050 4 : ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (EQ_EXPR, int_m1),
9051 : tristate::TS_FALSE);
9052 4 : ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (EQ_EXPR, int_0),
9053 : tristate::TS_UNKNOWN);
9054 4 : ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (EQ_EXPR, int_1),
9055 : tristate::TS_UNKNOWN);
9056 4 : ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (EQ_EXPR, int_256),
9057 : tristate::TS_UNKNOWN);
9058 :
9059 4 : ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (NE_EXPR, int_m1),
9060 : tristate::TS_TRUE);
9061 4 : ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (NE_EXPR, int_0),
9062 : tristate::TS_UNKNOWN);
9063 4 : ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (NE_EXPR, int_1),
9064 : tristate::TS_UNKNOWN);
9065 4 : ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (NE_EXPR, int_256),
9066 : tristate::TS_UNKNOWN);
9067 4 : }
9068 :
9069 : /* Verify merging constraints for states simulating successive iterations
9070 : of a loop.
9071 : Simulate:
9072 : for (i = 0; i < 256; i++)
9073 : [...body...]
9074 : i.e. this gimple:.
9075 : i_15 = 0;
9076 : goto <bb 4>;
9077 :
9078 : <bb 4> :
9079 : i_11 = PHI <i_15(2), i_23(3)>
9080 : if (i_11 <= 255)
9081 : goto <bb 3>;
9082 : else
9083 : goto [AFTER LOOP]
9084 :
9085 : <bb 3> :
9086 : [LOOP BODY]
9087 : i_23 = i_11 + 1;
9088 :
9089 : and thus these ops (and resultant states):
9090 : i_11 = PHI()
9091 : {i_11: 0}
9092 : add_constraint (i_11 <= 255) [for the true edge]
9093 : {i_11: 0} [constraint was a no-op]
9094 : i_23 = i_11 + 1;
9095 : {i_22: 1}
9096 : i_11 = PHI()
9097 : {i_11: WIDENED (at phi, 0, 1)}
9098 : add_constraint (i_11 <= 255) [for the true edge]
9099 : {i_11: WIDENED (at phi, 0, 1); WIDENED <= 255}
9100 : i_23 = i_11 + 1;
9101 : {i_23: (WIDENED (at phi, 0, 1) + 1); WIDENED <= 255}
9102 : i_11 = PHI(); merge with state at phi above
9103 : {i_11: WIDENED (at phi, 0, 1); WIDENED <= 256}
9104 : [changing meaning of "WIDENED" here]
9105 : if (i_11 <= 255)
9106 : T: {i_11: WIDENED (at phi, 0, 1); WIDENED <= 255}; cache hit
9107 : F: {i_11: 256}
9108 : */
9109 :
9110 : static void
9111 4 : test_iteration_1 ()
9112 : {
9113 4 : region_model_manager mgr;
9114 4 : program_point point (program_point::origin (mgr));
9115 :
9116 4 : tree int_0 = integer_zero_node;
9117 4 : tree int_1 = integer_one_node;
9118 4 : tree int_256 = build_int_cst (integer_type_node, 256);
9119 4 : tree i = build_global_decl ("i", integer_type_node);
9120 :
9121 4 : test_region_model_context ctxt;
9122 :
9123 : /* model0: i: 0. */
9124 4 : region_model model0 (&mgr);
9125 4 : model0.set_value (i, int_0, &ctxt);
9126 :
9127 : /* model1: i: 1. */
9128 4 : region_model model1 (&mgr);
9129 4 : model1.set_value (i, int_1, &ctxt);
9130 :
9131 : /* Should merge "i" to a widened value. */
9132 4 : region_model model2 (&mgr);
9133 4 : ASSERT_TRUE (model1.can_merge_with_p (model0, point, &model2));
9134 4 : const svalue *merged_i = model2.get_rvalue (i, &ctxt);
9135 4 : ASSERT_EQ (merged_i->get_kind (), SK_WIDENING);
9136 4 : const widening_svalue *w = merged_i->dyn_cast_widening_svalue ();
9137 4 : ASSERT_EQ (w->get_direction (), widening_svalue::DIR_ASCENDING);
9138 :
9139 : /* Add constraint: i < 256 */
9140 4 : model2.add_constraint (i, LT_EXPR, int_256, &ctxt);
9141 4 : ASSERT_EQ (model2.eval_condition (i, LT_EXPR, int_256, &ctxt),
9142 : tristate (tristate::TS_TRUE));
9143 4 : ASSERT_EQ (model2.eval_condition (i, GE_EXPR, int_0, &ctxt),
9144 : tristate (tristate::TS_TRUE));
9145 :
9146 : /* Try merging with the initial state. */
9147 4 : region_model model3 (&mgr);
9148 4 : ASSERT_TRUE (model2.can_merge_with_p (model0, point, &model3));
9149 : /* Merging the merged value with the initial value should be idempotent,
9150 : so that the analysis converges. */
9151 4 : ASSERT_EQ (model3.get_rvalue (i, &ctxt), merged_i);
9152 : /* Merger of 0 and a widening value with constraint < CST
9153 : should retain the constraint, even though it was implicit
9154 : for the 0 case. */
9155 4 : ASSERT_EQ (model3.eval_condition (i, LT_EXPR, int_256, &ctxt),
9156 : tristate (tristate::TS_TRUE));
9157 : /* ...and we should have equality: the analysis should have converged. */
9158 4 : ASSERT_EQ (model3, model2);
9159 :
9160 : /* "i_23 = i_11 + 1;" */
9161 4 : region_model model4 (model3);
9162 4 : ASSERT_EQ (model4, model2);
9163 4 : model4.set_value (i, build2 (PLUS_EXPR, integer_type_node, i, int_1), &ctxt);
9164 4 : const svalue *plus_one = model4.get_rvalue (i, &ctxt);
9165 4 : ASSERT_EQ (plus_one->get_kind (), SK_BINOP);
9166 :
9167 : /* Try merging with the "i: 1" state. */
9168 4 : region_model model5 (&mgr);
9169 4 : ASSERT_TRUE (model4.can_merge_with_p (model1, point, &model5));
9170 4 : ASSERT_EQ (model5.get_rvalue (i, &ctxt), plus_one);
9171 4 : ASSERT_EQ (model5, model4);
9172 :
9173 : /* "i_11 = PHI();" merge with state at phi above.
9174 : For i, we should have a merger of WIDENING with WIDENING + 1,
9175 : and this should be WIDENING again. */
9176 4 : region_model model6 (&mgr);
9177 4 : ASSERT_TRUE (model5.can_merge_with_p (model2, point, &model6));
9178 4 : const svalue *merged_widening = model6.get_rvalue (i, &ctxt);
9179 4 : ASSERT_EQ (merged_widening->get_kind (), SK_WIDENING);
9180 4 : }
9181 :
9182 : /* Verify that if we mark a pointer to a malloc-ed region as non-NULL,
9183 : all cast pointers to that region are also known to be non-NULL. */
9184 :
9185 : static void
9186 4 : test_malloc_constraints ()
9187 : {
9188 4 : region_model_manager mgr;
9189 4 : region_model model (&mgr);
9190 4 : tree p = build_global_decl ("p", ptr_type_node);
9191 4 : tree char_star = build_pointer_type (char_type_node);
9192 4 : tree q = build_global_decl ("q", char_star);
9193 4 : tree null_ptr = build_int_cst (ptr_type_node, 0);
9194 :
9195 4 : const svalue *size_in_bytes
9196 4 : = mgr.get_or_create_unknown_svalue (size_type_node);
9197 4 : const region *reg
9198 4 : = model.get_or_create_region_for_heap_alloc (size_in_bytes, nullptr);
9199 4 : const svalue *sval = mgr.get_ptr_svalue (ptr_type_node, reg);
9200 4 : model.set_value (model.get_lvalue (p, nullptr), sval, nullptr);
9201 4 : model.set_value (q, p, nullptr);
9202 :
9203 4 : ASSERT_CONDITION_UNKNOWN (model, p, NE_EXPR, null_ptr);
9204 4 : ASSERT_CONDITION_UNKNOWN (model, p, EQ_EXPR, null_ptr);
9205 4 : ASSERT_CONDITION_UNKNOWN (model, q, NE_EXPR, null_ptr);
9206 4 : ASSERT_CONDITION_UNKNOWN (model, q, EQ_EXPR, null_ptr);
9207 :
9208 4 : model.add_constraint (p, NE_EXPR, null_ptr, nullptr);
9209 :
9210 4 : ASSERT_CONDITION_TRUE (model, p, NE_EXPR, null_ptr);
9211 4 : ASSERT_CONDITION_FALSE (model, p, EQ_EXPR, null_ptr);
9212 4 : ASSERT_CONDITION_TRUE (model, q, NE_EXPR, null_ptr);
9213 4 : ASSERT_CONDITION_FALSE (model, q, EQ_EXPR, null_ptr);
9214 4 : }
9215 :
9216 : /* Smoketest of getting and setting the value of a variable. */
9217 :
9218 : static void
9219 4 : test_var ()
9220 : {
9221 : /* "int i;" */
9222 4 : tree i = build_global_decl ("i", integer_type_node);
9223 :
9224 4 : tree int_17 = build_int_cst (integer_type_node, 17);
9225 4 : tree int_m3 = build_int_cst (integer_type_node, -3);
9226 :
9227 4 : region_model_manager mgr;
9228 4 : region_model model (&mgr);
9229 :
9230 4 : const region *i_reg = model.get_lvalue (i, nullptr);
9231 4 : ASSERT_EQ (i_reg->get_kind (), RK_DECL);
9232 :
9233 : /* Reading "i" should give a symbolic "initial value". */
9234 4 : const svalue *sval_init = model.get_rvalue (i, nullptr);
9235 4 : ASSERT_EQ (sval_init->get_kind (), SK_INITIAL);
9236 4 : ASSERT_EQ (sval_init->dyn_cast_initial_svalue ()->get_region (), i_reg);
9237 : /* ..and doing it again should give the same "initial value". */
9238 4 : ASSERT_EQ (model.get_rvalue (i, nullptr), sval_init);
9239 :
9240 : /* "i = 17;". */
9241 4 : model.set_value (i, int_17, nullptr);
9242 4 : ASSERT_EQ (model.get_rvalue (i, nullptr),
9243 : model.get_rvalue (int_17, nullptr));
9244 :
9245 : /* "i = -3;". */
9246 4 : model.set_value (i, int_m3, nullptr);
9247 4 : ASSERT_EQ (model.get_rvalue (i, nullptr),
9248 : model.get_rvalue (int_m3, nullptr));
9249 :
9250 : /* Verify get_offset for "i". */
9251 4 : {
9252 4 : region_offset offset = i_reg->get_offset (&mgr);
9253 4 : ASSERT_EQ (offset.get_base_region (), i_reg);
9254 4 : ASSERT_EQ (offset.get_bit_offset (), 0);
9255 : }
9256 4 : }
9257 :
9258 : static void
9259 4 : test_array_2 ()
9260 : {
9261 : /* "int arr[10];" */
9262 4 : tree tlen = size_int (10);
9263 4 : tree arr_type
9264 4 : = build_array_type (integer_type_node, build_index_type (tlen));
9265 4 : tree arr = build_global_decl ("arr", arr_type);
9266 :
9267 : /* "int i;" */
9268 4 : tree i = build_global_decl ("i", integer_type_node);
9269 :
9270 4 : tree int_0 = integer_zero_node;
9271 4 : tree int_1 = integer_one_node;
9272 :
9273 4 : tree arr_0 = build4 (ARRAY_REF, integer_type_node,
9274 : arr, int_0, NULL_TREE, NULL_TREE);
9275 4 : tree arr_1 = build4 (ARRAY_REF, integer_type_node,
9276 : arr, int_1, NULL_TREE, NULL_TREE);
9277 4 : tree arr_i = build4 (ARRAY_REF, integer_type_node,
9278 : arr, i, NULL_TREE, NULL_TREE);
9279 :
9280 4 : tree int_17 = build_int_cst (integer_type_node, 17);
9281 4 : tree int_42 = build_int_cst (integer_type_node, 42);
9282 4 : tree int_m3 = build_int_cst (integer_type_node, -3);
9283 :
9284 4 : region_model_manager mgr;
9285 4 : region_model model (&mgr);
9286 : /* "arr[0] = 17;". */
9287 4 : model.set_value (arr_0, int_17, nullptr);
9288 : /* "arr[1] = -3;". */
9289 4 : model.set_value (arr_1, int_m3, nullptr);
9290 :
9291 4 : ASSERT_EQ (model.get_rvalue (arr_0, nullptr),
9292 : model.get_rvalue (int_17, nullptr));
9293 4 : ASSERT_EQ (model.get_rvalue (arr_1, nullptr),
9294 : model.get_rvalue (int_m3, nullptr));
9295 :
9296 : /* Overwrite a pre-existing binding: "arr[1] = 42;". */
9297 4 : model.set_value (arr_1, int_42, nullptr);
9298 4 : ASSERT_EQ (model.get_rvalue (arr_1, nullptr),
9299 : model.get_rvalue (int_42, nullptr));
9300 :
9301 : /* Verify get_offset for "arr[0]". */
9302 4 : {
9303 4 : const region *arr_0_reg = model.get_lvalue (arr_0, nullptr);
9304 4 : region_offset offset = arr_0_reg->get_offset (&mgr);
9305 4 : ASSERT_EQ (offset.get_base_region (), model.get_lvalue (arr, nullptr));
9306 4 : ASSERT_EQ (offset.get_bit_offset (), 0);
9307 : }
9308 :
9309 : /* Verify get_offset for "arr[1]". */
9310 4 : {
9311 4 : const region *arr_1_reg = model.get_lvalue (arr_1, nullptr);
9312 4 : region_offset offset = arr_1_reg->get_offset (&mgr);
9313 4 : ASSERT_EQ (offset.get_base_region (), model.get_lvalue (arr, nullptr));
9314 4 : ASSERT_EQ (offset.get_bit_offset (), INT_TYPE_SIZE);
9315 : }
9316 :
9317 : /* Verify get_offset for "arr[i]". */
9318 4 : {
9319 4 : const region *arr_i_reg = model.get_lvalue (arr_i, nullptr);
9320 4 : region_offset offset = arr_i_reg->get_offset (&mgr);
9321 4 : ASSERT_EQ (offset.get_base_region (), model.get_lvalue (arr, nullptr));
9322 4 : const svalue *offset_sval = offset.get_symbolic_byte_offset ();
9323 4 : if (const svalue *cast = offset_sval->maybe_undo_cast ())
9324 4 : offset_sval = cast;
9325 4 : ASSERT_EQ (offset_sval->get_kind (), SK_BINOP);
9326 : }
9327 :
9328 : /* "arr[i] = i;" - this should remove the earlier bindings. */
9329 4 : model.set_value (arr_i, i, nullptr);
9330 4 : ASSERT_EQ (model.get_rvalue (arr_i, nullptr), model.get_rvalue (i, nullptr));
9331 4 : ASSERT_EQ (model.get_rvalue (arr_0, nullptr)->get_kind (), SK_UNKNOWN);
9332 :
9333 : /* "arr[0] = 17;" - this should remove the arr[i] binding. */
9334 4 : model.set_value (arr_0, int_17, nullptr);
9335 4 : ASSERT_EQ (model.get_rvalue (arr_0, nullptr),
9336 : model.get_rvalue (int_17, nullptr));
9337 4 : ASSERT_EQ (model.get_rvalue (arr_i, nullptr)->get_kind (), SK_UNKNOWN);
9338 4 : }
9339 :
9340 : /* Smoketest of dereferencing a pointer via MEM_REF. */
9341 :
9342 : static void
9343 4 : test_mem_ref ()
9344 : {
9345 : /*
9346 : x = 17;
9347 : p = &x;
9348 : *p;
9349 : */
9350 4 : tree x = build_global_decl ("x", integer_type_node);
9351 4 : tree int_star = build_pointer_type (integer_type_node);
9352 4 : tree p = build_global_decl ("p", int_star);
9353 :
9354 4 : tree int_17 = build_int_cst (integer_type_node, 17);
9355 4 : tree addr_of_x = build1 (ADDR_EXPR, int_star, x);
9356 4 : tree ptype = build_pointer_type_for_mode (char_type_node, ptr_mode, true);
9357 4 : tree offset_0 = build_int_cst (ptype, 0);
9358 4 : tree star_p = build2 (MEM_REF, integer_type_node, p, offset_0);
9359 :
9360 4 : region_model_manager mgr;
9361 4 : region_model model (&mgr);
9362 :
9363 : /* "x = 17;". */
9364 4 : model.set_value (x, int_17, nullptr);
9365 :
9366 : /* "p = &x;". */
9367 4 : model.set_value (p, addr_of_x, nullptr);
9368 :
9369 4 : const svalue *sval = model.get_rvalue (star_p, nullptr);
9370 4 : ASSERT_EQ (sval->maybe_get_constant (), int_17);
9371 4 : }
9372 :
9373 : /* Test for a POINTER_PLUS_EXPR followed by a MEM_REF.
9374 : Analogous to this code:
9375 : void test_6 (int a[10])
9376 : {
9377 : __analyzer_eval (a[3] == 42); [should be UNKNOWN]
9378 : a[3] = 42;
9379 : __analyzer_eval (a[3] == 42); [should be TRUE]
9380 : }
9381 : from data-model-1.c, which looks like this at the gimple level:
9382 : # __analyzer_eval (a[3] == 42); [should be UNKNOWN]
9383 : int *_1 = a_10(D) + 12; # POINTER_PLUS_EXPR
9384 : int _2 = *_1; # MEM_REF
9385 : _Bool _3 = _2 == 42;
9386 : int _4 = (int) _3;
9387 : __analyzer_eval (_4);
9388 :
9389 : # a[3] = 42;
9390 : int *_5 = a_10(D) + 12; # POINTER_PLUS_EXPR
9391 : *_5 = 42; # MEM_REF
9392 :
9393 : # __analyzer_eval (a[3] == 42); [should be TRUE]
9394 : int *_6 = a_10(D) + 12; # POINTER_PLUS_EXPR
9395 : int _7 = *_6; # MEM_REF
9396 : _Bool _8 = _7 == 42;
9397 : int _9 = (int) _8;
9398 : __analyzer_eval (_9); */
9399 :
9400 : static void
9401 4 : test_POINTER_PLUS_EXPR_then_MEM_REF ()
9402 : {
9403 4 : tree int_star = build_pointer_type (integer_type_node);
9404 4 : tree a = build_global_decl ("a", int_star);
9405 4 : tree offset_12 = build_int_cst (size_type_node, 12);
9406 4 : tree pointer_plus_expr = build2 (POINTER_PLUS_EXPR, int_star, a, offset_12);
9407 4 : tree ptype = build_pointer_type_for_mode (char_type_node, ptr_mode, true);
9408 4 : tree offset_0 = build_int_cst (ptype, 0);
9409 4 : tree mem_ref = build2 (MEM_REF, integer_type_node,
9410 : pointer_plus_expr, offset_0);
9411 4 : region_model_manager mgr;
9412 4 : region_model m (&mgr);
9413 :
9414 4 : tree int_42 = build_int_cst (integer_type_node, 42);
9415 4 : m.set_value (mem_ref, int_42, nullptr);
9416 4 : ASSERT_EQ (m.get_rvalue (mem_ref, nullptr)->maybe_get_constant (), int_42);
9417 4 : }
9418 :
9419 : /* Verify that malloc works. */
9420 :
9421 : static void
9422 4 : test_malloc ()
9423 : {
9424 4 : tree int_star = build_pointer_type (integer_type_node);
9425 4 : tree p = build_global_decl ("p", int_star);
9426 4 : tree n = build_global_decl ("n", integer_type_node);
9427 4 : tree n_times_4 = build2 (MULT_EXPR, size_type_node,
9428 : n, build_int_cst (size_type_node, 4));
9429 :
9430 4 : region_model_manager mgr;
9431 4 : test_region_model_context ctxt;
9432 4 : region_model model (&mgr);
9433 :
9434 : /* "p = malloc (n * 4);". */
9435 4 : const svalue *size_sval = model.get_rvalue (n_times_4, &ctxt);
9436 4 : const region *reg
9437 4 : = model.get_or_create_region_for_heap_alloc (size_sval, &ctxt);
9438 4 : const svalue *ptr = mgr.get_ptr_svalue (int_star, reg);
9439 4 : model.set_value (model.get_lvalue (p, &ctxt), ptr, &ctxt);
9440 4 : ASSERT_EQ (model.get_capacity (reg), size_sval);
9441 4 : }
9442 :
9443 : /* Verify that alloca works. */
9444 :
9445 : static void
9446 4 : test_alloca ()
9447 : {
9448 4 : auto_vec <tree> param_types;
9449 4 : tree fndecl = make_fndecl (integer_type_node,
9450 : "test_fn",
9451 : param_types);
9452 4 : allocate_struct_function (fndecl, true);
9453 :
9454 :
9455 4 : tree int_star = build_pointer_type (integer_type_node);
9456 4 : tree p = build_global_decl ("p", int_star);
9457 4 : tree n = build_global_decl ("n", integer_type_node);
9458 4 : tree n_times_4 = build2 (MULT_EXPR, size_type_node,
9459 : n, build_int_cst (size_type_node, 4));
9460 :
9461 4 : region_model_manager mgr;
9462 4 : test_region_model_context ctxt;
9463 4 : region_model model (&mgr);
9464 :
9465 : /* Push stack frame. */
9466 4 : const region *frame_reg
9467 4 : = model.push_frame (*DECL_STRUCT_FUNCTION (fndecl),
9468 : nullptr, nullptr, &ctxt);
9469 : /* "p = alloca (n * 4);". */
9470 4 : const svalue *size_sval = model.get_rvalue (n_times_4, &ctxt);
9471 4 : const region *reg = model.create_region_for_alloca (size_sval, &ctxt);
9472 4 : ASSERT_EQ (reg->get_parent_region (), frame_reg);
9473 4 : const svalue *ptr = mgr.get_ptr_svalue (int_star, reg);
9474 4 : model.set_value (model.get_lvalue (p, &ctxt), ptr, &ctxt);
9475 4 : ASSERT_EQ (model.get_capacity (reg), size_sval);
9476 :
9477 : /* Verify that the pointers to the alloca region are replaced by
9478 : poisoned values when the frame is popped. */
9479 4 : model.pop_frame (nullptr, nullptr, &ctxt, nullptr);
9480 4 : ASSERT_EQ (model.get_rvalue (p, nullptr)->get_kind (), SK_POISONED);
9481 4 : }
9482 :
9483 : /* Verify that svalue::involves_p works. */
9484 :
9485 : static void
9486 4 : test_involves_p ()
9487 : {
9488 4 : region_model_manager mgr;
9489 4 : tree int_star = build_pointer_type (integer_type_node);
9490 4 : tree p = build_global_decl ("p", int_star);
9491 4 : tree q = build_global_decl ("q", int_star);
9492 :
9493 4 : test_region_model_context ctxt;
9494 4 : region_model model (&mgr);
9495 4 : const svalue *p_init = model.get_rvalue (p, &ctxt);
9496 4 : const svalue *q_init = model.get_rvalue (q, &ctxt);
9497 :
9498 4 : ASSERT_TRUE (p_init->involves_p (p_init));
9499 4 : ASSERT_FALSE (p_init->involves_p (q_init));
9500 :
9501 4 : const region *star_p_reg = mgr.get_symbolic_region (p_init);
9502 4 : const region *star_q_reg = mgr.get_symbolic_region (q_init);
9503 :
9504 4 : const svalue *init_star_p = mgr.get_or_create_initial_value (star_p_reg);
9505 4 : const svalue *init_star_q = mgr.get_or_create_initial_value (star_q_reg);
9506 :
9507 4 : ASSERT_TRUE (init_star_p->involves_p (p_init));
9508 4 : ASSERT_FALSE (p_init->involves_p (init_star_p));
9509 4 : ASSERT_FALSE (init_star_p->involves_p (q_init));
9510 4 : ASSERT_TRUE (init_star_q->involves_p (q_init));
9511 4 : ASSERT_FALSE (init_star_q->involves_p (p_init));
9512 4 : }
9513 :
9514 : /* Run all of the selftests within this file. */
9515 :
9516 : void
9517 4 : analyzer_region_model_cc_tests ()
9518 : {
9519 4 : test_tree_cmp_on_constants ();
9520 4 : test_dump ();
9521 4 : test_struct ();
9522 4 : test_array_1 ();
9523 4 : test_get_representative_tree ();
9524 4 : test_unique_constants ();
9525 4 : test_unique_unknowns ();
9526 4 : test_initial_svalue_folding ();
9527 4 : test_unaryop_svalue_folding ();
9528 4 : test_binop_svalue_folding ();
9529 4 : test_sub_svalue_folding ();
9530 4 : test_bits_within_svalue_folding ();
9531 4 : test_descendent_of_p ();
9532 4 : test_bit_range_regions ();
9533 4 : test_assignment ();
9534 4 : test_compound_assignment ();
9535 4 : test_stack_frames ();
9536 4 : test_get_representative_path_var ();
9537 4 : test_equality_1 ();
9538 4 : test_canonicalization_2 ();
9539 4 : test_canonicalization_3 ();
9540 4 : test_canonicalization_4 ();
9541 4 : test_state_merging ();
9542 4 : test_constraint_merging ();
9543 4 : test_widening_constraints ();
9544 4 : test_iteration_1 ();
9545 4 : test_malloc_constraints ();
9546 4 : test_var ();
9547 4 : test_array_2 ();
9548 4 : test_mem_ref ();
9549 4 : test_POINTER_PLUS_EXPR_then_MEM_REF ();
9550 4 : test_malloc ();
9551 4 : test_alloca ();
9552 4 : test_involves_p ();
9553 4 : }
9554 :
9555 : } // namespace selftest
9556 :
9557 : #endif /* CHECKING_P */
9558 :
9559 : } // namespace ana
9560 :
9561 : #endif /* #if ENABLE_ANALYZER */
|