Line data Source code
1 : /* Classes for modeling the state of memory.
2 : Copyright (C) 2019-2026 Free Software Foundation, Inc.
3 : Contributed by David Malcolm <dmalcolm@redhat.com>.
4 :
5 : This file is part of GCC.
6 :
7 : GCC is free software; you can redistribute it and/or modify it
8 : under the terms of the GNU General Public License as published by
9 : the Free Software Foundation; either version 3, or (at your option)
10 : any later version.
11 :
12 : GCC is distributed in the hope that it will be useful, but
13 : WITHOUT ANY WARRANTY; without even the implied warranty of
14 : MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
15 : General Public License for more details.
16 :
17 : You should have received a copy of the GNU General Public License
18 : along with GCC; see the file COPYING3. If not see
19 : <http://www.gnu.org/licenses/>. */
20 :
21 : #define INCLUDE_ALGORITHM
22 : #include "analyzer/common.h"
23 :
24 : #include "ordered-hash-map.h"
25 : #include "options.h"
26 : #include "cgraph.h"
27 : #include "cfg.h"
28 : #include "sbitmap.h"
29 : #include "diagnostics/event-id.h"
30 : #include "stor-layout.h"
31 : #include "stringpool.h"
32 : #include "attribs.h"
33 : #include "tree-object-size.h"
34 : #include "gimple-ssa.h"
35 : #include "tree-phinodes.h"
36 : #include "tree-ssa-operands.h"
37 : #include "ssa-iterators.h"
38 : #include "target.h"
39 : #include "calls.h"
40 : #include "is-a.h"
41 : #include "gcc-rich-location.h"
42 : #include "gcc-urlifier.h"
43 : #include "diagnostics/sarif-sink.h"
44 : #include "tree-pretty-print.h"
45 : #include "fold-const.h"
46 : #include "selftest-tree.h"
47 : #include "context.h"
48 : #include "channels.h"
49 : #include "value-relation.h"
50 : #include "range-op.h"
51 :
52 : #include "text-art/tree-widget.h"
53 :
54 : #include "analyzer/analyzer-logging.h"
55 : #include "analyzer/supergraph.h"
56 : #include "analyzer/call-string.h"
57 : #include "analyzer/program-point.h"
58 : #include "analyzer/store.h"
59 : #include "analyzer/region-model.h"
60 : #include "analyzer/constraint-manager.h"
61 : #include "analyzer/sm.h"
62 : #include "analyzer/pending-diagnostic.h"
63 : #include "analyzer/region-model-reachability.h"
64 : #include "analyzer/analyzer-selftests.h"
65 : #include "analyzer/program-state.h"
66 : #include "analyzer/call-summary.h"
67 : #include "analyzer/checker-event.h"
68 : #include "analyzer/checker-path.h"
69 : #include "analyzer/feasible-graph.h"
70 : #include "analyzer/record-layout.h"
71 : #include "analyzer/function-set.h"
72 :
73 : #if ENABLE_ANALYZER
74 :
75 : namespace ana {
76 :
/* Dump T to PP in language-independent form (TDF_SLIM via
   dump_generic_node), for debugging/logging/dumping purposes.  */

void
dump_tree (pretty_printer *pp, tree t)
{
  dump_generic_node (pp, t, 0, TDF_SLIM, 0);
}
85 :
86 : /* Dump T to PP in language-independent form in quotes, for
87 : debugging/logging/dumping purposes. */
88 :
89 : void
90 1366 : dump_quoted_tree (pretty_printer *pp, tree t)
91 : {
92 1366 : pp_begin_quote (pp, pp_show_color (pp));
93 1366 : dump_tree (pp, t);
94 1366 : pp_end_quote (pp, pp_show_color (pp));
95 1366 : }
96 :
97 : /* Equivalent to pp_printf (pp, "%qT", t), to avoid nesting pp_printf
98 : calls within other pp_printf calls.
99 :
100 : default_tree_printer handles 'T' and some other codes by calling
101 : dump_generic_node (pp, t, 0, TDF_SLIM, 0);
102 : dump_generic_node calls pp_printf in various places, leading to
103 : garbled output.
104 :
105 : Ideally pp_printf could be made to be reentrant, but in the meantime
106 : this function provides a workaround. */
107 :
108 : void
109 4434 : print_quoted_type (pretty_printer *pp, tree t)
110 : {
111 4434 : if (!t)
112 : return;
113 4341 : pp_begin_quote (pp, pp_show_color (pp));
114 4341 : dump_generic_node (pp, t, 0, TDF_SLIM, 0);
115 4341 : pp_end_quote (pp, pp_show_color (pp));
116 : }
117 :
118 : /* Print EXPR to PP, without quotes.
119 : For use within svalue::maybe_print_for_user
120 : and region::maybe_print_for_user. */
121 :
122 : void
123 38 : print_expr_for_user (pretty_printer *pp, tree expr)
124 : {
125 : /* Workaround for C++'s lang_hooks.decl_printable_name,
126 : which unhelpfully (for us) prefixes the decl with its
127 : type. */
128 38 : if (DECL_P (expr))
129 38 : dump_generic_node (pp, expr, 0, TDF_SLIM, 0);
130 : else
131 0 : pp_printf (pp, "%E", expr);
132 38 : }
133 :
134 : /* class region_to_value_map. */
135 :
136 : /* Assignment operator for region_to_value_map. */
137 :
138 : region_to_value_map &
139 58694 : region_to_value_map::operator= (const region_to_value_map &other)
140 : {
141 58694 : m_hash_map.empty ();
142 71717 : for (auto iter : other.m_hash_map)
143 : {
144 13023 : const region *reg = iter.first;
145 13023 : const svalue *sval = iter.second;
146 13023 : m_hash_map.put (reg, sval);
147 : }
148 58694 : return *this;
149 : }
150 :
151 : /* Equality operator for region_to_value_map. */
152 :
153 : bool
154 443868 : region_to_value_map::operator== (const region_to_value_map &other) const
155 : {
156 443868 : if (m_hash_map.elements () != other.m_hash_map.elements ())
157 : return false;
158 :
159 707208 : for (auto iter : *this)
160 : {
161 132981 : const region *reg = iter.first;
162 132981 : const svalue *sval = iter.second;
163 132981 : const svalue * const *other_slot = other.get (reg);
164 132981 : if (other_slot == nullptr)
165 58 : return false;
166 132951 : if (sval != *other_slot)
167 : return false;
168 : }
169 :
170 441304 : return true;
171 : }
172 :
/* Dump this object to PP.
   If MULTILINE, print one indented "REGION: SVALUE" binding per line;
   otherwise print all bindings on one line within " {...}",
   comma-separated.
   Use SIMPLE to control how the regions are printed.  */

void
region_to_value_map::dump_to_pp (pretty_printer *pp, bool simple,
				 bool multiline) const
{
  /* Sort the regions so that output is deterministic
     (hash-map iteration order is not).  */
  auto_vec<const region *> regs;
  for (iterator iter = begin (); iter != end (); ++iter)
    regs.safe_push ((*iter).first);
  regs.qsort (region::cmp_ptr_ptr);
  if (multiline)
    pp_newline (pp);
  else
    pp_string (pp, " {");
  unsigned i;
  const region *reg;
  FOR_EACH_VEC_ELT (regs, i, reg)
    {
      if (multiline)
	pp_string (pp, "  ");
      else if (i > 0)
	pp_string (pp, ", ");
      reg->dump_to_pp (pp, simple);
      pp_string (pp, ": ");
      const svalue *sval = *get (reg);
      sval->dump_to_pp (pp, true);
      if (multiline)
	pp_newline (pp);
    }
  if (!multiline)
    pp_string (pp, "}");
}
205 :
/* Dump this object to stderr, in multiline form.  */

DEBUG_FUNCTION void
region_to_value_map::dump (bool simple) const
{
  tree_dump_pretty_printer pp (stderr);
  dump_to_pp (&pp, simple, true);
  pp_newline (&pp);
}
215 :
216 : /* Generate a JSON value for this region_to_value_map.
217 : This is intended for debugging the analyzer rather than
218 : serialization. */
219 :
220 : std::unique_ptr<json::object>
221 4 : region_to_value_map::to_json () const
222 : {
223 4 : auto map_obj = std::make_unique<json::object> ();
224 :
225 4 : auto_vec<const region *> regs;
226 4 : for (iterator iter = begin (); iter != end (); ++iter)
227 0 : regs.safe_push ((*iter).first);
228 4 : regs.qsort (region::cmp_ptr_ptr);
229 :
230 : unsigned i;
231 : const region *reg;
232 4 : FOR_EACH_VEC_ELT (regs, i, reg)
233 : {
234 0 : label_text reg_desc = reg->get_desc ();
235 0 : const svalue *sval = *get (reg);
236 0 : map_obj->set (reg_desc.get (), sval->to_json ());
237 0 : }
238 :
239 4 : return map_obj;
240 4 : }
241 :
/* Create a text_art tree widget labelled "Dynamic Extents" with one
   child per "REGION: SVALUE" binding, sorted by region for stable
   output, or nullptr if the map is empty.  */

std::unique_ptr<text_art::tree_widget>
region_to_value_map::
make_dump_widget (const text_art::dump_widget_info &dwi) const
{
  if (is_empty ())
    return nullptr;

  std::unique_ptr<text_art::tree_widget> w
    (text_art::tree_widget::make (dwi, "Dynamic Extents"));

  /* Sort the regions so that output is deterministic.  */
  auto_vec<const region *> regs;
  for (iterator iter = begin (); iter != end (); ++iter)
    regs.safe_push ((*iter).first);
  regs.qsort (region::cmp_ptr_ptr);

  unsigned i;
  const region *reg;
  FOR_EACH_VEC_ELT (regs, i, reg)
    {
      /* Print each binding into a fresh pretty_printer, then wrap
	 the printed text in a child widget.  */
      pretty_printer the_pp;
      pretty_printer * const pp = &the_pp;
      pp_format_decoder (pp) = default_tree_printer;
      const bool simple = true;

      reg->dump_to_pp (pp, simple);
      pp_string (pp, ": ");
      const svalue *sval = *get (reg);
      sval->dump_to_pp (pp, true);
      w->add_child (text_art::tree_widget::make (dwi, pp));
    }
  return w;
}
274 :
275 : /* Attempt to merge THIS with OTHER, writing the result
276 : to OUT.
277 :
278 : For now, write (region, value) mappings that are in common between THIS
279 : and OTHER to OUT, effectively taking the intersection.
280 :
281 : Reject merger of different values. */
282 :
283 : bool
284 42241 : region_to_value_map::can_merge_with_p (const region_to_value_map &other,
285 : region_to_value_map *out) const
286 : {
287 57261 : for (auto iter : *this)
288 : {
289 9490 : const region *iter_reg = iter.first;
290 9490 : const svalue *iter_sval = iter.second;
291 9490 : const svalue * const * other_slot = other.get (iter_reg);
292 9490 : if (other_slot)
293 : {
294 9206 : if (iter_sval == *other_slot)
295 7226 : out->put (iter_reg, iter_sval);
296 : else
297 1980 : return false;
298 : }
299 : }
300 40261 : return true;
301 : }
302 :
303 : /* Purge any state involving SVAL. */
304 :
305 : void
306 26853 : region_to_value_map::purge_state_involving (const svalue *sval)
307 : {
308 26853 : auto_vec<const region *> to_purge;
309 68313 : for (auto iter : *this)
310 : {
311 20730 : const region *iter_reg = iter.first;
312 20730 : const svalue *iter_sval = iter.second;
313 20730 : if (iter_reg->involves_p (sval) || iter_sval->involves_p (sval))
314 26 : to_purge.safe_push (iter_reg);
315 : }
316 26931 : for (auto iter : to_purge)
317 26 : m_hash_map.remove (iter);
318 26853 : }
319 :
320 : // struct exception_node
321 :
322 : bool
323 10276 : exception_node::operator== (const exception_node &other) const
324 : {
325 10276 : return (m_exception_sval == other.m_exception_sval
326 10276 : && m_typeinfo_sval == other.m_typeinfo_sval
327 20552 : && m_destructor_sval == other.m_destructor_sval);
328 : }
329 :
330 : void
331 6 : exception_node::dump_to_pp (pretty_printer *pp,
332 : bool simple) const
333 : {
334 6 : pp_printf (pp, "{exception: ");
335 6 : m_exception_sval->dump_to_pp (pp, simple);
336 6 : pp_string (pp, ", typeinfo: ");
337 6 : m_typeinfo_sval->dump_to_pp (pp, simple);
338 6 : pp_string (pp, ", destructor: ");
339 6 : m_destructor_sval->dump_to_pp (pp, simple);
340 6 : pp_string (pp, "}");
341 6 : }
342 :
/* Dump a representation of this exception_node to FP.  */

void
exception_node::dump (FILE *fp, bool simple) const
{
  tree_dump_pretty_printer pp (fp);
  dump_to_pp (&pp, simple);
  pp_newline (&pp);
}
350 :
/* Dump a representation of this exception_node to stderr.  */

DEBUG_FUNCTION void
exception_node::dump (bool simple) const
{
  dump (stderr, simple);
}
358 :
/* Dump a tree-like representation of this exception_node to stderr,
   via text_art.  */

DEBUG_FUNCTION void
exception_node::dump () const
{
  text_art::dump (*this);
}
364 :
/* Generate a JSON value for this exception_node, with one entry per
   svalue.  This is intended for debugging the analyzer rather than
   serialization.  */

std::unique_ptr<json::object>
exception_node::to_json () const
{
  auto obj = std::make_unique<json::object> ();
  obj->set ("exception", m_exception_sval->to_json ());
  obj->set ("typeinfo", m_typeinfo_sval->to_json ());
  obj->set ("destructor", m_destructor_sval->to_json ());
  return obj;
}
374 :
/* Create a text_art tree widget for this exception_node, with one
   child widget per svalue.  */

std::unique_ptr<text_art::tree_widget>
exception_node::make_dump_widget (const text_art::dump_widget_info &dwi) const
{
  using text_art::tree_widget;
  std::unique_ptr<tree_widget> w
    (tree_widget::from_fmt (dwi, nullptr, "Exception Node"));

  w->add_child (m_exception_sval->make_dump_widget (dwi, "exception"));
  w->add_child (m_typeinfo_sval->make_dump_widget (dwi, "typeinfo"));
  w->add_child (m_destructor_sval->make_dump_widget (dwi, "destructor"));

  return w;
}
388 :
/* Attempt to get the type of this exception from its typeinfo svalue
   (see svalue::maybe_get_type_from_typeinfo for when this can be
   determined).  */

tree
exception_node::maybe_get_type () const
{
  return m_typeinfo_sval->maybe_get_type_from_typeinfo ();
}
394 :
/* Pass all three svalues of this exception_node to REGS'
   handle_sval, for reachability analysis.  */

void
exception_node::add_to_reachable_regions (reachable_regions &regs) const
{
  regs.handle_sval (m_exception_sval);
  regs.handle_sval (m_typeinfo_sval);
  regs.handle_sval (m_destructor_sval);
}
402 :
/* class region_model.  */

/* Ctor for region_model: construct an "empty" model.  */

region_model::region_model (region_model_manager *mgr)
: m_mgr (mgr), m_store (), m_current_frame (nullptr),
  m_thrown_exceptions_stack (),
  m_caught_exceptions_stack (),
  m_dynamic_extents ()
{
  /* Each region_model owns its constraint_manager
     (deleted in the dtor; deep-copied on copy/assignment).  */
  m_constraints = new constraint_manager (mgr);
}
415 :
/* region_model's copy ctor.  The owned constraint_manager is
   deep-copied; all other members are copied directly.  */

region_model::region_model (const region_model &other)
: m_mgr (other.m_mgr), m_store (other.m_store),
  m_constraints (new constraint_manager (*other.m_constraints)),
  m_current_frame (other.m_current_frame),
  m_thrown_exceptions_stack (other.m_thrown_exceptions_stack),
  m_caught_exceptions_stack (other.m_caught_exceptions_stack),
  m_dynamic_extents (other.m_dynamic_extents)
{
}
427 :
/* region_model's dtor: free the owned constraint_manager.  */

region_model::~region_model ()
{
  delete m_constraints;
}
434 :
/* region_model's assignment operator.  */

region_model &
region_model::operator= (const region_model &other)
{
  /* m_mgr is const.  */
  gcc_assert (m_mgr == other.m_mgr);

  m_store = other.m_store;

  /* Replace the owned constraint_manager with a deep copy of
     OTHER's.  */
  delete m_constraints;
  m_constraints = new constraint_manager (*other.m_constraints);

  m_current_frame = other.m_current_frame;

  m_thrown_exceptions_stack = other.m_thrown_exceptions_stack;
  m_caught_exceptions_stack = other.m_caught_exceptions_stack;

  m_dynamic_extents = other.m_dynamic_extents;

  return *this;
}
457 :
/* Equality operator for region_model.

   Amongst other things this directly compares the stores and the constraint
   managers, so for this to be meaningful both this and OTHER should
   have been canonicalized.  */

bool
region_model::operator== (const region_model &other) const
{
  /* We can only compare instances that use the same manager.  */
  gcc_assert (m_mgr == other.m_mgr);

  if (m_store != other.m_store)
    return false;

  if (*m_constraints != *other.m_constraints)
    return false;

  if (m_current_frame != other.m_current_frame)
    return false;

  if (m_thrown_exceptions_stack != other.m_thrown_exceptions_stack)
    return false;
  if (m_caught_exceptions_stack != other.m_caught_exceptions_stack)
    return false;

  if (m_dynamic_extents != other.m_dynamic_extents)
    return false;

  /* Sanity-check that equal models hash equally (hash () only covers
     the store and the constraints).  */
  gcc_checking_assert (hash () == other.hash ());

  return true;
}
491 :
492 : /* Generate a hash value for this region_model. */
493 :
494 : hashval_t
495 1242525 : region_model::hash () const
496 : {
497 1242525 : hashval_t result = m_store.hash ();
498 1242525 : result ^= m_constraints->hash ();
499 1242525 : return result;
500 : }
501 :
/* Dump a representation of this model to PP, showing the
   stack, the store, and any constraints.
   Use SIMPLE to control how svalues and regions are printed.
   If MULTILINE, emit one item per line; otherwise emit a compact
   single-line form using "{...}" groupings.  */

void
region_model::dump_to_pp (pretty_printer *pp, bool simple,
			  bool multiline) const
{
  /* Dump frame stack, walking from the innermost frame outwards.  */
  pp_printf (pp, "stack depth: %i", get_stack_depth ());
  if (multiline)
    pp_newline (pp);
  else
    pp_string (pp, " {");
  for (const frame_region *iter_frame = m_current_frame; iter_frame;
       iter_frame = iter_frame->get_calling_frame ())
    {
      if (multiline)
	pp_string (pp, "  ");
      else if (iter_frame != m_current_frame)
	pp_string (pp, ", ");
      pp_printf (pp, "frame (index %i): ", iter_frame->get_index ());
      iter_frame->dump_to_pp (pp, simple);
      if (multiline)
	pp_newline (pp);
    }
  if (!multiline)
    pp_string (pp, "}");

  /* Dump exception stacks (thrown, then caught), but only when
     non-empty.  */
  if (m_thrown_exceptions_stack.size () > 0)
    {
      pp_printf (pp, "thrown exceptions: %i", (int)m_thrown_exceptions_stack.size ());
      if (multiline)
	pp_newline (pp);
      else
	pp_string (pp, " {");
      for (size_t idx = 0; idx < m_thrown_exceptions_stack.size (); ++idx)
	{
	  if (multiline)
	    pp_string (pp, "  ");
	  else if (idx > 0)
	    pp_string (pp, ", ");
	  pp_printf (pp, "exception (index %i): ", (int)idx);
	  m_thrown_exceptions_stack[idx].dump_to_pp (pp, simple);
	  if (multiline)
	    pp_newline (pp);
	}
      if (!multiline)
	pp_string (pp, "}");
    }
  if (m_caught_exceptions_stack.size () > 0)
    {
      pp_printf (pp, "caught exceptions: %i", (int)m_caught_exceptions_stack.size ());
      if (multiline)
	pp_newline (pp);
      else
	pp_string (pp, " {");
      for (size_t idx = 0; idx < m_caught_exceptions_stack.size (); ++idx)
	{
	  if (multiline)
	    pp_string (pp, "  ");
	  else if (idx > 0)
	    pp_string (pp, ", ");
	  pp_printf (pp, "exception (index %i): ", (int)idx);
	  m_caught_exceptions_stack[idx].dump_to_pp (pp, simple);
	  if (multiline)
	    pp_newline (pp);
	}
      if (!multiline)
	pp_string (pp, "}");
    }

  /* Dump store.  */
  if (!multiline)
    pp_string (pp, ", {");
  m_store.dump_to_pp (pp, simple, multiline,
		      m_mgr->get_store_manager ());
  if (!multiline)
    pp_string (pp, "}");

  /* Dump constraints.  */
  pp_string (pp, "constraint_manager:");
  if (multiline)
    pp_newline (pp);
  else
    pp_string (pp, " {");
  m_constraints->dump_to_pp (pp, multiline);
  if (!multiline)
    pp_string (pp, "}");

  /* Dump sizes of dynamic regions, if any are known.  */
  if (!m_dynamic_extents.is_empty ())
    {
      pp_string (pp, "dynamic_extents:");
      m_dynamic_extents.dump_to_pp (pp, simple, multiline);
    }
}
600 :
/* Dump a representation of this model to FP.  */

void
region_model::dump (FILE *fp, bool simple, bool multiline) const
{
  tree_dump_pretty_printer pp (fp);
  dump_to_pp (&pp, simple, multiline);
  pp_newline (&pp);
}
610 :
/* Dump a multiline representation of this model to stderr.  */

DEBUG_FUNCTION void
region_model::dump (bool simple) const
{
  dump (stderr, simple, true);
}
618 :
/* Dump a tree-like representation of this state to stderr,
   via text_art.  */

DEBUG_FUNCTION void
region_model::dump () const
{
  text_art::dump (*this);
}
626 :
/* Dump a simple multiline representation of this model to stderr.
   Convenience entry point for use from a debugger.  */

DEBUG_FUNCTION void
region_model::debug () const
{
  dump (true);
}
634 :
/* Generate a JSON value for this region_model.
   This is intended for debugging the analyzer rather than
   serialization.  */

std::unique_ptr<json::object>
region_model::to_json () const
{
  auto model_obj = std::make_unique<json::object> ();
  model_obj->set ("store", m_store.to_json ());
  model_obj->set ("constraints", m_constraints->to_json ());
  /* m_current_frame is null for an empty model; omit the key then.  */
  if (m_current_frame)
    model_obj->set ("current_frame", m_current_frame->to_json ());

  auto thrown_exceptions_arr = std::make_unique<json::array> ();
  for (auto &node : m_thrown_exceptions_stack)
    thrown_exceptions_arr->append (node.to_json ());
  model_obj->set ("thrown_exception_stack", std::move (thrown_exceptions_arr));

  auto caught_exceptions_arr = std::make_unique<json::array> ();
  for (auto &node : m_caught_exceptions_stack)
    caught_exceptions_arr->append (node.to_json ());
  model_obj->set ("caught_exception_stack", std::move (caught_exceptions_arr));

  model_obj->set ("dynamic_extents", m_dynamic_extents.to_json ());
  return model_obj;
}
661 :
/* Create a text_art tree widget for this region_model, with children
   for the current frame (if any), the exception stacks (if non-empty),
   the store, the constraints, and the dynamic extents.  */

std::unique_ptr<text_art::tree_widget>
region_model::make_dump_widget (const text_art::dump_widget_info &dwi) const
{
  using text_art::tree_widget;
  std::unique_ptr<tree_widget> model_widget
    (tree_widget::from_fmt (dwi, nullptr, "Region Model"));

  if (m_current_frame)
    {
      /* Print the frame into a fresh pretty_printer and wrap the text
	 in a child widget.  */
      pretty_printer the_pp;
      pretty_printer * const pp = &the_pp;
      pp_format_decoder (pp) = default_tree_printer;
      pp_show_color (pp) = true;
      const bool simple = true;

      pp_string (pp, "Current Frame: ");
      m_current_frame->dump_to_pp (pp, simple);
      model_widget->add_child (tree_widget::make (dwi, pp));
    }

  if (m_thrown_exceptions_stack.size () > 0)
    {
      auto thrown_exceptions_widget
	= tree_widget::make (dwi, "Thrown Exceptions");
      for (auto &thrown_exception : m_thrown_exceptions_stack)
	thrown_exceptions_widget->add_child
	  (thrown_exception.make_dump_widget (dwi));
      model_widget->add_child (std::move (thrown_exceptions_widget));
    }
  if (m_caught_exceptions_stack.size () > 0)
    {
      auto caught_exceptions_widget
	= tree_widget::make (dwi, "Caught Exceptions");
      for (auto &caught_exception : m_caught_exceptions_stack)
	caught_exceptions_widget->add_child
	  (caught_exception.make_dump_widget (dwi));
      model_widget->add_child (std::move (caught_exceptions_widget));
    }

  model_widget->add_child
    (m_store.make_dump_widget (dwi,
			       m_mgr->get_store_manager ()));
  model_widget->add_child (m_constraints->make_dump_widget (dwi));
  model_widget->add_child (m_dynamic_extents.make_dump_widget (dwi));
  return model_widget;
}
708 :
/* Assert that this object is valid (currently: that the store is
   internally consistent).  */

void
region_model::validate () const
{
  m_store.validate ();
}
716 :
/* Canonicalize the store and constraints, to maximize the chance of
   equality between region_model instances.  */

void
region_model::canonicalize ()
{
  m_store.canonicalize (m_mgr->get_store_manager ());
  m_constraints->canonicalize ();
}
726 :
/* Return true if this region_model is in canonical form,
   i.e. if canonicalizing a copy yields an equal model.  */

bool
region_model::canonicalized_p () const
{
  region_model copy (*this);
  copy.canonicalize ();
  return *this == copy;
}
736 :
/* See the comment for store::loop_replay_fixup; this simply delegates
   to the store.  */

void
region_model::loop_replay_fixup (const region_model *dst_state)
{
  m_store.loop_replay_fixup (dst_state->get_store (), m_mgr);
}
744 :
/* A subclass of pending_diagnostic for complaining about uses of
   poisoned values.  */

class poisoned_value_diagnostic
: public pending_diagnostic_subclass<poisoned_value_diagnostic>
{
public:
  /* EXPR: the expression whose value is poisoned.
     PKIND: how the value became poisoned (uninit/freed/etc).
     SRC_REGION: if non-null, the region whose creation should be
     highlighted (see mark_interesting_stuff).
     CHECK_EXPR: if non-null, an expression to re-evaluate during
     feasibility analysis to reject false positives (see
     check_valid_fpath_p).  */
  poisoned_value_diagnostic (tree expr, enum poison_kind pkind,
			     const region *src_region,
			     tree check_expr)
  : m_expr (expr), m_pkind (pkind),
    m_src_region (src_region),
    m_check_expr (check_expr)
  {}

  const char *get_kind () const final override { return "poisoned_value_diagnostic"; }

  bool use_of_uninit_p () const final override
  {
    return m_pkind == poison_kind::uninit;
  }

  /* Deduplication: two instances are "the same" diagnostic if the
     expr, poison kind and source region all match.  */
  bool operator== (const poisoned_value_diagnostic &other) const
  {
    return (m_expr == other.m_expr
	    && m_pkind == other.m_pkind
	    && m_src_region == other.m_src_region);
  }

  /* Pick the -Wanalyzer-* option controlling this diagnostic,
     based on the poison kind.  */
  int get_controlling_option () const final override
  {
    switch (m_pkind)
      {
      default:
	gcc_unreachable ();
      case poison_kind::uninit:
	return OPT_Wanalyzer_use_of_uninitialized_value;
      case poison_kind::freed:
      case poison_kind::deleted:
	return OPT_Wanalyzer_use_after_free;
      case poison_kind::popped_stack:
	return OPT_Wanalyzer_use_of_pointer_in_stale_stack_frame;
      }
  }

  /* Stop exploring an execution path once this diagnostic is
     found on it.  */
  bool terminate_path_p () const final override { return true; }

  bool emit (diagnostic_emission_context &ctxt) final override
  {
    switch (m_pkind)
      {
      default:
	gcc_unreachable ();
      case poison_kind::uninit:
	{
	  ctxt.add_cwe (457); /* "CWE-457: Use of Uninitialized Variable".  */
	  return ctxt.warn ("use of uninitialized value %qE",
			    m_expr);
	}
	break;
      case poison_kind::freed:
	{
	  ctxt.add_cwe (416); /* "CWE-416: Use After Free".  */
	  return ctxt.warn ("use after %<free%> of %qE",
			    m_expr);
	}
	break;
      case poison_kind::deleted:
	{
	  ctxt.add_cwe (416); /* "CWE-416: Use After Free".  */
	  return ctxt.warn ("use after %<delete%> of %qE",
			    m_expr);
	}
	break;
      case poison_kind::popped_stack:
	{
	  /* TODO: which CWE?  */
	  return ctxt.warn
	    ("dereferencing pointer %qE to within stale stack frame",
	     m_expr);
	}
	break;
      }
  }

  bool
  describe_final_event (pretty_printer &pp,
			const evdesc::final_event &) final override
  {
    switch (m_pkind)
      {
      default:
	gcc_unreachable ();
      case poison_kind::uninit:
	{
	  pp_printf (&pp,
		     "use of uninitialized value %qE here",
		     m_expr);
	  return true;
	}
      case poison_kind::freed:
	{
	  pp_printf (&pp,
		     "use after %<free%> of %qE here",
		     m_expr);
	  return true;
	}
      case poison_kind::deleted:
	{
	  pp_printf (&pp,
		     "use after %<delete%> of %qE here",
		     m_expr);
	  return true;
	}
      case poison_kind::popped_stack:
	{
	  pp_printf (&pp,
		     "dereferencing pointer %qE to within stale stack frame",
		     m_expr);
	  return true;
	}
      }
  }

  void mark_interesting_stuff (interesting_t *interest) final override
  {
    if (m_src_region)
      interest->add_region_creation (m_src_region);
  }

  /* Attempt to suppress false positives.
     Reject paths where the value of the underlying region isn't poisoned.
     This can happen due to state merging when exploring the exploded graph,
     where the more precise analysis during feasibility analysis finds that
     the region is in fact valid.
     To do this we need to get the value from the fgraph.  Unfortunately
     we can't simply query the state of m_src_region (from the enode),
     since it might be a different region in the fnode state (e.g. with
     heap-allocated regions, the numbering could be different).
     Hence we access m_check_expr, if available.  */

  bool check_valid_fpath_p (const feasible_node &fnode)
    const final override
  {
    if (!m_check_expr)
      return true;
    const svalue *fsval = fnode.get_model ().get_rvalue (m_check_expr, nullptr);
    /* Check to see if the expr is also poisoned in FNODE (and in the
       same way).  */
    const poisoned_svalue * fspval = fsval->dyn_cast_poisoned_svalue ();
    if (!fspval)
      return false;
    if (fspval->get_poison_kind () != m_pkind)
      return false;
    return true;
  }

  /* Record the diagnostic's fields as SARIF properties, for
     debugging/testing.  */
  void
  maybe_add_sarif_properties (diagnostics::sarif_object &result_obj)
    const final override
  {
    auto &props = result_obj.get_or_create_properties ();
#define PROPERTY_PREFIX "gcc/analyzer/poisoned_value_diagnostic/"
    props.set (PROPERTY_PREFIX "expr", tree_to_json (m_expr));
    props.set_string (PROPERTY_PREFIX "kind", poison_kind_to_str (m_pkind));
    if (m_src_region)
      props.set (PROPERTY_PREFIX "src_region", m_src_region->to_json ());
    props.set (PROPERTY_PREFIX "check_expr", tree_to_json (m_check_expr));
#undef PROPERTY_PREFIX
  }

private:
  tree m_expr;
  enum poison_kind m_pkind;
  const region *m_src_region;
  tree m_check_expr;
};
922 :
/* A subclass of pending_diagnostic for complaining about shifts
   by negative counts.  */

class shift_count_negative_diagnostic
: public pending_diagnostic_subclass<shift_count_negative_diagnostic>
{
public:
  /* ASSIGN: the shift statement; COUNT_CST: the constant negative
     shift count.  */
  shift_count_negative_diagnostic (const gassign *assign, tree count_cst)
  : m_assign (assign), m_count_cst (count_cst)
  {}

  const char *get_kind () const final override
  {
    return "shift_count_negative_diagnostic";
  }

  /* Deduplicate by statement and count value.  */
  bool operator== (const shift_count_negative_diagnostic &other) const
  {
    return (m_assign == other.m_assign
	    && same_tree_p (m_count_cst, other.m_count_cst));
  }

  int get_controlling_option () const final override
  {
    return OPT_Wanalyzer_shift_count_negative;
  }

  bool emit (diagnostic_emission_context &ctxt) final override
  {
    return ctxt.warn ("shift by negative count (%qE)", m_count_cst);
  }

  bool
  describe_final_event (pretty_printer &pp,
			const evdesc::final_event &) final override
  {
    pp_printf (&pp,
	       "shift by negative amount here (%qE)",
	       m_count_cst);
    return true;
  }

private:
  const gassign *m_assign;
  tree m_count_cst;
};
969 :
/* A subclass of pending_diagnostic for complaining about shifts
   by counts >= the width of the operand type.  */

class shift_count_overflow_diagnostic
: public pending_diagnostic_subclass<shift_count_overflow_diagnostic>
{
public:
  /* ASSIGN: the shift statement; OPERAND_PRECISION: the bit precision
     of the shifted operand's type; COUNT_CST: the constant shift count
     (>= OPERAND_PRECISION).  */
  shift_count_overflow_diagnostic (const gassign *assign,
				   int operand_precision,
				   tree count_cst)
  : m_assign (assign), m_operand_precision (operand_precision),
    m_count_cst (count_cst)
  {}

  const char *get_kind () const final override
  {
    return "shift_count_overflow_diagnostic";
  }

  /* Deduplicate by statement, precision, and count value.  */
  bool operator== (const shift_count_overflow_diagnostic &other) const
  {
    return (m_assign == other.m_assign
	    && m_operand_precision == other.m_operand_precision
	    && same_tree_p (m_count_cst, other.m_count_cst));
  }

  int get_controlling_option () const final override
  {
    return OPT_Wanalyzer_shift_count_overflow;
  }

  bool emit (diagnostic_emission_context &ctxt) final override
  {
    return ctxt.warn ("shift by count (%qE) >= precision of type (%qi)",
		      m_count_cst, m_operand_precision);
  }

  bool
  describe_final_event (pretty_printer &pp,
			const evdesc::final_event &) final override
  {
    pp_printf (&pp,
	       "shift by count %qE here",
	       m_count_cst);
    return true;
  }

private:
  const gassign *m_assign;
  int m_operand_precision;
  tree m_count_cst;
};
1022 :
1023 : /* A subclass of pending_diagnostic for complaining about pointer
1024 : subtractions involving unrelated buffers. */
1025 :
class undefined_ptrdiff_diagnostic
: public pending_diagnostic_subclass<undefined_ptrdiff_diagnostic>
{
public:
  /* Region_creation_event subclass to give a custom wording when
     talking about creation of buffers for LHS and RHS of the
     subtraction.  */
  class ptrdiff_region_creation_event : public region_creation_event
  {
  public:
    ptrdiff_region_creation_event (const event_loc_info &loc_info,
				   bool is_lhs)
    : region_creation_event (loc_info),
      m_is_lhs (is_lhs)
    {
    }

    /* Describe the creation of the underlying object, distinguishing
       the LHS operand from the RHS operand of the subtraction.  */
    void print_desc (pretty_printer &pp) const final override
    {
      if (m_is_lhs)
	pp_string (&pp,
		   "underlying object for left-hand side"
		   " of subtraction created here");
      else
	pp_string (&pp,
		   "underlying object for right-hand side"
		   " of subtraction created here");
    }

  private:
    /* True for the LHS operand's buffer, false for the RHS's.  */
    bool m_is_lhs;
  };

  /* ASSIGN is the POINTER_DIFF_EXPR stmt; SVAL_A/SVAL_B are the operand
     values; BASE_REG_A/BASE_REG_B are their (distinct) base regions.  */
  undefined_ptrdiff_diagnostic (const gassign *assign,
				const svalue *sval_a,
				const svalue *sval_b,
				const region *base_reg_a,
				const region *base_reg_b)
  : m_assign (assign),
    m_sval_a (sval_a),
    m_sval_b (sval_b),
    m_base_reg_a (base_reg_a),
    m_base_reg_b (base_reg_b)
  {
    /* The diagnostic only makes sense for unrelated buffers.  */
    gcc_assert (m_base_reg_a != m_base_reg_b);
  }

  const char *get_kind () const final override
  {
    return "undefined_ptrdiff_diagnostic";
  }

  /* Equality for deduplication: all five fields must match.  */
  bool operator== (const undefined_ptrdiff_diagnostic &other) const
  {
    return (m_assign == other.m_assign
	    && m_sval_a == other.m_sval_a
	    && m_sval_b == other.m_sval_b
	    && m_base_reg_a == other.m_base_reg_a
	    && m_base_reg_b == other.m_base_reg_b);
  }

  /* Controlled by -Wanalyzer-undefined-behavior-ptrdiff.  */
  int get_controlling_option () const final override
  {
    return OPT_Wanalyzer_undefined_behavior_ptrdiff;
  }

  bool emit (diagnostic_emission_context &ctxt) final override
  {
    /* CWE-469: Use of Pointer Subtraction to Determine Size.  */
    ctxt.add_cwe (469);
    return ctxt.warn ("undefined behavior when subtracting pointers");
  }

  /* Add a custom creation event for whichever of the two base regions
     REG is (if either), so the path shows where each buffer came from.  */
  void add_region_creation_events (const region *reg,
				   tree /*capacity*/,
				   const event_loc_info &loc_info,
				   checker_path &emission_path) final override
  {
    if (reg == m_base_reg_a)
      emission_path.add_event
	(std::make_unique<ptrdiff_region_creation_event> (loc_info, true));
    else if (reg == m_base_reg_b)
      emission_path.add_event
	(std::make_unique<ptrdiff_region_creation_event> (loc_info, false));
  }

  /* Wording for the final event in the diagnostic path.  */
  bool
  describe_final_event (pretty_printer &pp,
			const evdesc::final_event &) final override
  {
    pp_string (&pp,
	       "subtraction of pointers has undefined behavior if"
	       " they do not point into the same array object");
    return true;
  }

  /* Mark both base regions as interesting so their creation events are
     retained in the emitted path.  */
  void mark_interesting_stuff (interesting_t *interesting) final override
  {
    interesting->add_region_creation (m_base_reg_a);
    interesting->add_region_creation (m_base_reg_b);
  }

private:
  /* The POINTER_DIFF_EXPR stmt.  */
  const gassign *m_assign;
  /* The two pointer operand values.  */
  const svalue *m_sval_a;
  const svalue *m_sval_b;
  /* The distinct base regions the two pointers point into.  */
  const region *m_base_reg_a;
  const region *m_base_reg_b;
};
1135 :
1136 : /* Check the pointer subtraction SVAL_A - SVAL_B at ASSIGN and add
1137 : a warning to CTXT if they're not within the same base region. */
1138 :
1139 : static void
1140 582 : check_for_invalid_ptrdiff (const gassign *assign,
1141 : region_model_context &ctxt,
1142 : const svalue *sval_a, const svalue *sval_b)
1143 : {
1144 582 : const region *base_reg_a = sval_a->maybe_get_deref_base_region ();
1145 582 : if (!base_reg_a)
1146 518 : return;
1147 102 : const region *base_reg_b = sval_b->maybe_get_deref_base_region ();
1148 102 : if (!base_reg_b)
1149 : return;
1150 :
1151 78 : if (base_reg_a == base_reg_b)
1152 : return;
1153 :
1154 64 : if (base_reg_a->get_kind () == RK_SYMBOLIC)
1155 : return;
1156 64 : if (base_reg_b->get_kind () == RK_SYMBOLIC)
1157 : return;
1158 :
1159 64 : ctxt.warn
1160 64 : (std::make_unique<undefined_ptrdiff_diagnostic> (assign,
1161 : sval_a,
1162 : sval_b,
1163 : base_reg_a,
1164 : base_reg_b));
1165 : }
1166 :
1167 : /* If ASSIGN is a stmt that can be modelled via
1168 : set_value (lhs_reg, SVALUE, CTXT)
1169 : for some SVALUE, get the SVALUE.
1170 : Otherwise return nullptr. */
1171 :
const svalue *
region_model::get_gassign_result (const gassign *assign,
				  region_model_context *ctxt)
{
  tree lhs = gimple_assign_lhs (assign);

  /* Model non-clobber volatile ops as fresh conjured values, since the
     value can change behind the analyzer's back.  */
  if (gimple_has_volatile_ops (assign)
      && !gimple_clobber_p (assign))
    {
      conjured_purge p (this, ctxt);
      return m_mgr->get_or_create_conjured_svalue (TREE_TYPE (lhs),
						   assign,
						   get_lvalue (lhs, ctxt),
						   p);
    }

  tree rhs1 = gimple_assign_rhs1 (assign);
  enum tree_code op = gimple_assign_rhs_code (assign);
  switch (op)
    {
    default:
      /* Unhandled op; caller (on_assignment) falls back to an unknown
	 svalue.  */
      return nullptr;

    case POINTER_PLUS_EXPR:
      {
	/* e.g. "_1 = a_10(D) + 12;" */
	tree ptr = rhs1;
	tree offset = gimple_assign_rhs2 (assign);

	const svalue *ptr_sval = get_rvalue (ptr, ctxt);
	const svalue *offset_sval = get_rvalue (offset, ctxt);
	/* Quoting tree.def, "the second operand [of a POINTER_PLUS_EXPR]
	   is an integer of type sizetype".  */
	offset_sval = m_mgr->get_or_create_cast (size_type_node, offset_sval);

	const svalue *sval_binop
	  = m_mgr->get_or_create_binop (TREE_TYPE (lhs), op,
					ptr_sval, offset_sval);
	return sval_binop;
      }
      break;

    case POINTER_DIFF_EXPR:
      {
	/* e.g. "_1 = p_2(D) - q_3(D);".  */
	tree rhs2 = gimple_assign_rhs2 (assign);
	const svalue *rhs1_sval = get_rvalue (rhs1, ctxt);
	const svalue *rhs2_sval = get_rvalue (rhs2, ctxt);

	// TODO: perhaps fold to zero if they're known to be equal?

	/* Warn about subtraction of pointers into unrelated buffers.  */
	if (ctxt)
	  check_for_invalid_ptrdiff (assign, *ctxt, rhs1_sval, rhs2_sval);

	const svalue *sval_binop
	  = m_mgr->get_or_create_binop (TREE_TYPE (lhs), op,
					rhs1_sval, rhs2_sval);
	return sval_binop;
      }
      break;

    /* Assignments of the form
	 set_value (lvalue (LHS), rvalue (EXPR))
       for various EXPR.
       We already have the lvalue for the LHS above, as "lhs_reg".  */
    case ADDR_EXPR: /* LHS = &RHS;  */
    case BIT_FIELD_REF:
    case COMPONENT_REF: /* LHS = op0.op1;  */
    case MEM_REF:
    case REAL_CST:
    case COMPLEX_CST:
    case VECTOR_CST:
    case INTEGER_CST:
    case ARRAY_REF:
    case SSA_NAME: /* LHS = VAR; */
    case VAR_DECL: /* LHS = VAR; */
    case PARM_DECL:/* LHS = VAR; */
    case REALPART_EXPR:
    case IMAGPART_EXPR:
      return get_rvalue (rhs1, ctxt);

    case ABS_EXPR:
    case ABSU_EXPR:
    case CONJ_EXPR:
    case BIT_NOT_EXPR:
    case FIX_TRUNC_EXPR:
    case FLOAT_EXPR:
    case NEGATE_EXPR:
    case NOP_EXPR:
    case VIEW_CONVERT_EXPR:
      {
	/* Unary ops.  */
	const svalue *rhs_sval = get_rvalue (rhs1, ctxt);
	const svalue *sval_unaryop
	  = m_mgr->get_or_create_unaryop (TREE_TYPE (lhs), op, rhs_sval);
	return sval_unaryop;
      }

    case EQ_EXPR:
    case GE_EXPR:
    case LE_EXPR:
    case NE_EXPR:
    case GT_EXPR:
    case LT_EXPR:
    case UNORDERED_EXPR:
    case ORDERED_EXPR:
      {
	tree rhs2 = gimple_assign_rhs2 (assign);

	const svalue *rhs1_sval = get_rvalue (rhs1, ctxt);
	const svalue *rhs2_sval = get_rvalue (rhs2, ctxt);

	if (TREE_TYPE (lhs) == boolean_type_node)
	  {
	    /* Consider constraints between svalues.  */
	    tristate t = eval_condition (rhs1_sval, op, rhs2_sval);
	    if (t.is_known ())
	      return m_mgr->get_or_create_constant_svalue
		(t.is_true () ? boolean_true_node : boolean_false_node);
	  }

	/* Otherwise, generate a symbolic binary op.  */
	const svalue *sval_binop
	  = m_mgr->get_or_create_binop (TREE_TYPE (lhs), op,
					rhs1_sval, rhs2_sval);
	return sval_binop;
      }
      break;

    case PLUS_EXPR:
    case MINUS_EXPR:
    case MULT_EXPR:
    case MULT_HIGHPART_EXPR:
    case TRUNC_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case TRUNC_MOD_EXPR:
    case CEIL_MOD_EXPR:
    case FLOOR_MOD_EXPR:
    case ROUND_MOD_EXPR:
    case RDIV_EXPR:
    case EXACT_DIV_EXPR:
    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
    case BIT_IOR_EXPR:
    case BIT_XOR_EXPR:
    case BIT_AND_EXPR:
    case MIN_EXPR:
    case MAX_EXPR:
    case COMPLEX_EXPR:
      {
	/* Binary ops.  */
	tree rhs2 = gimple_assign_rhs2 (assign);

	const svalue *rhs1_sval = get_rvalue (rhs1, ctxt);
	const svalue *rhs2_sval = get_rvalue (rhs2, ctxt);

	if (ctxt && (op == LSHIFT_EXPR || op == RSHIFT_EXPR))
	  {
	    /* "INT34-C. Do not shift an expression by a negative number of bits
	       or by greater than or equal to the number of bits that exist in
	       the operand."  */
	    if (const tree rhs2_cst = rhs2_sval->maybe_get_constant ())
	      if (TREE_CODE (rhs2_cst) == INTEGER_CST
		  && INTEGRAL_TYPE_P (TREE_TYPE (rhs1)))
		{
		  if (tree_int_cst_sgn (rhs2_cst) < 0)
		    ctxt->warn
		      (std::make_unique<shift_count_negative_diagnostic>
			 (assign, rhs2_cst));
		  else if (compare_tree_int (rhs2_cst,
					     TYPE_PRECISION (TREE_TYPE (rhs1)))
			   >= 0)
		    ctxt->warn
		      (std::make_unique<shift_count_overflow_diagnostic>
			 (assign,
			  int (TYPE_PRECISION (TREE_TYPE (rhs1))),
			  rhs2_cst));
		}
	  }

	if (ctxt
	    && (op == TRUNC_DIV_EXPR
		|| op == CEIL_DIV_EXPR
		|| op == FLOOR_DIV_EXPR
		|| op == ROUND_DIV_EXPR
		|| op == TRUNC_MOD_EXPR
		|| op == CEIL_MOD_EXPR
		|| op == FLOOR_MOD_EXPR
		|| op == ROUND_MOD_EXPR
		|| op == RDIV_EXPR
		|| op == EXACT_DIV_EXPR))
	  {
	    /* Bail out (leading to an unknown value for the LHS) if the
	       divisor is known to be zero.  */
	    value_range rhs_vr;
	    if (rhs2_sval->maybe_get_value_range (rhs_vr))
	      if (rhs_vr.zero_p ())
		{
		  /* Ideally we should issue a warning here;
		     see PR analyzer/124217.  */
		  return nullptr;
		}
	  }

	const svalue *sval_binop
	  = m_mgr->get_or_create_binop (TREE_TYPE (lhs), op,
					rhs1_sval, rhs2_sval);
	return sval_binop;
      }

    /* Vector expressions.  In theory we could implement these elementwise,
       but for now, simply return unknown values.  */
    case VEC_DUPLICATE_EXPR:
    case VEC_SERIES_EXPR:
    case VEC_COND_EXPR:
    case VEC_PERM_EXPR:
    case VEC_WIDEN_MULT_HI_EXPR:
    case VEC_WIDEN_MULT_LO_EXPR:
    case VEC_WIDEN_MULT_EVEN_EXPR:
    case VEC_WIDEN_MULT_ODD_EXPR:
    case VEC_UNPACK_HI_EXPR:
    case VEC_UNPACK_LO_EXPR:
    case VEC_UNPACK_FLOAT_HI_EXPR:
    case VEC_UNPACK_FLOAT_LO_EXPR:
    case VEC_UNPACK_FIX_TRUNC_HI_EXPR:
    case VEC_UNPACK_FIX_TRUNC_LO_EXPR:
    case VEC_PACK_TRUNC_EXPR:
    case VEC_PACK_SAT_EXPR:
    case VEC_PACK_FIX_TRUNC_EXPR:
    case VEC_PACK_FLOAT_EXPR:
    case VEC_WIDEN_LSHIFT_HI_EXPR:
    case VEC_WIDEN_LSHIFT_LO_EXPR:
      return m_mgr->get_or_create_unknown_svalue (TREE_TYPE (lhs));
    }
}
1409 :
1410 : /* Workaround for discarding certain false positives from
1411 : -Wanalyzer-use-of-uninitialized-value
1412 : of the form:
1413 : ((A OR-IF B) OR-IF C)
1414 : and:
1415 : ((A AND-IF B) AND-IF C)
1416 : where evaluating B is redundant, but could involve simple accesses of
1417 : uninitialized locals.
1418 :
1419 : When optimization is turned on the FE can immediately fold compound
1420 : conditionals. Specifically, c_parser_condition parses this condition:
1421 : ((A OR-IF B) OR-IF C)
1422 : and calls c_fully_fold on the condition.
1423 : Within c_fully_fold, fold_truth_andor is called, which bails when
1424 : optimization is off, but if any optimization is turned on can convert the
1425 : ((A OR-IF B) OR-IF C)
1426 : into:
1427 : ((A OR B) OR_IF C)
1428 : for sufficiently simple B
1429 : i.e. the inner OR-IF becomes an OR.
1430 : At gimplification time the inner OR becomes BIT_IOR_EXPR (in gimplify_expr),
1431 : giving this for the inner condition:
1432 : tmp = A | B;
1433 : if (tmp)
1434 : thus effectively synthesizing a redundant access of B when optimization
1435 : is turned on, when compared to:
1436 : if (A) goto L1; else goto L4;
1437 : L1: if (B) goto L2; else goto L4;
1438 : L2: if (C) goto L3; else goto L4;
1439 : for the unoptimized case.
1440 :
1441 : Return true if CTXT appears to be handling such a short-circuitable stmt,
1442 : such as the def-stmt for B for the:
1443 : tmp = A | B;
1444 : case above, for the case where A is true and thus B would have been
1445 : short-circuited without optimization, using MODEL for the value of A. */
1446 :
static bool
within_short_circuited_stmt_p (const region_model *model,
			       const gassign *assign_stmt)
{
  /* We must have an assignment to a temporary of _Bool type.  */
  tree lhs = gimple_assign_lhs (assign_stmt);
  if (TREE_TYPE (lhs) != boolean_type_node)
    return false;
  if (TREE_CODE (lhs) != SSA_NAME)
    return false;
  /* An SSA name with an associated VAR_DECL is a user variable, not a
     compiler-generated temporary; those aren't the pattern we want.  */
  if (SSA_NAME_VAR (lhs) != NULL_TREE)
    return false;

  /* The temporary bool must be used exactly once: as the second arg of
     a BIT_IOR_EXPR or BIT_AND_EXPR.  */
  use_operand_p use_op;
  gimple *use_stmt;
  if (!single_imm_use (lhs, &use_op, &use_stmt))
    return false;
  const gassign *use_assign = dyn_cast <const gassign *> (use_stmt);
  if (!use_assign)
    return false;
  enum tree_code op = gimple_assign_rhs_code (use_assign);
  if (!(op == BIT_IOR_EXPR ||op == BIT_AND_EXPR))
    return false;
  if (!(gimple_assign_rhs1 (use_assign) != lhs
	&& gimple_assign_rhs2 (use_assign) == lhs))
    return false;

  /* The first arg of the bitwise stmt must have a known value in MODEL
     that implies that the value of the second arg doesn't matter, i.e.
     1 for bitwise or, 0 for bitwise and.  */
  tree other_arg = gimple_assign_rhs1 (use_assign);
  /* Use a nullptr ctxt here to avoid generating warnings.  */
  const svalue *other_arg_sval = model->get_rvalue (other_arg, nullptr);
  tree other_arg_cst = other_arg_sval->maybe_get_constant ();
  if (!other_arg_cst)
    return false;
  switch (op)
    {
    default:
      gcc_unreachable ();
    case BIT_IOR_EXPR:
      /* "A | B" with A != 0: the result is true regardless of B.  */
      if (zerop (other_arg_cst))
	return false;
      break;
    case BIT_AND_EXPR:
      /* "A & B" with A == 0: the result is false regardless of B.  */
      if (!zerop (other_arg_cst))
	return false;
      break;
    }

  /* All tests passed.  We appear to be in a stmt that generates a boolean
     temporary with a value that won't matter.  */
  return true;
}
1503 :
1504 : /* Workaround for discarding certain false positives from
1505 : -Wanalyzer-use-of-uninitialized-value
1506 : seen with -ftrivial-auto-var-init=.
1507 :
1508 : -ftrivial-auto-var-init= will generate calls to IFN_DEFERRED_INIT.
1509 :
1510 : If the address of the var is taken, gimplification will give us
1511 : something like:
1512 :
1513 : _1 = .DEFERRED_INIT (4, 2, &"len"[0]);
1514 : len = _1;
1515 :
1516 : The result of DEFERRED_INIT will be an uninit value; we don't
1517 : want to emit a false positive for "len = _1;"
1518 :
1519 : Return true if ASSIGN_STMT is such a stmt. */
1520 :
1521 : static bool
1522 1146 : due_to_ifn_deferred_init_p (const gassign *assign_stmt)
1523 :
1524 : {
1525 : /* We must have an assignment to a decl from an SSA name that's the
1526 : result of a IFN_DEFERRED_INIT call. */
1527 2122 : if (gimple_assign_rhs_code (assign_stmt) != SSA_NAME)
1528 : return false;
1529 287 : tree lhs = gimple_assign_lhs (assign_stmt);
1530 287 : if (TREE_CODE (lhs) != VAR_DECL)
1531 : return false;
1532 222 : tree rhs = gimple_assign_rhs1 (assign_stmt);
1533 222 : if (TREE_CODE (rhs) != SSA_NAME)
1534 : return false;
1535 222 : const gimple *def_stmt = SSA_NAME_DEF_STMT (rhs);
1536 222 : const gcall *call = dyn_cast <const gcall *> (def_stmt);
1537 222 : if (!call)
1538 : return false;
1539 222 : if (gimple_call_internal_p (call)
1540 222 : && gimple_call_internal_fn (call) == IFN_DEFERRED_INIT)
1541 210 : return true;
1542 : return false;
1543 : }
1544 :
1545 : /* Check for SVAL being poisoned, adding a warning to CTXT.
1546 : Return SVAL, or, if a warning is added, another value, to avoid
1547 : repeatedly complaining about the same poisoned value in followup code.
1548 : SRC_REGION is a hint about where SVAL came from, and can be nullptr. */
1549 :
const svalue *
region_model::check_for_poison (const svalue *sval,
				tree expr,
				const region *src_region,
				region_model_context *ctxt) const
{
  /* Without a context there is nowhere to report; pass the value
     through unchanged.  */
  if (!ctxt)
    return sval;

  if (const poisoned_svalue *poisoned_sval = sval->dyn_cast_poisoned_svalue ())
    {
      enum poison_kind pkind = poisoned_sval->get_poison_kind ();

      /* Ignore uninitialized uses of empty types; there's nothing
	 to initialize.  */
      if (pkind == poison_kind::uninit
	  && sval->get_type ()
	  && is_empty_type (sval->get_type ()))
	return sval;

      if (pkind == poison_kind::uninit)
	if (const gimple *curr_stmt = ctxt->get_stmt ())
	  if (const gassign *assign_stmt
		= dyn_cast <const gassign *> (curr_stmt))
	    {
	      /* Special case to avoid certain false positives.  */
	      if (within_short_circuited_stmt_p (this, assign_stmt))
		return sval;

	      /* Special case to avoid false positive on
		 -ftrivial-auto-var-init=.  */
	      if (due_to_ifn_deferred_init_p (assign_stmt))
		return sval;
	    }

      /* If we have an SSA name for a temporary, we don't want to print
	 '<unknown>'.
	 Poisoned values are shared by type, and so we can't reconstruct
	 the tree other than via the def stmts, using
	 fixup_tree_for_diagnostic.  */
      tree diag_arg = fixup_tree_for_diagnostic (expr);
      if (src_region == nullptr && pkind == poison_kind::uninit)
	src_region = get_region_for_poisoned_expr (expr);

      /* Can we reliably get the poisoned value from "expr"?
	 This is for use by poisoned_value_diagnostic::check_valid_fpath_p.
	 Unfortunately, we might not have a reliable value for EXPR.
	 Hence we only query its value now, and only use it if we get the
	 poisoned value back again.  */
      tree check_expr = expr;
      const svalue *foo_sval = get_rvalue (expr, nullptr);
      if (foo_sval == sval)
	check_expr = expr;
      else
	check_expr = nullptr;
      if (ctxt->warn
	    (std::make_unique<poisoned_value_diagnostic> (diag_arg,
							  pkind,
							  src_region,
							  check_expr)))
	{
	  /* We only want to report use of a poisoned value at the first
	     place it gets used; return an unknown value to avoid generating
	     a chain of followup warnings.  */
	  sval = m_mgr->get_or_create_unknown_svalue (sval->get_type ());
	}

      return sval;
    }

  return sval;
}
1622 :
1623 : /* Attempt to get a region for describing EXPR, the source of region of
1624 : a poisoned_svalue for use in a poisoned_value_diagnostic.
1625 : Return nullptr if there is no good region to use. */
1626 :
1627 : const region *
1628 2162 : region_model::get_region_for_poisoned_expr (tree expr) const
1629 : {
1630 2162 : if (TREE_CODE (expr) == SSA_NAME)
1631 : {
1632 1402 : tree decl = SSA_NAME_VAR (expr);
1633 1362 : if (decl && DECL_P (decl))
1634 : expr = decl;
1635 : else
1636 : return nullptr;
1637 : }
1638 2122 : return get_lvalue (expr, nullptr);
1639 : }
1640 :
1641 : /* Update this model for the ASSIGN stmt, using CTXT to report any
1642 : diagnostics. */
1643 :
void
region_model::on_assignment (const gassign *assign, region_model_context *ctxt)
{
  tree lhs = gimple_assign_lhs (assign);
  tree rhs1 = gimple_assign_rhs1 (assign);

  const region *lhs_reg = get_lvalue (lhs, ctxt);

  /* Any writes other than to the stack are treated
     as externally visible.  */
  if (ctxt)
    {
      enum memory_space memspace = lhs_reg->get_memory_space ();
      if (memspace != MEMSPACE_STACK)
	ctxt->maybe_did_work ();
    }

  /* Most assignments are handled by:
       set_value (lhs_reg, SVALUE, CTXT)
     for some SVALUE.  */
  if (const svalue *sval = get_gassign_result (assign, ctxt))
    {
      tree expr = get_diagnostic_tree_for_gassign (assign);
      /* Complain about e.g. uses of uninitialized values in the RHS.  */
      check_for_poison (sval, expr, nullptr, ctxt);
      set_value (lhs_reg, sval, ctxt);
      return;
    }

  /* Fallback handling for ops get_gassign_result doesn't model.  */
  enum tree_code op = gimple_assign_rhs_code (assign);
  switch (op)
    {
    default:
      {
	if (0)
	  sorry_at (assign->location, "unhandled assignment op: %qs",
		    get_tree_code_name (op));
	/* Unknown op: conservatively give the LHS an unknown value.  */
	const svalue *unknown_sval
	  = m_mgr->get_or_create_unknown_svalue (TREE_TYPE (lhs));
	set_value (lhs_reg, unknown_sval, ctxt);
      }
      break;

    case CONSTRUCTOR:
      {
	if (TREE_CLOBBER_P (rhs1))
	  {
	    /* e.g. "x ={v} {CLOBBER};"  */
	    clobber_region (lhs_reg);
	  }
	else
	  {
	    /* Any CONSTRUCTOR that survives to this point is either
	       just a zero-init of everything, or a vector.  */
	    if (!CONSTRUCTOR_NO_CLEARING (rhs1))
	      zero_fill_region (lhs_reg, ctxt);
	    unsigned ix;
	    tree index;
	    tree val;
	    FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (rhs1), ix, index, val)
	      {
		gcc_assert (TREE_CODE (TREE_TYPE (rhs1)) == VECTOR_TYPE);
		/* Elements without an explicit index use their position.  */
		if (!index)
		  index = build_int_cst (integer_type_node, ix);
		gcc_assert (TREE_CODE (index) == INTEGER_CST);
		const svalue *index_sval
		  = m_mgr->get_or_create_constant_svalue (index);
		gcc_assert (index_sval);
		const region *sub_reg
		  = m_mgr->get_element_region (lhs_reg,
					       TREE_TYPE (val),
					       index_sval);
		const svalue *val_sval = get_rvalue (val, ctxt);
		set_value (sub_reg, val_sval, ctxt);
	      }
	  }
      }
      break;

    case STRING_CST:
      {
	/* e.g. "struct s2 x = {{'A', 'B', 'C', 'D'}};".  */
	const svalue *rhs_sval = get_rvalue (rhs1, ctxt);
	m_store.set_value (m_mgr->get_store_manager(), lhs_reg, rhs_sval,
			   ctxt ? ctxt->get_uncertainty () : nullptr);
      }
      break;
    }
}
1732 :
1733 : /* Handle the pre-sm-state part of STMT, modifying this object in-place.
1734 : Write true to *OUT_UNKNOWN_SIDE_EFFECTS if the stmt has unknown
1735 : side effects. */
1736 :
void
region_model::on_stmt_pre (const gimple *stmt,
			   bool *out_unknown_side_effects,
			   region_model_context *ctxt)
{
  /* Dispatch on the kind of gimple stmt; only calls can set
     *OUT_UNKNOWN_SIDE_EFFECTS.  */
  switch (gimple_code (stmt))
    {
    case GIMPLE_COND:
    case GIMPLE_EH_DISPATCH:
    case GIMPLE_GOTO:
    case GIMPLE_LABEL:
    case GIMPLE_NOP:
    case GIMPLE_PREDICT:
    case GIMPLE_RESX:
    case GIMPLE_SWITCH:
      /* No-ops here.  */
      break;

    case GIMPLE_ASSIGN:
      {
	const gassign *assign = as_a <const gassign *> (stmt);
	on_assignment (assign, ctxt);
      }
      break;

    case GIMPLE_ASM:
      {
	const gasm *asm_stmt = as_a <const gasm *> (stmt);
	on_asm_stmt (asm_stmt, ctxt);
	if (ctxt)
	  ctxt->maybe_did_work ();
      }
      break;

    case GIMPLE_CALL:
      {
	/* Track whether we have a gcall to a function that's not recognized by
	   anything, for which we don't have a function body, or for which we
	   don't know the fndecl.  */
	const gcall *call = as_a <const gcall *> (stmt);
	*out_unknown_side_effects = on_call_pre (*call, ctxt);
      }
      break;

    case GIMPLE_RETURN:
      {
	const greturn *return_ = as_a <const greturn *> (stmt);
	on_return (return_, ctxt);
      }
      break;

    /* We don't expect to see any other statement kinds in the analyzer.  */
    case GIMPLE_DEBUG: // should have stripped these out when building the supergraph
    default:
      internal_error ("unexpected gimple stmt code: %qs",
		      gimple_code_name[gimple_code (stmt)]);
      break;
    }
}
1797 : /* Given a call CD with function attribute FORMAT_ATTR, check that the
1798 : format arg to the call is a valid null-terminated string. */
1799 :
1800 : void
1801 1054 : region_model::check_call_format_attr (const call_details &cd,
1802 : tree format_attr) const
1803 : {
1804 : /* We assume that FORMAT_ATTR has already been validated. */
1805 :
1806 : /* arg0 of the attribute should be kind of format strings
1807 : that this function expects (e.g. "printf"). */
1808 1054 : const tree arg0_tree_list = TREE_VALUE (format_attr);
1809 1054 : if (!arg0_tree_list)
1810 0 : return;
1811 :
1812 : /* arg1 of the attribute should be the 1-based parameter index
1813 : to treat as the format string. */
1814 1054 : const tree arg1_tree_list = TREE_CHAIN (arg0_tree_list);
1815 1054 : if (!arg1_tree_list)
1816 : return;
1817 1054 : const tree arg1_value = TREE_VALUE (arg1_tree_list);
1818 1054 : if (!arg1_value)
1819 : return;
1820 :
1821 1054 : unsigned format_arg_idx = TREE_INT_CST_LOW (arg1_value) - 1;
1822 1054 : if (cd.num_args () <= format_arg_idx)
1823 : return;
1824 :
1825 : /* Subclass of annotating_context that
1826 : adds a note about the format attr to any saved diagnostics. */
1827 1054 : class annotating_ctxt : public annotating_context
1828 : {
1829 : public:
1830 1054 : annotating_ctxt (const call_details &cd,
1831 : unsigned fmt_param_idx)
1832 1054 : : annotating_context (cd.get_ctxt ()),
1833 1054 : m_cd (cd),
1834 1054 : m_fmt_param_idx (fmt_param_idx)
1835 : {
1836 : }
1837 13 : void add_annotations () final override
1838 : {
1839 0 : class reason_format_attr
1840 : : public pending_note_subclass<reason_format_attr>
1841 : {
1842 : public:
1843 13 : reason_format_attr (const call_arg_details &arg_details)
1844 13 : : m_arg_details (arg_details)
1845 : {
1846 : }
1847 :
1848 74 : const char *get_kind () const final override
1849 : {
1850 74 : return "reason_format_attr";
1851 : }
1852 :
1853 13 : void emit () const final override
1854 : {
1855 13 : inform (DECL_SOURCE_LOCATION (m_arg_details.m_called_fndecl),
1856 : "parameter %i of %qD marked as a format string"
1857 : " via %qs attribute",
1858 13 : m_arg_details.m_arg_idx + 1, m_arg_details.m_called_fndecl,
1859 : "format");
1860 13 : }
1861 :
1862 37 : bool operator== (const reason_format_attr &other) const
1863 : {
1864 37 : return m_arg_details == other.m_arg_details;
1865 : }
1866 :
1867 : private:
1868 : call_arg_details m_arg_details;
1869 : };
1870 :
1871 13 : call_arg_details arg_details (m_cd, m_fmt_param_idx);
1872 13 : add_note (std::make_unique<reason_format_attr> (arg_details));
1873 13 : }
1874 : private:
1875 : const call_details &m_cd;
1876 : unsigned m_fmt_param_idx;
1877 : };
1878 :
1879 1054 : annotating_ctxt my_ctxt (cd, format_arg_idx);
1880 1054 : call_details my_cd (cd, &my_ctxt);
1881 1054 : my_cd.check_for_null_terminated_string_arg (format_arg_idx);
1882 : }
1883 :
1884 : /* Ensure that all arguments at the call described by CD are checked
1885 : for poisoned values, by calling get_rvalue on each argument.
1886 :
1887 : Check that calls to functions with "format" attribute have valid
1888 : null-terminated strings for their format argument. */
1889 :
1890 : void
1891 49121 : region_model::check_call_args (const call_details &cd) const
1892 : {
1893 114415 : for (unsigned arg_idx = 0; arg_idx < cd.num_args (); arg_idx++)
1894 65294 : cd.get_arg_svalue (arg_idx);
1895 :
1896 : /* Handle attribute "format". */
1897 49121 : if (tree format_attr = cd.lookup_function_attribute ("format"))
1898 1054 : check_call_format_attr (cd, format_attr);
1899 49121 : }
1900 :
1901 : /* Update this model for an outcome of a call that returns a specific
1902 : integer constant.
1903 : If UNMERGEABLE, then make the result unmergeable, e.g. to prevent
1904 : the state-merger code from merging success and failure outcomes. */
1905 :
1906 : void
1907 845 : region_model::update_for_int_cst_return (const call_details &cd,
1908 : int retval,
1909 : bool unmergeable)
1910 : {
1911 845 : if (!cd.get_lhs_type ())
1912 : return;
1913 591 : if (TREE_CODE (cd.get_lhs_type ()) != INTEGER_TYPE)
1914 : return;
1915 585 : const svalue *result
1916 585 : = m_mgr->get_or_create_int_cst (cd.get_lhs_type (), retval);
1917 585 : if (unmergeable)
1918 585 : result = m_mgr->get_or_create_unmergeable (result);
1919 585 : set_value (cd.get_lhs_region (), result, cd.get_ctxt ());
1920 : }
1921 :
1922 : /* Update this model for an outcome of a call that returns zero.
1923 : If UNMERGEABLE, then make the result unmergeable, e.g. to prevent
1924 : the state-merger code from merging success and failure outcomes. */
1925 :
void
region_model::update_for_zero_return (const call_details &cd,
				      bool unmergeable)
{
  /* Forward to the general integer-constant-return handler.  */
  update_for_int_cst_return (cd, 0, unmergeable);
}
1932 :
/* Update this model for an outcome of a call that returns non-zero.
   Specifically, assign an svalue to the LHS, and add a constraint that
   that svalue is non-zero. */

void
region_model::update_for_nonzero_return (const call_details &cd)
{
  /* Nothing to do if the call's result is discarded (no LHS).  */
  if (!cd.get_lhs_type ())
    return;
  /* Only handle integer-typed results.  */
  if (TREE_CODE (cd.get_lhs_type ()) != INTEGER_TYPE)
    return;
  /* Give the LHS a default (conjured) value first, then constrain
     that stored value to be != 0; we constrain the stored svalue
     rather than inventing a fresh one so later reads see the same
     symbolic value.  */
  cd.set_any_lhs_with_defaults ();
  const svalue *zero
    = m_mgr->get_or_create_int_cst (cd.get_lhs_type (), 0);
  const svalue *result
    = get_store_value (cd.get_lhs_region (), cd.get_ctxt ());
  add_constraint (result, NE_EXPR, zero, cd.get_ctxt ());
}
1951 :
/* Subroutine of region_model::maybe_get_copy_bounds.
   The Linux kernel commonly uses
     min_t([unsigned] long, VAR, sizeof(T));
   to set an upper bound on the size of a copy_to_user.
   Attempt to simplify such sizes by trying to get the upper bound as a
   constant.
   Return the simplified svalue if possible, or nullptr otherwise. */

static const svalue *
maybe_simplify_upper_bound (const svalue *num_bytes_sval,
			    region_model_manager *mgr)
{
  tree type = num_bytes_sval->get_type ();
  /* Strip any casts so we can see through e.g. a MIN_EXPR wrapped
     in a conversion.  */
  while (const svalue *raw = num_bytes_sval->maybe_undo_cast ())
    num_bytes_sval = raw;
  /* Recognize MIN_EXPR (VAR, CST) and use the constant operand as the
     bound, cast back to the original type.  */
  if (const binop_svalue *binop_sval = num_bytes_sval->dyn_cast_binop_svalue ())
    if (binop_sval->get_op () == MIN_EXPR)
      if (binop_sval->get_arg1 ()->get_kind () == SK_CONSTANT)
	{
	  return mgr->get_or_create_cast (type, binop_sval->get_arg1 ());
	  /* TODO: we might want to also capture the constraint
	     when recording the diagnostic, or note that we're using
	     the upper bound. */
	}
  return nullptr;
}
1978 :
/* Attempt to get an upper bound for the size of a copy when simulating a
   copy function.

   NUM_BYTES_SVAL is the symbolic value for the size of the copy.
   Use it if it's constant, otherwise try to simplify it.  Failing
   that, use the size of SRC_REG if constant.

   Return a symbolic value for an upper limit on the number of bytes
   copied, or nullptr if no such value could be determined. */

const svalue *
region_model::maybe_get_copy_bounds (const region *src_reg,
				     const svalue *num_bytes_sval)
{
  /* Easy case: the size is already a constant.  */
  if (num_bytes_sval->maybe_get_constant ())
    return num_bytes_sval;

  /* Try to reduce MIN_EXPR (VAR, CST) forms to the constant bound.  */
  if (const svalue *simplified
	= maybe_simplify_upper_bound (num_bytes_sval, m_mgr))
    num_bytes_sval = simplified;

  if (num_bytes_sval->maybe_get_constant ())
    return num_bytes_sval;

  /* For now, try just guessing the size as the capacity of the
     base region of the src.
     This is a hack; we might get too large a value. */
  const region *src_base_reg = src_reg->get_base_region ();
  num_bytes_sval = get_capacity (src_base_reg);

  if (num_bytes_sval->maybe_get_constant ())
    return num_bytes_sval;

  /* Non-constant: give up. */
  return nullptr;
}
2015 :
2016 : /* Get any known_function for FNDECL for call CD.
2017 :
2018 : The call must match all assumptions made by the known_function (such as
2019 : e.g. "argument 1's type must be a pointer type").
2020 :
2021 : Return nullptr if no known_function is found, or it does not match the
2022 : assumption(s). */
2023 :
2024 : const known_function *
2025 292371 : region_model::get_known_function (tree fndecl, const call_details &cd) const
2026 : {
2027 292371 : known_function_manager *known_fn_mgr = m_mgr->get_known_function_manager ();
2028 292371 : return known_fn_mgr->get_match (fndecl, cd);
2029 : }
2030 :
2031 : /* Get any known_function for IFN, or nullptr. */
2032 :
2033 : const known_function *
2034 1383 : region_model::get_known_function (enum internal_fn ifn) const
2035 : {
2036 1383 : known_function_manager *known_fn_mgr = m_mgr->get_known_function_manager ();
2037 1383 : return known_fn_mgr->get_internal_fn (ifn);
2038 : }
2039 :
/* Get any builtin_known_function for CALL and emit any warning to CTXT
   if not nullptr.

   The call must match all assumptions made by the known_function (such as
   e.g. "argument 1's type must be a pointer type").

   Return nullptr if no builtin_known_function is found, or it does
   not match the assumption(s).

   Internally calls get_known_function to find a known_function and cast it
   to a builtin_known_function.

   For instance, calloc is a C builtin, defined in gcc/builtins.def
   by the DEF_LIB_BUILTIN macro. Such builtins are recognized by the
   analyzer by their name, so that even in C++ or if the user redeclares
   them but mismatch their signature, they are still recognized as builtins.

   Cases when a supposed builtin is not flagged as one by the FE:

   The C++ FE does not recognize calloc as a builtin if it has not been
   included from a standard header, but the C FE does. Hence in C++ if
   CALL comes from a calloc and stdlib is not included,
   gcc/tree.h:fndecl_built_in_p (CALL) would be false.

   In C code, a __SIZE_TYPE__ calloc (__SIZE_TYPE__, __SIZE_TYPE__) user
   declaration has obviously a mismatching signature from the standard, and
   its function_decl tree won't be unified by
   gcc/c-decl.cc:match_builtin_function_types.

   Yet in both cases the analyzer should treat the calls as a builtin calloc
   so that extra attributes unspecified by the standard but added by GCC
   (e.g. sprintf attributes in gcc/builtins.def), useful for the detection of
   dangerous behavior, are indeed processed.

   Therefore for those cases when a "builtin flag" is not added by the FE,
   builtins' kf are derived from builtin_known_function, whose method
   builtin_known_function::builtin_decl returns the builtin's
   function_decl tree as defined in gcc/builtins.def, with all the extra
   attributes. */

const builtin_known_function *
region_model::get_builtin_kf (const gcall &call,
			       region_model_context *ctxt /* = nullptr */) const
{
  /* get_fndecl_for_call is non-const (it may emit diagnostics via CTXT),
     hence the const_cast on this const member function.  */
  region_model *mut_this = const_cast <region_model *> (this);
  tree callee_fndecl = mut_this->get_fndecl_for_call (call, ctxt);
  if (! callee_fndecl)
    return nullptr;

  call_details cd (call, mut_this, ctxt);
  if (const known_function *kf = get_known_function (callee_fndecl, cd))
    return kf->dyn_cast_builtin_kf ();

  return nullptr;
}
2095 :
/* Subclass of custom_edge_info for use by exploded_edges that represent
   an exception being thrown from a call we don't have the code for. */

class exception_thrown_from_unrecognized_call : public custom_edge_info
{
public:
  /* CALL is the unrecognized call; FNDECL may be NULL_TREE when the
     callee could not be identified.  */
  exception_thrown_from_unrecognized_call (const gcall &call,
					   tree fndecl)
  : m_call (call),
    m_fndecl (fndecl)
  {
  }

  void print (pretty_printer *pp) const final override
  {
    if (m_fndecl)
      pp_printf (pp, "if %qD throws an exception...", m_fndecl);
    else
      pp_printf (pp, "if the called function throws an exception...");
  };

  /* Simulate the throw: conjure an exception object of unknown type
     and push it onto MODEL's stack of thrown exceptions.  */
  bool
  update_model (region_model *model,
		const exploded_edge *,
		region_model_context *ctxt) const final override
  {
    /* Allocate an exception and set it as the current exception. */
    const region *exception_reg
      = model->get_or_create_region_for_heap_alloc
	  (nullptr, /* We don't know the size of the region. */
	   ctxt);

    region_model_manager *mgr = model->get_manager ();
    conjured_purge p (model, ctxt);

    /* The contents of the region are some conjured svalue.
       Conjured-svalue indices 0/1/2 distinguish the exception object,
       its typeinfo, and its destructor respectively.  */
    const svalue *exception_sval
      = mgr->get_or_create_conjured_svalue (NULL_TREE,
					    &m_call,
					    exception_reg, p, 0);
    model->set_value (exception_reg, exception_sval, ctxt);
    const svalue *exception_ptr_sval
      = mgr->get_ptr_svalue (ptr_type_node, exception_reg);
    const svalue *tinfo_sval
      = mgr->get_or_create_conjured_svalue (ptr_type_node,
					    &m_call,
					    exception_reg, p, 1);
    const svalue *destructor_sval
      = mgr->get_or_create_conjured_svalue (ptr_type_node,
					    &m_call,
					    exception_reg, p, 2);

    /* Push a new exception_node on the model's thrown exception stack. */
    exception_node eh_node (exception_ptr_sval, tinfo_sval, destructor_sval);
    model->push_thrown_exception (eh_node);

    return true;
  }

  /* Add a "throw from call to external fn" event to EMISSION_PATH so that
     diagnostics explain where the hypothetical throw happened.  */
  void
  add_events_to_path (checker_path *emission_path,
		      const exploded_edge &eedge,
		      pending_diagnostic &) const final override
  {
    const exploded_node *dst_node = eedge.m_dest;
    const program_point &dst_point = dst_node->get_point ();
    const int dst_stack_depth = dst_point.get_stack_depth ();

    emission_path->add_event
      (std::make_unique<throw_from_call_to_external_fn_event>
	 (event_loc_info (m_call.location,
			  dst_point.get_fndecl (),
			  dst_stack_depth),
	  dst_node,
	  m_call,
	  m_fndecl));
  }

  /* Create the exploded node for the "exception thrown" outcome, and
     immediately add unwind edges from it (rather than adding it to the
     worklist).  */
  exploded_node *
  create_enode (exploded_graph &eg,
		const program_point &point,
		program_state &&state,
		exploded_node *enode_for_diag,
		region_model_context *ctxt) const final override
  {
    exploded_node *thrown_enode
      = eg.get_or_create_node (point, state, enode_for_diag,
			       /* Don't add to worklist. */
			       false);
    if (!thrown_enode)
      return nullptr;

    /* Add successor edges for thrown_enode "by hand" for the exception. */
    eg.unwind_from_exception (*thrown_enode,
			      &m_call,
			      ctxt);
    return thrown_enode;
  }

private:
  const gcall &m_call;
  tree m_fndecl; // could be null
};
2199 :
/* Get a set of functions that are assumed to not throw exceptions. */

static function_set
get_fns_assumed_not_to_throw ()
{
  // TODO: populate this list more fully
  static const char * const fn_names[] = {
    /* This array must be kept sorted. */

    "fclose"
  };
  const size_t count = ARRAY_SIZE (fn_names);
  /* function_set performs lookups on the array, hence the sortedness
     requirement above.  */
  function_set fs (fn_names, count);
  return fs;
}
2215 :
/* Return true if CALL could throw an exception.
   FNDECL could be NULL_TREE.

   The checks are ordered cheapest-first: global flags, then the
   per-stmt nothrow flag, then the by-name allowlist. */

static bool
can_throw_p (const gcall &call, tree fndecl)
{
  if (!flag_exceptions)
    return false;

  /* Compatibility flag to allow the user to assume external functions
     never throw exceptions.  This may be useful when using the analyzer
     on C code that is compiled with -fexceptions, but for which the headers
     haven't yet had "nothrow" attributes systematically added. */
  if (flag_analyzer_assume_nothrow)
    return false;

  if (gimple_call_nothrow_p (&call))
    return false;

  if (fndecl)
    {
      /* Known library functions assumed not to throw, even without
	 an explicit "nothrow" attribute.  */
      const function_set fs = get_fns_assumed_not_to_throw ();
      if (fs.contains_decl_p (fndecl))
	return false;
    }

  return true;
}
2244 :
/* Given CALL where we don't know what code is being called
   (by not having the body of FNDECL, or having NULL_TREE for FNDECL),
   potentially bifurcate control flow to simulate the call throwing
   an exception. */

void
region_model::check_for_throw_inside_call (const gcall &call,
					   tree fndecl,
					   region_model_context *ctxt)
{
  /* Bifurcation is only meaningful when we have a context to record
     the extra out-edge in.  */
  if (!ctxt)
    return;

  /* Could this function throw an exception?
     If so, add an extra e-edge for that. */
  if (!can_throw_p (call, fndecl))
    return;

  auto throws_exception
    = std::make_unique<exception_thrown_from_unrecognized_call> (call, fndecl);
  ctxt->bifurcate (std::move (throws_exception));
}
2267 :
/* A subclass of pending_diagnostic for complaining about jumps through NULL
   function pointers. */

class jump_through_null : public pending_diagnostic_subclass<jump_through_null>
{
public:
  jump_through_null (const gcall &call)
  : m_call (call)
  {}

  const char *get_kind () const final override
  {
    return "jump_through_null";
  }

  /* Two instances are duplicates iff they refer to the same call stmt;
     comparing the gcall addresses suffices since stmts are unique.  */
  bool operator== (const jump_through_null &other) const
  {
    return &m_call == &other.m_call;
  }

  int get_controlling_option () const final override
  {
    return OPT_Wanalyzer_jump_through_null;
  }

  bool emit (diagnostic_emission_context &ctxt) final override
  {
    return ctxt.warn ("jump through null pointer");
  }

  bool describe_final_event (pretty_printer &pp,
			     const evdesc::final_event &) final override
  {
    pp_string (&pp, "jump through null pointer here");
    return true;
  }

private:
  const gcall &m_call;
};
/* Update this model for the CALL stmt, using CTXT to report any
   diagnostics - the first half.

   Updates to the region_model that should be made *before* sm-states
   are updated are done here; other updates to the region_model are done
   in region_model::on_call_post.

   Return true if the function call has unknown side effects (it wasn't
   recognized and we don't have a body for it, or are unable to tell which
   fndecl it is). */

bool
region_model::on_call_pre (const gcall &call, region_model_context *ctxt)
{
  call_details cd (call, this, ctxt);

  /* Special-case for IFN_DEFERRED_INIT.
     We want to report uninitialized variables with -fanalyzer (treating
     -ftrivial-auto-var-init= as purely a mitigation feature).
     Handle IFN_DEFERRED_INIT by treating it as no-op: don't touch the
     lhs of the call, so that it is still uninitialized from the point of
     view of the analyzer. */
  if (gimple_call_internal_p (&call)
      && gimple_call_internal_fn (&call) == IFN_DEFERRED_INIT)
    return false; /* No side effects. */

  /* Get svalues for all of the arguments at the callsite, to ensure that we
     complain about any uninitialized arguments.  This might lead to
     duplicates if any of the handling below also looks up the svalues,
     but the deduplication code should deal with that. */
  if (ctxt)
    check_call_args (cd);

  tree callee_fndecl = get_fndecl_for_call (call, ctxt);

  /* Internal fns with a registered known_function are fully handled
     by it.  */
  if (gimple_call_internal_p (&call))
    if (const known_function *kf
	  = get_known_function (gimple_call_internal_fn (&call)))
      {
	kf->impl_call_pre (cd);
	return false; /* No further side effects. */
      }

  if (!callee_fndecl)
    {
      /* Check for jump through nullptr. */
      if (ctxt)
	if (tree fn_ptr = gimple_call_fn (&call))
	  {
	    const svalue *fn_ptr_sval = get_rvalue (fn_ptr, ctxt);
	    if (fn_ptr_sval->all_zeroes_p ())
	      {
		ctxt->warn
		  (std::make_unique<jump_through_null> (call));
		ctxt->terminate_path ();
		return true;
	      }
	  }

      check_for_throw_inside_call (call, NULL_TREE, ctxt);
      cd.set_any_lhs_with_defaults ();
      return true; /* Unknown side effects. */
    }

  if (const known_function *kf = get_known_function (callee_fndecl, cd))
    {
      kf->impl_call_pre (cd);
      return false; /* No further side effects. */
    }

  cd.set_any_lhs_with_defaults ();

  /* const/pure callees can't write to caller-visible state.  */
  const int callee_fndecl_flags = flags_from_decl_or_type (callee_fndecl);
  if (callee_fndecl_flags & (ECF_CONST | ECF_PURE))
    return false; /* No side effects. */

  if (fndecl_built_in_p (callee_fndecl))
    return true; /* Unknown side effects. */

  if (!fndecl_has_gimple_body_p (callee_fndecl))
    {
      check_for_throw_inside_call (call, callee_fndecl, ctxt);
      return true; /* Unknown side effects. */
    }

  /* We have the body, so the interprocedural machinery will handle it.  */
  return false; /* No side effects. */
}
2395 :
/* Update this model for the CALL stmt, using CTXT to report any
   diagnostics - the second half.

   Updates to the region_model that should be made *after* sm-states
   are updated are done here; other updates to the region_model are done
   in region_model::on_call_pre.

   If UNKNOWN_SIDE_EFFECTS is true, also call handle_unrecognized_call
   to purge state. */

void
region_model::on_call_post (const gcall &call,
			    bool unknown_side_effects,
			    region_model_context *ctxt)
{
  if (tree callee_fndecl = get_fndecl_for_call (call, ctxt))
    {
      call_details cd (call, this, ctxt);
      if (const known_function *kf = get_known_function (callee_fndecl, cd))
	{
	  kf->impl_call_post (cd);
	  return;
	}
      /* Was this fndecl referenced by
	 __attribute__((malloc(FOO)))? */
      if (lookup_attribute ("*dealloc", DECL_ATTRIBUTES (callee_fndecl)))
	{
	  impl_deallocation_call (cd);
	  return;
	}
    }

  /* Unrecognized call with unknown side effects: conservatively clobber
     everything reachable from the call's arguments and from globals.  */
  if (unknown_side_effects)
    {
      handle_unrecognized_call (call, ctxt);
      if (ctxt)
	ctxt->maybe_did_work ();
    }
}
2435 :
/* Purge state involving SVAL from this region_model, using CTXT
   (if non-NULL) to purge other state in a program_state.

   For example, if we're at the def-stmt of an SSA name, then we need to
   purge any state for svalues that involve that SSA name.  This avoids
   false positives in loops, since a symbolic value referring to the
   SSA name will be referring to the previous value of that SSA name.

   For example, in:
     while ((e = hashmap_iter_next(&iter))) {
       struct oid2strbuf *e_strbuf = (struct oid2strbuf *)e;
       free (e_strbuf->value);
     }
   at the def-stmt of e_8:
     e_8 = hashmap_iter_next (&iter);
   we should purge the "freed" state of:
     INIT_VAL(CAST_REG(‘struct oid2strbuf’, (*INIT_VAL(e_8))).value)
   which is the "e_strbuf->value" value from the previous iteration,
   or we will erroneously report a double-free - the "e_8" within it
   refers to the previous value. */

void
region_model::purge_state_involving (const svalue *sval,
				     region_model_context *ctxt)
{
  /* Skip svalues (e.g. constants) that can never carry sm-state.  */
  if (!sval->can_have_associated_state_p ())
    return;
  /* Purge from each sub-component of the model in turn: the store,
     the constraint manager, and the dynamic-extents map.  */
  m_store.purge_state_involving (sval, m_mgr);
  m_constraints->purge_state_involving (sval);
  m_dynamic_extents.purge_state_involving (sval);
  if (ctxt)
    ctxt->purge_state_involving (sval);
}
2469 :
/* A pending_note subclass for adding a note about an
   __attribute__((access, ...)) to a diagnostic. */

class reason_attr_access : public pending_note_subclass<reason_attr_access>
{
public:
  reason_attr_access (tree callee_fndecl, const attr_access &access)
  : m_callee_fndecl (callee_fndecl),
    m_ptr_argno (access.ptrarg),
    m_access_str (TREE_STRING_POINTER (access.to_external_string ()))
  {
  }

  const char *get_kind () const final override { return "reason_attr_access"; }

  void emit () const final override
  {
    /* Make attribute names in the note into documentation links.  */
    auto_urlify_attributes sentinel;
    inform (DECL_SOURCE_LOCATION (m_callee_fndecl),
	    "parameter %i of %qD marked with attribute %qs",
	    m_ptr_argno + 1, m_callee_fndecl, m_access_str);
  }

  /* Deduplication: notes are equal if they describe the same attribute
     on the same parameter of the same function.  */
  bool operator== (const reason_attr_access &other) const
  {
    return (m_callee_fndecl == other.m_callee_fndecl
	    && m_ptr_argno == other.m_ptr_argno
	    && !strcmp (m_access_str, other.m_access_str));
  }

private:
  tree m_callee_fndecl;
  unsigned m_ptr_argno;
  const char *m_access_str;
};
2505 :
/* Check CALL a call to external function CALLEE_FNDECL based on
   any __attribute__ ((access, ....) on the latter, complaining to
   CTXT about any issues.

   Currently we merely call check_region_for_write on any regions
   pointed to by arguments marked with a "write_only" or "read_write"
   attribute. */

void
region_model::check_function_attr_access (const gcall &call,
					  tree callee_fndecl,
					  region_model_context *ctxt,
					  rdwr_map &rdwr_idx) const
{
  gcc_assert (callee_fndecl);
  gcc_assert (ctxt);

  tree fntype = TREE_TYPE (callee_fndecl);
  gcc_assert (fntype);

  unsigned argno = 0;

  /* Walk the parameter type list in parallel with the 0-based argument
     index, looking up each arg in the access map.  */
  for (tree iter = TYPE_ARG_TYPES (fntype); iter;
       iter = TREE_CHAIN (iter), ++argno)
    {
      const attr_access* access = rdwr_idx.get (argno);
      if (!access)
	continue;

      /* Ignore any duplicate entry in the map for the size argument. */
      if (access->ptrarg != argno)
	continue;

      if (access->mode == access_write_only
	  || access->mode == access_read_write)
	{
	  /* Subclass of annotating_context that
	     adds a note about the attr access to any saved diagnostics. */
	  class annotating_ctxt : public annotating_context
	  {
	  public:
	    annotating_ctxt (tree callee_fndecl,
			     const attr_access &access,
			     region_model_context *ctxt)
	    : annotating_context (ctxt),
	      m_callee_fndecl (callee_fndecl),
	      m_access (access)
	    {
	    }
	    void add_annotations () final override
	    {
	      add_note (std::make_unique<reason_attr_access>
			(m_callee_fndecl, m_access));
	    }
	  private:
	    tree m_callee_fndecl;
	    const attr_access &m_access;
	  };

	  /* Use this ctxt below so that any diagnostics get the
	     note added to them. */
	  annotating_ctxt my_ctxt (callee_fndecl, *access, ctxt);

	  tree ptr_tree = gimple_call_arg (&call, access->ptrarg);
	  const svalue *ptr_sval = get_rvalue (ptr_tree, &my_ctxt);
	  const region *reg = deref_rvalue (ptr_sval, ptr_tree, &my_ctxt);
	  check_region_for_write (reg, nullptr, &my_ctxt);
	  /* We don't use the size arg for now. */
	}
    }
}
2577 :
/* Subroutine of region_model::check_function_attr_null_terminated_string_arg,
   checking one instance of __attribute__((null_terminated_string_arg)). */

void
region_model::
check_one_function_attr_null_terminated_string_arg (const gcall &call,
						    tree callee_fndecl,
						    region_model_context *ctxt,
						    rdwr_map &rdwr_idx,
						    tree attr)
{
  gcc_assert (callee_fndecl);
  gcc_assert (ctxt);
  gcc_assert (attr);

  tree arg = TREE_VALUE (attr);
  if (!arg)
    return;

  /* Convert from 1-based to 0-based index. */
  unsigned int arg_idx = TREE_INT_CST_LOW (TREE_VALUE (arg)) - 1;

  /* If there's also an "access" attribute on the ptr param
     for reading with a size param specified, then that size
     limits the size of the possible read from the pointer. */
  if (const attr_access* access = rdwr_idx.get (arg_idx))
    if ((access->mode == access_read_only
	 || access->mode == access_read_write)
	&& access->sizarg != UINT_MAX)
      {
	call_details cd_checked (call, this, ctxt);
	const svalue *limit_sval
	  = cd_checked.get_arg_svalue (access->sizarg);
	const svalue *ptr_sval
	  = cd_checked.get_arg_svalue (arg_idx);
	/* Try reading all of the bytes expressed by the size param,
	   but without emitting warnings (via a null context). */
	const svalue *limited_sval
	  = read_bytes (deref_rvalue (ptr_sval, NULL_TREE, nullptr),
			NULL_TREE,
			limit_sval,
			nullptr);
	if (limited_sval->get_kind () == SK_POISONED)
	  {
	    /* Reading up to the truncation limit caused issues.
	       Assume that the string is meant to be terminated
	       before then, so perform a *checked* check for the
	       terminator. */
	    check_for_null_terminated_string_arg (cd_checked,
						  arg_idx);
	  }
	else
	  {
	    /* Reading up to the truncation limit seems OK; repeat
	       the read, but with checking enabled. */
	    read_bytes (deref_rvalue (ptr_sval, NULL_TREE, ctxt),
			NULL_TREE,
			limit_sval,
			ctxt);
	  }
	return;
      }

  /* Otherwise, we don't have an access-attribute limiting the read.
     Simulate a read up to the null terminator (if any). */

  call_details cd (call, this, ctxt);
  check_for_null_terminated_string_arg (cd, arg_idx);
}
2647 :
/* Check CALL a call to external function CALLEE_FNDECL for any uses
   of __attribute__ ((null_terminated_string_arg)), compaining
   to CTXT about any issues.

   Use RDWR_IDX for tracking uses of __attribute__ ((access, ....). */

void
region_model::
check_function_attr_null_terminated_string_arg (const gcall &call,
						tree callee_fndecl,
						region_model_context *ctxt,
						rdwr_map &rdwr_idx)
{
  gcc_assert (callee_fndecl);
  gcc_assert (ctxt);

  tree fntype = TREE_TYPE (callee_fndecl);
  gcc_assert (fntype);

  /* A function declaration can specify multiple attribute
     null_terminated_string_arg, each with one argument.
     Note that ATTR is re-pointed at each matching attribute found, and
     the loop's TREE_CHAIN then resumes from there, so every instance in
     the attribute list is visited.  */
  for (tree attr = TYPE_ATTRIBUTES (fntype); attr; attr = TREE_CHAIN (attr))
    {
      attr = lookup_attribute ("null_terminated_string_arg", attr);
      if (!attr)
	return;

      check_one_function_attr_null_terminated_string_arg (call, callee_fndecl,
							  ctxt, rdwr_idx,
							  attr);
    }
}
2680 :
/* Check CALL a call to external function CALLEE_FNDECL for any
   function attributes, complaining to CTXT about any issues. */

void
region_model::check_function_attrs (const gcall &call,
				    tree callee_fndecl,
				    region_model_context *ctxt)
{
  gcc_assert (callee_fndecl);
  gcc_assert (ctxt);

  tree fntype = TREE_TYPE (callee_fndecl);
  if (!fntype)
    return;

  /* Fast path: nothing to check when the type carries no attributes.  */
  if (!TYPE_ATTRIBUTES (fntype))
    return;

  /* Initialize a map of attribute access specifications for arguments
     to the function call. */
  rdwr_map rdwr_idx;
  init_attr_rdwr_indices (&rdwr_idx, TYPE_ATTRIBUTES (fntype));

  check_function_attr_access (call, callee_fndecl, ctxt, rdwr_idx);
  check_function_attr_null_terminated_string_arg (call, callee_fndecl,
						  ctxt, rdwr_idx);
}
2708 :
/* Handle a call CALL to a function with unknown behavior.

   Traverse the regions in this model, determining what regions are
   reachable from pointer arguments to CALL and from global variables,
   recursively.

   Set all reachable regions to new unknown values and purge sm-state
   from their values, and from values that point to them. */

void
region_model::handle_unrecognized_call (const gcall &call,
					region_model_context *ctxt)
{
  tree fndecl = get_fndecl_for_call (call, ctxt);

  if (fndecl && ctxt)
    check_function_attrs (call, fndecl, ctxt);

  reachable_regions reachable_regs (this);

  /* Determine the reachable regions and their mutability. */
  {
    /* Add globals and regions that already escaped in previous
       unknown calls. */
    m_store.for_each_cluster (reachable_regions::init_cluster_cb,
			      &reachable_regs);

    /* Params that are pointers. */
    tree iter_param_types = NULL_TREE;
    if (fndecl)
      iter_param_types = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
    for (unsigned arg_idx = 0; arg_idx < gimple_call_num_args (&call);
	 arg_idx++)
      {
	/* Track expected param type, where available.
	   The type list can be shorter than the arg list for variadic
	   calls, in which case param_type stays NULL_TREE.  */
	tree param_type = NULL_TREE;
	if (iter_param_types)
	  {
	    param_type = TREE_VALUE (iter_param_types);
	    gcc_assert (param_type);
	    iter_param_types = TREE_CHAIN (iter_param_types);
	  }

	tree parm = gimple_call_arg (&call, arg_idx);
	const svalue *parm_sval = get_rvalue (parm, ctxt);
	reachable_regs.handle_parm (parm_sval, param_type);
      }
  }

  uncertainty_t *uncertainty = ctxt ? ctxt->get_uncertainty () : nullptr;

  /* Purge sm-state for the svalues that were reachable,
     both in non-mutable and mutable form. */
  for (svalue_set::iterator iter
	 = reachable_regs.begin_reachable_svals ();
       iter != reachable_regs.end_reachable_svals (); ++iter)
    {
      const svalue *sval = (*iter);
      if (ctxt)
	ctxt->on_unknown_change (sval, false);
    }
  for (svalue_set::iterator iter
	 = reachable_regs.begin_mutable_svals ();
       iter != reachable_regs.end_mutable_svals (); ++iter)
    {
      const svalue *sval = (*iter);
      if (ctxt)
	ctxt->on_unknown_change (sval, true);
      if (uncertainty)
	uncertainty->on_mutable_sval_at_unknown_call (sval);
    }

  /* Mark any clusters that have escaped. */
  reachable_regs.mark_escaped_clusters (ctxt);

  /* Update bindings for all clusters that have escaped, whether above,
     or previously. */
  m_store.on_unknown_fncall (call, m_mgr->get_store_manager (),
			     conjured_purge (this, ctxt));

  /* Purge dynamic extents from any regions that have escaped mutably:
     realloc could have been called on them. */
  for (hash_set<const region *>::iterator
	 iter = reachable_regs.begin_mutable_base_regs ();
       iter != reachable_regs.end_mutable_base_regs ();
       ++iter)
    {
      const region *base_reg = (*iter);
      unset_dynamic_extents (base_reg);
    }
}
2800 :
2801 : /* Traverse the regions in this model, determining what regions are
2802 : reachable from the store and populating *OUT.
2803 :
2804 : If EXTRA_SVAL is non-NULL, treat it as an additional "root"
2805 : for reachability (for handling return values from functions when
2806 : analyzing return of the only function on the stack).
2807 :
2808 : If UNCERTAINTY is non-NULL, treat any svalues that were recorded
2809 : within it as being maybe-bound as additional "roots" for reachability.
2810 :
2811 : Find svalues that haven't leaked. */
2812 :
void
region_model::get_reachable_svalues (svalue_set *out,
				     const svalue *extra_sval,
				     const uncertainty_t *uncertainty)
{
  reachable_regions reachable_regs (this);

  /* Add globals and regions that already escaped in previous
     unknown calls.  */
  m_store.for_each_cluster (reachable_regions::init_cluster_cb,
			    &reachable_regs);

  /* Optional extra root (e.g. a function's return value).  */
  if (extra_sval)
    reachable_regs.handle_sval (extra_sval);

  /* Treat any maybe-bound svalues recorded in UNCERTAINTY as
     additional roots for the reachability walk.  */
  if (uncertainty)
    for (uncertainty_t::iterator iter
	   = uncertainty->begin_maybe_bound_svals ();
	 iter != uncertainty->end_maybe_bound_svals (); ++iter)
      reachable_regs.handle_sval (*iter);

  /* Get regions for locals that have explicitly bound values.  */
  for (store::cluster_map_t::iterator iter = m_store.begin ();
       iter != m_store.end (); ++iter)
    {
      const region *base_reg = (*iter).first;
      /* Only base regions directly within a stack frame count as
	 locals here; "false" means not treated as mutable.  */
      if (const region *parent = base_reg->get_parent_region ())
	if (parent->get_kind () == RK_FRAME)
	  reachable_regs.add (base_reg, false);
    }

  /* Populate *OUT based on the values that were reachable.  */
  for (svalue_set::iterator iter
	 = reachable_regs.begin_reachable_svals ();
       iter != reachable_regs.end_reachable_svals (); ++iter)
    out->add (*iter);
}
2850 :
2851 : /* Update this model for the RETURN_STMT, using CTXT to report any
2852 : diagnostics. */
2853 :
2854 : void
2855 0 : region_model::on_return (const greturn *return_stmt, region_model_context *ctxt)
2856 : {
2857 0 : tree callee = get_current_function ()->decl;
2858 0 : tree lhs = DECL_RESULT (callee);
2859 0 : tree rhs = gimple_return_retval (return_stmt);
2860 :
2861 0 : if (lhs && rhs)
2862 : {
2863 0 : const svalue *sval = get_rvalue (rhs, ctxt);
2864 0 : const region *ret_reg = get_lvalue (lhs, ctxt);
2865 0 : set_value (ret_reg, sval, ctxt);
2866 : }
2867 0 : }
2868 :
2869 : /* Update this model for a call and return of setjmp/sigsetjmp at CALL within
2870 : ENODE, using CTXT to report any diagnostics.
2871 :
2872 : This is for the initial direct invocation of setjmp/sigsetjmp (which returns
2873 : 0), as opposed to any second return due to longjmp/sigsetjmp. */
2874 :
2875 : void
2876 34 : region_model::on_setjmp (const gcall &call,
2877 : const exploded_node &enode,
2878 : const superedge &sedge,
2879 : region_model_context *ctxt)
2880 : {
2881 34 : const svalue *buf_ptr = get_rvalue (gimple_call_arg (&call, 0), ctxt);
2882 34 : const region *buf_reg = deref_rvalue (buf_ptr, gimple_call_arg (&call, 0),
2883 : ctxt);
2884 :
2885 : /* Create a setjmp_svalue for this call and store it in BUF_REG's
2886 : region. */
2887 34 : if (buf_reg)
2888 : {
2889 34 : setjmp_record r (&enode, &sedge, call);
2890 34 : const svalue *sval
2891 34 : = m_mgr->get_or_create_setjmp_svalue (r, buf_reg->get_type ());
2892 34 : set_value (buf_reg, sval, ctxt);
2893 : }
2894 :
2895 : /* Direct calls to setjmp return 0. */
2896 34 : if (tree lhs = gimple_call_lhs (&call))
2897 : {
2898 16 : const svalue *new_sval
2899 16 : = m_mgr->get_or_create_int_cst (TREE_TYPE (lhs), 0);
2900 16 : const region *lhs_reg = get_lvalue (lhs, ctxt);
2901 16 : set_value (lhs_reg, new_sval, ctxt);
2902 : }
2903 34 : }
2904 :
2905 : /* Update this region_model for rewinding from a "longjmp" at LONGJMP_CALL
2906 : to a "setjmp" at SETJMP_CALL where the final stack depth should be
2907 : SETJMP_STACK_DEPTH. Pop any stack frames. Leak detection is *not*
2908 : done, and should be done by the caller. */
2909 :
void
region_model::on_longjmp (const gcall &longjmp_call, const gcall &setjmp_call,
			  int setjmp_stack_depth, region_model_context *ctxt)
{
  /* Evaluate the val, using the frame of the "longjmp" (i.e. before
     any frames are popped below).  */
  tree fake_retval = gimple_call_arg (&longjmp_call, 1);
  const svalue *fake_retval_sval = get_rvalue (fake_retval, ctxt);

  /* Pop any frames until we reach the stack depth of the function where
     setjmp was called.  Note: leak detection is deliberately *not* done
     here (per the function's header comment); the caller is responsible
     for that, hence the null args to pop_frame.  */
  gcc_assert (get_stack_depth () >= setjmp_stack_depth);
  while (get_stack_depth () > setjmp_stack_depth)
    pop_frame (nullptr, nullptr, ctxt, nullptr, false);

  gcc_assert (get_stack_depth () == setjmp_stack_depth);

  /* Assign to LHS of "setjmp" in new_state.  */
  if (tree lhs = gimple_call_lhs (&setjmp_call))
    {
      /* Passing 0 as the val to longjmp leads to setjmp returning 1.  */
      const svalue *zero_sval
	= m_mgr->get_or_create_int_cst (TREE_TYPE (fake_retval), 0);
      tristate eq_zero = eval_condition (fake_retval_sval, EQ_EXPR, zero_sval);
      /* If we have 0, use 1.  */
      if (eq_zero.is_true ())
	{
	  const svalue *one_sval
	    = m_mgr->get_or_create_int_cst (TREE_TYPE (fake_retval), 1);
	  fake_retval_sval = one_sval;
	}
      else
	{
	  /* Otherwise note that the value is nonzero.  (This branch is
	     also taken when the comparison is unknown, conservatively
	     constraining the value to be nonzero.)  */
	  m_constraints->add_constraint (fake_retval_sval, NE_EXPR, zero_sval);
	}

      /* Decorate the return value from setjmp as being unmergeable,
	 so that we don't attempt to merge states with it as zero
	 with states in which it's nonzero, leading to a clean distinction
	 in the exploded_graph between the first return and the second
	 return.  */
      fake_retval_sval = m_mgr->get_or_create_unmergeable (fake_retval_sval);

      const region *lhs_reg = get_lvalue (lhs, ctxt);
      set_value (lhs_reg, fake_retval_sval, ctxt);
    }
}
2957 :
2958 : /* Implementation of region_model::get_lvalue; the latter adds type-checking.
2959 :
2960 : Get the id of the region for PV within this region_model,
2961 : emitting any diagnostics to CTXT. */
2962 :
const region *
region_model::get_lvalue_1 (path_var pv, region_model_context *ctxt) const
{
  tree expr = pv.m_tree;

  gcc_assert (expr);

  switch (TREE_CODE (expr))
    {
    default:
      /* Unrecognized tree code: hand back a placeholder region
	 (potentially complaining via CTXT).  */
      return m_mgr->get_region_for_unexpected_tree_code (ctxt, expr,
							 dump_location_t ());

    case ARRAY_REF:
      {
	/* "ARRAY[INDEX]": an element region within the array's region.  */
	tree array = TREE_OPERAND (expr, 0);
	tree index = TREE_OPERAND (expr, 1);

	const region *array_reg = get_lvalue (array, ctxt);
	const svalue *index_sval = get_rvalue (index, ctxt);
	return m_mgr->get_element_region (array_reg,
					  TREE_TYPE (TREE_TYPE (array)),
					  index_sval);
      }
      break;

    case BIT_FIELD_REF:
      {
	/* A view of NUM_BITS bits starting at FIRST_BIT_OFFSET within
	   INNER_EXPR; both of those operands are required by the IR to
	   be INTEGER_CSTs.  */
	tree inner_expr = TREE_OPERAND (expr, 0);
	const region *inner_reg = get_lvalue (inner_expr, ctxt);
	tree num_bits = TREE_OPERAND (expr, 1);
	tree first_bit_offset = TREE_OPERAND (expr, 2);
	gcc_assert (TREE_CODE (num_bits) == INTEGER_CST);
	gcc_assert (TREE_CODE (first_bit_offset) == INTEGER_CST);
	bit_range bits (TREE_INT_CST_LOW (first_bit_offset),
			TREE_INT_CST_LOW (num_bits));
	return m_mgr->get_bit_range (inner_reg, TREE_TYPE (expr), bits);
      }
      break;

    case MEM_REF:
      {
	/* "*(PTR + OFFSET)": dereference PTR, then take an offset
	   region within the pointee.  */
	tree ptr = TREE_OPERAND (expr, 0);
	tree offset = TREE_OPERAND (expr, 1);
	const svalue *ptr_sval = get_rvalue (ptr, ctxt);
	const svalue *offset_sval = get_rvalue (offset, ctxt);
	const region *star_ptr = deref_rvalue (ptr_sval, ptr, ctxt);
	return m_mgr->get_offset_region (star_ptr,
					 TREE_TYPE (expr),
					 offset_sval);
      }
      break;

    case FUNCTION_DECL:
      return m_mgr->get_region_for_fndecl (expr);

    case LABEL_DECL:
      return m_mgr->get_region_for_label (expr);

    case VAR_DECL:
      /* Handle globals.  */
      if (is_global_var (expr))
	return m_mgr->get_region_for_global (expr);

      /* Fall through.  */

    case SSA_NAME:
    case PARM_DECL:
    case RESULT_DECL:
      {
	/* A local: look it up within the frame selected by the
	   path_var's stack depth (not necessarily the innermost
	   frame).  */
	gcc_assert (TREE_CODE (expr) == SSA_NAME
		    || TREE_CODE (expr) == PARM_DECL
		    || VAR_P (expr)
		    || TREE_CODE (expr) == RESULT_DECL);

	int stack_index = pv.m_stack_depth;
	const frame_region *frame = get_frame_at_index (stack_index);
	gcc_assert (frame);
	return frame->get_region_for_local (m_mgr, expr, ctxt);
      }

    case COMPONENT_REF:
      {
	/* obj.field */
	tree obj = TREE_OPERAND (expr, 0);
	tree field = TREE_OPERAND (expr, 1);
	const region *obj_reg = get_lvalue (obj, ctxt);
	return m_mgr->get_field_region (obj_reg, field);
      }
      break;

    case STRING_CST:
      return m_mgr->get_region_for_string (expr);
    }
}
3058 :
3059 : /* Assert that SRC_TYPE can be converted to DST_TYPE as a no-op. */
3060 :
3061 : static void
3062 5547259 : assert_compat_types (tree src_type, tree dst_type)
3063 : {
3064 5547259 : if (src_type && dst_type && !VOID_TYPE_P (dst_type))
3065 : {
3066 : #if CHECKING_P
3067 5546952 : if (!(useless_type_conversion_p (src_type, dst_type)))
3068 0 : internal_error ("incompatible types: %qT and %qT", src_type, dst_type);
3069 : #endif
3070 : }
3071 5547259 : }
3072 :
3073 : /* Return true if SRC_TYPE can be converted to DST_TYPE as a no-op. */
3074 :
3075 : bool
3076 13194 : compat_types_p (tree src_type, tree dst_type)
3077 : {
3078 13194 : if (src_type && dst_type && !VOID_TYPE_P (dst_type))
3079 13194 : if (!(useless_type_conversion_p (src_type, dst_type)))
3080 : return false;
3081 : return true;
3082 : }
3083 :
3084 : /* Get the region for PV within this region_model,
3085 : emitting any diagnostics to CTXT. */
3086 :
3087 : const region *
3088 2480138 : region_model::get_lvalue (path_var pv, region_model_context *ctxt) const
3089 : {
3090 2480138 : if (pv.m_tree == NULL_TREE)
3091 : return nullptr;
3092 :
3093 2480138 : const region *result_reg = get_lvalue_1 (pv, ctxt);
3094 2480138 : assert_compat_types (result_reg->get_type (), TREE_TYPE (pv.m_tree));
3095 2480138 : return result_reg;
3096 : }
3097 :
3098 : /* Get the region for EXPR within this region_model (assuming the most
3099 : recent stack frame if it's a local). */
3100 :
3101 : const region *
3102 1531459 : region_model::get_lvalue (tree expr, region_model_context *ctxt) const
3103 : {
3104 1531459 : return get_lvalue (path_var (expr, get_stack_depth () - 1), ctxt);
3105 : }
3106 :
3107 : /* Implementation of region_model::get_rvalue; the latter adds type-checking.
3108 :
3109 : Get the value of PV within this region_model,
3110 : emitting any diagnostics to CTXT. */
3111 :
const svalue *
region_model::get_rvalue_1 (path_var pv, region_model_context *ctxt) const
{
  gcc_assert (pv.m_tree);

  switch (TREE_CODE (pv.m_tree))
    {
    default:
      /* Unhandled tree code: conservatively fall back to "unknown".  */
      return m_mgr->get_or_create_unknown_svalue (TREE_TYPE (pv.m_tree));

    case ADDR_EXPR:
      {
	/* "&EXPR": a pointer to the lvalue's region.  */
	tree expr = pv.m_tree;
	tree op0 = TREE_OPERAND (expr, 0);
	const region *expr_reg = get_lvalue (op0, ctxt);
	return m_mgr->get_ptr_svalue (TREE_TYPE (expr), expr_reg);
      }
      break;

    case BIT_FIELD_REF:
      {
	/* Extract NUM_BITS bits at FIRST_BIT_OFFSET from the value of
	   the underlying region; the IR requires both operands to be
	   INTEGER_CSTs.  */
	tree expr = pv.m_tree;
	tree op0 = TREE_OPERAND (expr, 0);
	const region *reg = get_lvalue (op0, ctxt);
	tree num_bits = TREE_OPERAND (expr, 1);
	tree first_bit_offset = TREE_OPERAND (expr, 2);
	gcc_assert (TREE_CODE (num_bits) == INTEGER_CST);
	gcc_assert (TREE_CODE (first_bit_offset) == INTEGER_CST);
	bit_range bits (TREE_INT_CST_LOW (first_bit_offset),
			TREE_INT_CST_LOW (num_bits));
	return get_rvalue_for_bits (TREE_TYPE (expr), reg, bits, ctxt);
      }

    case VAR_DECL:
      if (DECL_HARD_REGISTER (pv.m_tree))
	{
	  /* If it has a hard register, it doesn't have a memory region
	     and can't be referred to as an lvalue.  */
	  return m_mgr->get_or_create_unknown_svalue (TREE_TYPE (pv.m_tree));
	}
      /* Fall through.  */
    case PARM_DECL:
    case SSA_NAME:
    case RESULT_DECL:
    case ARRAY_REF:
      {
	/* Memory reads: get the lvalue's region, then look up its
	   current binding in the store.  */
	const region *reg = get_lvalue (pv, ctxt);
	return get_store_value (reg, ctxt);
      }

    case REALPART_EXPR:
    case IMAGPART_EXPR:
    case VIEW_CONVERT_EXPR:
      {
	/* Unary operations: evaluate the operand, then wrap it in a
	   unaryop_svalue for this tree code.  */
	tree expr = pv.m_tree;
	tree arg = TREE_OPERAND (expr, 0);
	const svalue *arg_sval = get_rvalue (arg, ctxt);
	const svalue *sval_unaryop
	  = m_mgr->get_or_create_unaryop (TREE_TYPE (expr), TREE_CODE (expr),
					  arg_sval);
	return sval_unaryop;
      };

    case INTEGER_CST:
    case REAL_CST:
    case COMPLEX_CST:
    case VECTOR_CST:
    case STRING_CST:
    case RAW_DATA_CST:
      /* Constants map directly to constant_svalues.  */
      return m_mgr->get_or_create_constant_svalue (pv.m_tree);

    case POINTER_PLUS_EXPR:
      {
	/* Pointer arithmetic: "PTR + OFFSET".  */
	tree expr = pv.m_tree;
	tree ptr = TREE_OPERAND (expr, 0);
	tree offset = TREE_OPERAND (expr, 1);
	const svalue *ptr_sval = get_rvalue (ptr, ctxt);
	const svalue *offset_sval = get_rvalue (offset, ctxt);
	const svalue *sval_binop
	  = m_mgr->get_or_create_binop (TREE_TYPE (expr), POINTER_PLUS_EXPR,
					ptr_sval, offset_sval);
	return sval_binop;
      }

    /* Binary ops.  */
    case PLUS_EXPR:
    case MULT_EXPR:
    case BIT_AND_EXPR:
    case BIT_IOR_EXPR:
    case BIT_XOR_EXPR:
      {
	/* Evaluate both operands, then build a binop_svalue.  */
	tree expr = pv.m_tree;
	tree arg0 = TREE_OPERAND (expr, 0);
	tree arg1 = TREE_OPERAND (expr, 1);
	const svalue *arg0_sval = get_rvalue (arg0, ctxt);
	const svalue *arg1_sval = get_rvalue (arg1, ctxt);
	const svalue *sval_binop
	  = m_mgr->get_or_create_binop (TREE_TYPE (expr), TREE_CODE (expr),
					arg0_sval, arg1_sval);
	return sval_binop;
      }

    case COMPONENT_REF:
    case MEM_REF:
      {
	/* Memory reads, as above.  */
	const region *ref_reg = get_lvalue (pv, ctxt);
	return get_store_value (ref_reg, ctxt);
      }
    case OBJ_TYPE_REF:
      {
	/* Virtual call expression: evaluate the underlying expression.  */
	tree expr = OBJ_TYPE_REF_EXPR (pv.m_tree);
	return get_rvalue (expr, ctxt);
      }
    }
}
3228 :
3229 : /* Get the value of PV within this region_model,
3230 : emitting any diagnostics to CTXT. */
3231 :
3232 : const svalue *
3233 3090692 : region_model::get_rvalue (path_var pv, region_model_context *ctxt) const
3234 : {
3235 3090692 : if (pv.m_tree == NULL_TREE)
3236 : return nullptr;
3237 :
3238 3054343 : const svalue *result_sval = get_rvalue_1 (pv, ctxt);
3239 :
3240 3054343 : assert_compat_types (result_sval->get_type (), TREE_TYPE (pv.m_tree));
3241 :
3242 3054343 : result_sval = check_for_poison (result_sval, pv.m_tree, nullptr, ctxt);
3243 :
3244 3054343 : return result_sval;
3245 : }
3246 :
3247 : /* Get the value of EXPR within this region_model (assuming the most
3248 : recent stack frame if it's a local). */
3249 :
3250 : const svalue *
3251 3090198 : region_model::get_rvalue (tree expr, region_model_context *ctxt) const
3252 : {
3253 3090198 : return get_rvalue (path_var (expr, get_stack_depth () - 1), ctxt);
3254 : }
3255 :
3256 : /* Return true if this model is on a path with "main" as the entrypoint
3257 : (as opposed to one in which we're merely analyzing a subset of the
3258 : path through the code). */
3259 :
3260 : bool
3261 224182 : region_model::called_from_main_p () const
3262 : {
3263 224182 : if (!m_current_frame)
3264 : return false;
3265 : /* Determine if the oldest stack frame in this model is for "main". */
3266 217819 : const frame_region *frame0 = get_frame_at_index (0);
3267 217819 : gcc_assert (frame0);
3268 217819 : return id_equal (DECL_NAME (frame0->get_function ().decl), "main");
3269 : }
3270 :
3271 : /* Subroutine of region_model::get_store_value for when REG is (or is within)
3272 : a global variable that hasn't been touched since the start of this path
3273 : (or was implicitly touched due to a call to an unknown function). */
3274 :
const svalue *
region_model::get_initial_value_for_global (const region *reg) const
{
  /* Get the decl that REG is for (or is within).  */
  const decl_region *base_reg
    = reg->get_base_region ()->dyn_cast_decl_region ();
  gcc_assert (base_reg);
  tree decl = base_reg->get_decl ();

  /* Special-case: to avoid having to explicitly update all previously
     untracked globals when calling an unknown fn, they implicitly have
     an unknown value if an unknown call has occurred, unless this is
     static to-this-TU and hasn't escaped.  Globals that have escaped
     are explicitly tracked, so we shouldn't hit this case for them.  */
  if (m_store.called_unknown_fn_p ()
      && TREE_PUBLIC (decl)
      && !TREE_READONLY (decl))
    return m_mgr->get_or_create_unknown_svalue (reg->get_type ());

  /* If we are on a path from the entrypoint from "main" and we have a
     global decl defined in this TU that hasn't been touched yet, then
     the initial value of REG can be taken from the initialization value
     of the decl.  Read-only decls likewise get their initializer even
     when not called from "main", since they can't have been modified.  */
  if (called_from_main_p () || TREE_READONLY (decl))
    return reg->get_initial_value_at_main (m_mgr);

  /* Otherwise, return INIT_VAL(REG).  */
  return m_mgr->get_or_create_initial_value (reg);
}
3304 :
3305 : /* Get a value for REG, looking it up in the store, or otherwise falling
3306 : back to "initial" or "unknown" values.
3307 : Use CTXT to report any warnings associated with reading from REG. */
3308 :
const svalue *
region_model::get_store_value (const region *reg,
			       region_model_context *ctxt) const
{
  /* Getting the value of an empty region gives an unknown_svalue.  */
  if (reg->empty_p ())
    return m_mgr->get_or_create_unknown_svalue (reg->get_type ());

  /* Check for out-of-bounds reads first; if one was detected (and
     reported via CTXT), suppress the additional "poisoned value"
     checking when falling back to an initial value below.  */
  bool check_poisoned = true;
  if (check_region_for_read (reg, ctxt))
    check_poisoned = false;

  /* Special-case: handle var_decls in the constant pool.  */
  if (const decl_region *decl_reg = reg->dyn_cast_decl_region ())
    if (const svalue *sval = decl_reg->maybe_get_constant_value (m_mgr))
      return sval;

  /* The common case: an explicit binding in the store.  */
  const svalue *sval
    = m_store.get_any_binding (m_mgr->get_store_manager (), reg);
  if (sval)
    {
      /* Cast the bound value to REG's type, if REG has one.  */
      if (reg->get_type ())
	sval = m_mgr->get_or_create_cast (reg->get_type (), sval);
      return sval;
    }

  /* Special-case: read at a constant index within a STRING_CST.  */
  if (const offset_region *offset_reg = reg->dyn_cast_offset_region ())
    if (tree byte_offset_cst
	  = offset_reg->get_byte_offset ()->maybe_get_constant ())
      if (const string_region *str_reg
	    = reg->get_parent_region ()->dyn_cast_string_region ())
	{
	  tree string_cst = str_reg->get_string_cst ();
	  if (const svalue *char_sval
		= m_mgr->maybe_get_char_from_string_cst (string_cst,
							 byte_offset_cst))
	    return m_mgr->get_or_create_cast (reg->get_type (), char_sval);
	}

  /* Special-case: read the initial char of a STRING_CST.  */
  if (const cast_region *cast_reg = reg->dyn_cast_cast_region ())
    if (const string_region *str_reg
	  = cast_reg->get_parent_region ()->dyn_cast_string_region ())
      {
	tree string_cst = str_reg->get_string_cst ();
	tree byte_offset_cst = integer_zero_node;
	if (const svalue *char_sval
	      = m_mgr->maybe_get_char_from_string_cst (string_cst,
						       byte_offset_cst))
	  return m_mgr->get_or_create_cast (reg->get_type (), char_sval);
      }

  /* Otherwise we implicitly have the initial value of the region
     (if the cluster had been touched, binding_cluster::get_any_binding,
     would have returned UNKNOWN, and we would already have returned
     that above).  */

  /* Handle globals.  */
  if (reg->get_base_region ()->get_parent_region ()->get_kind ()
      == RK_GLOBALS)
    return get_initial_value_for_global (reg);

  return m_mgr->get_or_create_initial_value (reg, check_poisoned);
}
3374 :
3375 : /* Return false if REG does not exist, true if it may do.
3376 : This is for detecting regions within the stack that don't exist anymore
3377 : after frames are popped. */
3378 :
3379 : bool
3380 2643633 : region_model::region_exists_p (const region *reg) const
3381 : {
3382 : /* If within a stack frame, check that the stack frame is live. */
3383 2643633 : if (const frame_region *enclosing_frame = reg->maybe_get_frame_region ())
3384 : {
3385 : /* Check that the current frame is the enclosing frame, or is called
3386 : by it. */
3387 2611242 : for (const frame_region *iter_frame = get_current_frame (); iter_frame;
3388 560273 : iter_frame = iter_frame->get_calling_frame ())
3389 2595095 : if (iter_frame == enclosing_frame)
3390 : return true;
3391 : return false;
3392 : }
3393 :
3394 : return true;
3395 : }
3396 :
3397 : /* Get a region for referencing PTR_SVAL, creating a region if need be, and
3398 : potentially generating warnings via CTXT.
3399 : PTR_SVAL must be of pointer type.
3400 : PTR_TREE if non-NULL can be used when emitting diagnostics. */
3401 :
const region *
region_model::deref_rvalue (const svalue *ptr_sval, tree ptr_tree,
			    region_model_context *ctxt,
			    bool add_nonnull_constraint) const
{
  gcc_assert (ptr_sval);
  gcc_assert (POINTER_TYPE_P (ptr_sval->get_type ()));

  /* If we're dereferencing PTR_SVAL, assume that it is non-NULL; add this
     as a constraint.  This suppresses false positives from
     -Wanalyzer-null-dereference for the case where we later have an
     if (PTR_SVAL) that would occur if we considered the false branch
     and transitioned the malloc state machine from start->null.  */
  if (add_nonnull_constraint)
    {
      tree null_ptr_cst = build_int_cst (ptr_sval->get_type (), 0);
      const svalue *null_ptr
	= m_mgr->get_or_create_constant_svalue (null_ptr_cst);
      m_constraints->add_constraint (ptr_sval, NE_EXPR, null_ptr);
    }

  switch (ptr_sval->get_kind ())
    {
    default:
      break;

    case SK_REGION:
      {
	/* A pointer to a known region: the dereference is simply that
	   region.  */
	const region_svalue *region_sval
	  = as_a <const region_svalue *> (ptr_sval);
	return region_sval->get_pointee ();
      }

    case SK_BINOP:
      {
	const binop_svalue *binop_sval
	  = as_a <const binop_svalue *> (ptr_sval);
	switch (binop_sval->get_op ())
	  {
	  case POINTER_PLUS_EXPR:
	    {
	      /* If we have a symbolic value expressing pointer arithmetic,
		 try to convert it to a suitable region: recursively
		 dereference the base pointer, then apply the offset.  */
	      const region *parent_region
		= deref_rvalue (binop_sval->get_arg0 (), NULL_TREE, ctxt);
	      const svalue *offset = binop_sval->get_arg1 ();
	      tree type= TREE_TYPE (ptr_sval->get_type ());
	      return m_mgr->get_offset_region (parent_region, type, offset);
	    }
	  default:
	    break;
	  }
      }
      break;

    case SK_POISONED:
      {
	/* Dereferencing a poisoned (e.g. uninitialized) pointer:
	   warn about it if we can name the pointer.  */
	if (ctxt)
	  {
	    tree ptr = get_representative_tree (ptr_sval);
	    /* If we can't get a representative tree for PTR_SVAL
	       (e.g. if it hasn't been bound into the store), then
	       fall back on PTR_TREE, if non-NULL.  */
	    if (!ptr)
	      ptr = ptr_tree;
	    if (ptr)
	      {
		const poisoned_svalue *poisoned_sval
		  = as_a <const poisoned_svalue *> (ptr_sval);
		enum poison_kind pkind = poisoned_sval->get_poison_kind ();
		ctxt->warn (std::make_unique<poisoned_value_diagnostic>
			      (ptr, pkind, nullptr, nullptr));
	      }
	  }
      }
      break;
    }

  /* Otherwise, fall back on a symbolic region, i.e. "*PTR_SVAL".  */
  return m_mgr->get_symbolic_region (ptr_sval);
}
3482 :
3483 : /* Attempt to get BITS within any value of REG, as TYPE.
3484 : In particular, extract values from compound_svalues for the case
3485 : where there's a concrete binding at BITS.
3486 : Return an unknown svalue if we can't handle the given case.
3487 : Use CTXT to report any warnings associated with reading from REG. */
3488 :
3489 : const svalue *
3490 130 : region_model::get_rvalue_for_bits (tree type,
3491 : const region *reg,
3492 : const bit_range &bits,
3493 : region_model_context *ctxt) const
3494 : {
3495 130 : const svalue *sval = get_store_value (reg, ctxt);
3496 130 : return m_mgr->get_or_create_bits_within (type, bits, sval);
3497 : }
3498 :
3499 : /* A subclass of pending_diagnostic for complaining about writes to
3500 : constant regions of memory. */
3501 :
class write_to_const_diagnostic
: public pending_diagnostic_subclass<write_to_const_diagnostic>
{
public:
  /* REG is the region written to; DECL is the const object, function,
     or label that REG is (or is within).  */
  write_to_const_diagnostic (const region *reg, tree decl)
  : m_reg (reg), m_decl (decl)
  {}

  const char *get_kind () const final override
  {
    return "write_to_const_diagnostic";
  }

  /* Deduplication: two instances are "the same" diagnostic if they
     refer to the same region and decl.  */
  bool operator== (const write_to_const_diagnostic &other) const
  {
    return (m_reg == other.m_reg
	    && m_decl == other.m_decl);
  }

  int get_controlling_option () const final override
  {
    return OPT_Wanalyzer_write_to_const;
  }

  bool emit (diagnostic_emission_context &ctxt) final override
  {
    /* Phrase the warning according to what kind of thing was
       written to, then point at the decl's declaration.  */
    auto_diagnostic_group d;
    bool warned;
    switch (m_reg->get_kind ())
      {
      default:
	warned = ctxt.warn ("write to %<const%> object %qE", m_decl);
	break;
      case RK_FUNCTION:
	warned = ctxt.warn ("write to function %qE", m_decl);
	break;
      case RK_LABEL:
	warned = ctxt.warn ("write to label %qE", m_decl);
	break;
      }
    if (warned)
      inform (DECL_SOURCE_LOCATION (m_decl), "declared here");
    return warned;
  }

  /* Describe the final event in the diagnostic path, using the same
     phrasing distinctions as emit above.  */
  bool
  describe_final_event (pretty_printer &pp,
			const evdesc::final_event &) final override
  {
    switch (m_reg->get_kind ())
      {
      default:
	{
	  pp_printf (&pp,
		     "write to %<const%> object %qE here", m_decl);
	  return true;
	}
      case RK_FUNCTION:
	{
	  pp_printf (&pp,
		     "write to function %qE here", m_decl);
	  return true;
	}
      case RK_LABEL:
	{
	  pp_printf (&pp,
		     "write to label %qE here", m_decl);
	  return true;
	}
      }
  }

private:
  const region *m_reg;
  tree m_decl;
};
3578 :
3579 : /* A subclass of pending_diagnostic for complaining about writes to
3580 : string literals. */
3581 :
3582 : class write_to_string_literal_diagnostic
3583 : : public pending_diagnostic_subclass<write_to_string_literal_diagnostic>
3584 : {
3585 : public:
3586 51 : write_to_string_literal_diagnostic (const region *reg)
3587 51 : : m_reg (reg)
3588 : {}
3589 :
3590 337 : const char *get_kind () const final override
3591 : {
3592 337 : return "write_to_string_literal_diagnostic";
3593 : }
3594 :
3595 47 : bool operator== (const write_to_string_literal_diagnostic &other) const
3596 : {
3597 47 : return m_reg == other.m_reg;
3598 : }
3599 :
3600 94 : int get_controlling_option () const final override
3601 : {
3602 94 : return OPT_Wanalyzer_write_to_string_literal;
3603 : }
3604 :
3605 43 : bool emit (diagnostic_emission_context &ctxt) final override
3606 : {
3607 43 : return ctxt.warn ("write to string literal");
3608 : /* Ideally we would show the location of the STRING_CST as well,
3609 : but it is not available at this point. */
3610 : }
3611 :
3612 : bool
3613 86 : describe_final_event (pretty_printer &pp,
3614 : const evdesc::final_event &) final override
3615 : {
3616 86 : pp_string (&pp, "write to string literal here");
3617 86 : return true;
3618 : }
3619 :
3620 : private:
3621 : const region *m_reg;
3622 : };
3623 :
/* Use CTXT to warn if DEST_REG is a region that shouldn't be written to.  */
3625 :
void
region_model::check_for_writable_region (const region* dest_reg,
					 region_model_context *ctxt) const
{
  /* Fail gracefully if CTXT is nullptr.  */
  if (!ctxt)
    return;

  /* Classify the write by the kind of the base region written to.  */
  const region *base_reg = dest_reg->get_base_region ();
  switch (base_reg->get_kind ())
    {
    default:
      break;
    case RK_FUNCTION:
      {
	/* Writing into the code of a function.  */
	const function_region *func_reg = as_a <const function_region *> (base_reg);
	tree fndecl = func_reg->get_fndecl ();
	ctxt->warn
	  (std::make_unique<write_to_const_diagnostic>
	     (func_reg, fndecl));
      }
      break;
    case RK_LABEL:
      {
	/* Writing to a label.  */
	const label_region *label_reg = as_a <const label_region *> (base_reg);
	tree label = label_reg->get_label ();
	ctxt->warn
	  (std::make_unique<write_to_const_diagnostic>
	     (label_reg, label));
      }
      break;
    case RK_DECL:
      {
	const decl_region *decl_reg = as_a <const decl_region *> (base_reg);
	tree decl = decl_reg->get_decl ();
	/* Warn about writes to const globals.
	   Don't warn for writes to const locals, and params in particular,
	   since we would warn in push_frame when setting them up (e.g the
	   "this" param is "T* const").  */
	if (TREE_READONLY (decl)
	    && is_global_var (decl))
	  ctxt->warn
	    (std::make_unique<write_to_const_diagnostic> (dest_reg, decl));
      }
      break;
    case RK_STRING:
      /* Writing into a string literal.  */
      ctxt->warn
	(std::make_unique<write_to_string_literal_diagnostic> (dest_reg));
      break;
    }
}
3677 :
3678 : /* Get the capacity of REG in bytes. */
3679 :
const svalue *
region_model::get_capacity (const region *reg) const
{
  switch (reg->get_kind ())
    {
    default:
      break;
    case RK_DECL:
      {
	const decl_region *decl_reg = as_a <const decl_region *> (reg);
	tree decl = decl_reg->get_decl ();
	if (TREE_CODE (decl) == SSA_NAME)
	  {
	    /* For an SSA name, use its type's size.
	       NOTE(review): TYPE_SIZE is in bits, whereas
	       decl_init_size below is in bytes and this function's
	       header says "bytes" — verify the intended units.  */
	    tree type = TREE_TYPE (decl);
	    tree size = TYPE_SIZE (type);
	    return get_rvalue (size, nullptr);
	  }
	else
	  {
	    /* Otherwise use the size of the decl's storage/initializer,
	       if known.  */
	    tree size = decl_init_size (decl, false);
	    if (size)
	      return get_rvalue (size, nullptr);
	  }
      }
      break;
    case RK_SIZED:
      /* Look through sized regions to get at the capacity
	 of the underlying regions.  */
      return get_capacity (reg->get_parent_region ());
    case RK_STRING:
      {
	/* "Capacity" here means "size".  */
	const string_region *string_reg = as_a <const string_region *> (reg);
	tree string_cst = string_reg->get_string_cst ();
	return m_mgr->get_or_create_int_cst (size_type_node,
					     TREE_STRING_LENGTH (string_cst));
      }
      break;
    }

  /* Fall back on any dynamic extent recorded for REG (e.g. for heap
     allocations), or "unknown".  */
  if (const svalue *recorded = get_dynamic_extents (reg))
    return recorded;

  return m_mgr->get_or_create_unknown_svalue (sizetype);
}
3725 :
3726 : /* If CTXT is non-NULL, use it to warn about any problems accessing REG,
3727 : using DIR to determine if this access is a read or write.
3728 : Return TRUE if an OOB access was detected.
3729 : If SVAL_HINT is non-NULL, use it as a hint in diagnostics
3730 : about the value that would be written to REG. */
3731 :
3732 : bool
3733 4445735 : region_model::check_region_access (const region *reg,
3734 : enum access_direction dir,
3735 : const svalue *sval_hint,
3736 : region_model_context *ctxt) const
3737 : {
3738 : /* Fail gracefully if CTXT is NULL. */
3739 4445735 : if (!ctxt)
3740 : return false;
3741 :
3742 837344 : bool oob_access_detected = false;
3743 837344 : check_region_for_taint (reg, dir, ctxt);
3744 837344 : if (!check_region_bounds (reg, dir, sval_hint, ctxt))
3745 773 : oob_access_detected = true;
3746 :
3747 837344 : switch (dir)
3748 : {
3749 0 : default:
3750 0 : gcc_unreachable ();
3751 : case access_direction::read:
3752 : /* Currently a no-op. */
3753 : break;
3754 271370 : case access_direction::write:
3755 271370 : check_for_writable_region (reg, ctxt);
3756 271370 : break;
3757 : }
3758 : return oob_access_detected;
3759 : }
3760 :
3761 : /* If CTXT is non-NULL, use it to warn about any problems writing to REG. */
3762 :
void
region_model::check_region_for_write (const region *dest_reg,
				      const svalue *sval_hint,
				      region_model_context *ctxt) const
{
  /* Delegate to the common read/write checker with "write" direction.  */
  check_region_access (dest_reg, access_direction::write, sval_hint, ctxt);
}
3770 :
3771 : /* If CTXT is non-NULL, use it to warn about any problems reading from REG.
3772 : Returns TRUE if an OOB read was detected. */
3773 :
bool
region_model::check_region_for_read (const region *src_reg,
				     region_model_context *ctxt) const
{
  /* Delegate to the common read/write checker with "read" direction;
     there is no value hint for reads.  */
  return check_region_access (src_reg, access_direction::read, nullptr, ctxt);
}
3780 :
3781 : /* Concrete subclass for casts of pointers that lead to trailing bytes. */
3782 :
class dubious_allocation_size
: public pending_diagnostic_subclass<dubious_allocation_size>
{
public:
  /* LHS: the pointer region being assigned to.
     RHS: the region of the allocated buffer.
     CAPACITY_SVAL: the symbolic capacity (in bytes) of RHS; must be non-null.
     EXPR: a tree for the capacity, for use in messages (may be NULL_TREE).
     STMT: the statement performing the assignment.  */
  dubious_allocation_size (const region *lhs, const region *rhs,
			   const svalue *capacity_sval, tree expr,
			   const gimple *stmt)
  : m_lhs (lhs), m_rhs (rhs),
    m_capacity_sval (capacity_sval), m_expr (expr),
    m_stmt (stmt),
    m_has_allocation_event (false)
  {
    gcc_assert (m_capacity_sval);
  }

  const char *get_kind () const final override
  {
    return "dubious_allocation_size";
  }

  /* Deduplicate on the statement and the capacity expression.  */
  bool operator== (const dubious_allocation_size &other) const
  {
    return (m_stmt == other.m_stmt
	    && pending_diagnostic::same_tree_p (m_expr, other.m_expr));
  }

  int get_controlling_option () const final override
  {
    return OPT_Wanalyzer_allocation_size;
  }

  bool emit (diagnostic_emission_context &ctxt) final override
  {
    /* CWE-131: Incorrect Calculation of Buffer Size.  */
    ctxt.add_cwe (131);

    return ctxt.warn ("allocated buffer size is not a multiple"
		      " of the pointee's size");
  }

  bool
  describe_final_event (pretty_printer &pp,
			const evdesc::final_event &) final override
  {
    tree pointee_type = TREE_TYPE (m_lhs->get_type ());
    /* If an allocation event was added, the capacity was already
       described there; just describe the assignment.  */
    if (m_has_allocation_event)
      {
	pp_printf (&pp,
		   "assigned to %qT here;"
		   " %<sizeof (%T)%> is %qE",
		   m_lhs->get_type (), pointee_type,
		   size_in_bytes (pointee_type));
	return true;
      }
    /* Fallback: Typically, we should always see an allocation_event
       before.  */
    if (m_expr)
      {
	if (TREE_CODE (m_expr) == INTEGER_CST)
	  {
	    pp_printf (&pp,
		       "allocated %E bytes and assigned to"
		       " %qT here; %<sizeof (%T)%> is %qE",
		       m_expr, m_lhs->get_type (), pointee_type,
		       size_in_bytes (pointee_type));
	    return true;
	  }
	else
	  {
	    pp_printf (&pp,
		       "allocated %qE bytes and assigned to"
		       " %qT here; %<sizeof (%T)%> is %qE",
		       m_expr, m_lhs->get_type (), pointee_type,
		       size_in_bytes (pointee_type));
	    return true;
	  }
      }

    /* No capacity expression available at all.  */
    pp_printf (&pp,
	       "allocated and assigned to %qT here;"
	       " %<sizeof (%T)%> is %qE",
	       m_lhs->get_type (), pointee_type,
	       size_in_bytes (pointee_type));
    return true;
  }

  /* Record that the path includes the allocation, so that
     describe_final_event can avoid repeating the capacity.  */
  void
  add_region_creation_events (const region *,
			      tree capacity,
			      const event_loc_info &loc_info,
			      checker_path &emission_path) final override
  {
    emission_path.add_event
      (std::make_unique<region_creation_event_allocation_size>
	 (capacity, loc_info));

    m_has_allocation_event = true;
  }

  void mark_interesting_stuff (interesting_t *interest) final override
  {
    interest->add_region_creation (m_rhs);
  }

  void
  maybe_add_sarif_properties (diagnostics::sarif_object &result_obj)
    const final override
  {
    auto &props = result_obj.get_or_create_properties ();
#define PROPERTY_PREFIX "gcc/analyzer/dubious_allocation_size/"
    props.set (PROPERTY_PREFIX "lhs", m_lhs->to_json ());
    props.set (PROPERTY_PREFIX "rhs", m_rhs->to_json ());
    props.set (PROPERTY_PREFIX "capacity_sval", m_capacity_sval->to_json ());
#undef PROPERTY_PREFIX
  }

private:
  const region *m_lhs;		/* Pointer region assigned to.  */
  const region *m_rhs;		/* Buffer region.  */
  const svalue *m_capacity_sval;	/* Capacity of m_rhs, in bytes.  */
  const tree m_expr;		/* Capacity as a tree, for messages.  */
  const gimple *m_stmt;		/* The assignment statement.  */
  bool m_has_allocation_event;	/* Whether the path shows the allocation.  */
};
3906 :
3907 : /* Return true on dubious allocation sizes for constant sizes. */
3908 :
3909 : static bool
3910 1859 : capacity_compatible_with_type (tree cst, tree pointee_size_tree,
3911 : bool is_struct)
3912 : {
3913 1859 : gcc_assert (TREE_CODE (cst) == INTEGER_CST);
3914 1859 : gcc_assert (TREE_CODE (pointee_size_tree) == INTEGER_CST);
3915 :
3916 1859 : unsigned HOST_WIDE_INT pointee_size = TREE_INT_CST_LOW (pointee_size_tree);
3917 1859 : unsigned HOST_WIDE_INT alloc_size = TREE_INT_CST_LOW (cst);
3918 :
3919 1859 : if (is_struct)
3920 619 : return alloc_size == 0 || alloc_size >= pointee_size;
3921 1240 : return alloc_size % pointee_size == 0;
3922 : }
3923 :
/* Overload with non-struct semantics: CST must be an exact multiple
   of POINTEE_SIZE_TREE.  */

static bool
capacity_compatible_with_type (tree cst, tree pointee_size_tree)
{
  return capacity_compatible_with_type (cst, pointee_size_tree, false);
}
3929 :
3930 : /* Checks whether SVAL could be a multiple of SIZE_CST.
3931 :
3932 : It works by visiting all svalues inside SVAL until it reaches
3933 : atomic nodes. From those, it goes back up again and adds each
3934 : node that is not a multiple of SIZE_CST to the RESULT_SET. */
3935 :
class size_visitor : public visitor
{
public:
  /* The constructor performs the whole traversal: it visits every
     svalue reachable from ROOT_SVAL, populating result_set.  */
  size_visitor (tree size_cst, const svalue *root_sval, constraint_manager *cm)
  : m_size_cst (size_cst), m_root_sval (root_sval), m_cm (cm)
  {
    m_root_sval->accept (this);
  }

  /* True if the root svalue was flagged as not a multiple of m_size_cst.  */
  bool is_dubious_capacity ()
  {
    return result_set.contains (m_root_sval);
  }

  void visit_constant_svalue (const constant_svalue *sval) final override
  {
    check_constant (sval->get_constant (), sval);
  }

  /* A conversion is dubious iff its operand is.  */
  void visit_unaryop_svalue (const unaryop_svalue *sval) final override
  {
    if (CONVERT_EXPR_CODE_P (sval->get_op ())
	&& result_set.contains (sval->get_arg ()))
      result_set.add (sval);
  }

  void visit_binop_svalue (const binop_svalue *sval) final override
  {
    const svalue *arg0 = sval->get_arg0 ();
    const svalue *arg1 = sval->get_arg1 ();

    switch (sval->get_op ())
      {
      case MULT_EXPR:
	/* A product is dubious only if both factors are; one compatible
	   factor makes the whole product a multiple.  */
	if (result_set.contains (arg0) && result_set.contains (arg1))
	  result_set.add (sval);
	break;
      case PLUS_EXPR:
      case MINUS_EXPR:
	/* A sum or difference is dubious if either operand is.  */
	if (result_set.contains (arg0) || result_set.contains (arg1))
	  result_set.add (sval);
	break;
      default:
	break;
      }
  }

  void visit_unmergeable_svalue (const unmergeable_svalue *sval) final override
  {
    if (result_set.contains (sval->get_arg ()))
      result_set.add (sval);
  }

  /* A widening svalue is dubious if either its base or its iterated
     value is.  */
  void visit_widening_svalue (const widening_svalue *sval) final override
  {
    const svalue *base = sval->get_base_svalue ();
    const svalue *iter = sval->get_iter_svalue ();

    if (result_set.contains (base) || result_set.contains (iter))
      result_set.add (sval);
  }

  void visit_initial_svalue (const initial_svalue *sval) final override
  {
    equiv_class_id id = equiv_class_id::null ();
    if (m_cm->get_equiv_class_by_svalue (sval, &id))
      {
	/* Known to be equal to a constant: check that constant.  */
	if (tree cst = id.get_obj (*m_cm).get_any_constant ())
	  check_constant (cst, sval);
      }
    else if (!m_cm->sval_constrained_p (sval))
      {
	/* Entirely unconstrained initial values are treated as dubious.  */
	result_set.add (sval);
      }
  }

  void visit_conjured_svalue (const conjured_svalue *sval) final override
  {
    equiv_class_id id = equiv_class_id::null ();
    if (m_cm->get_equiv_class_by_svalue (sval, &id))
      if (tree cst = id.get_obj (*m_cm).get_any_constant ())
	check_constant (cst, sval);
  }

private:
  /* Flag SVAL as dubious if the constant CST is not compatible with
     m_size_cst.  */
  void check_constant (tree cst, const svalue *sval)
  {
    switch (TREE_CODE (cst))
      {
      default:
	/* Assume all unhandled operands are compatible.  */
	break;
      case INTEGER_CST:
	if (!capacity_compatible_with_type (cst, m_size_cst))
	  result_set.add (sval);
	break;
      }
  }

  tree m_size_cst;		/* sizeof of the pointee type.  */
  const svalue *m_root_sval;	/* The capacity svalue under scrutiny.  */
  constraint_manager *m_cm;	/* Known constraints on svalues.  */
  svalue_set result_set; /* Used as a mapping of svalue*->bool.  */
};
4040 :
4041 : /* Return true if SIZE_CST is a power of 2, and we have
4042 : CAPACITY_SVAL == ((X | (Y - 1) ) + 1), since it is then a multiple
4043 : of SIZE_CST, as used by Linux kernel's round_up macro. */
4044 :
4045 : static bool
4046 1135 : is_round_up (tree size_cst,
4047 : const svalue *capacity_sval)
4048 : {
4049 1135 : if (!integer_pow2p (size_cst))
4050 : return false;
4051 1135 : const binop_svalue *binop_sval = capacity_sval->dyn_cast_binop_svalue ();
4052 1135 : if (!binop_sval)
4053 : return false;
4054 272 : if (binop_sval->get_op () != PLUS_EXPR)
4055 : return false;
4056 70 : tree rhs_cst = binop_sval->get_arg1 ()->maybe_get_constant ();
4057 70 : if (!rhs_cst)
4058 : return false;
4059 70 : if (!integer_onep (rhs_cst))
4060 : return false;
4061 :
4062 : /* We have CAPACITY_SVAL == (LHS + 1) for some LHS expression. */
4063 :
4064 4 : const binop_svalue *lhs_binop_sval
4065 4 : = binop_sval->get_arg0 ()->dyn_cast_binop_svalue ();
4066 4 : if (!lhs_binop_sval)
4067 : return false;
4068 4 : if (lhs_binop_sval->get_op () != BIT_IOR_EXPR)
4069 : return false;
4070 :
4071 4 : tree inner_rhs_cst = lhs_binop_sval->get_arg1 ()->maybe_get_constant ();
4072 4 : if (!inner_rhs_cst)
4073 : return false;
4074 :
4075 4 : if (wi::to_widest (inner_rhs_cst) + 1 != wi::to_widest (size_cst))
4076 : return false;
4077 : return true;
4078 : }
4079 :
4080 : /* Return true if CAPACITY_SVAL is known to be a multiple of SIZE_CST. */
4081 :
4082 : static bool
4083 1135 : is_multiple_p (tree size_cst,
4084 : const svalue *capacity_sval)
4085 : {
4086 1191 : if (const svalue *sval = capacity_sval->maybe_undo_cast ())
4087 : return is_multiple_p (size_cst, sval);
4088 :
4089 1135 : if (is_round_up (size_cst, capacity_sval))
4090 : return true;
4091 :
4092 : return false;
4093 : }
4094 :
4095 : /* Return true if we should emit a dubious_allocation_size warning
4096 : on assigning a region of capacity CAPACITY_SVAL bytes to a pointer
4097 : of type with size SIZE_CST, where CM expresses known constraints. */
4098 :
4099 : static bool
4100 1135 : is_dubious_capacity (tree size_cst,
4101 : const svalue *capacity_sval,
4102 : constraint_manager *cm)
4103 : {
4104 1135 : if (is_multiple_p (size_cst, capacity_sval))
4105 : return false;
4106 1131 : size_visitor v (size_cst, capacity_sval, cm);
4107 1131 : return v.is_dubious_capacity ();
4108 1131 : }
4109 :
4110 :
4111 : /* Return true if a struct or union either uses the inheritance pattern,
4112 : where the first field is a base struct, or the flexible array member
4113 : pattern, where the last field is an array without a specified size. */
4114 :
4115 : static bool
4116 3562 : struct_or_union_with_inheritance_p (tree struc)
4117 : {
4118 3562 : tree iter = TYPE_FIELDS (struc);
4119 3562 : if (iter == NULL_TREE)
4120 : return false;
4121 3554 : if (RECORD_OR_UNION_TYPE_P (TREE_TYPE (iter)))
4122 : return true;
4123 :
4124 : tree last_field;
4125 51893 : while (iter != NULL_TREE)
4126 : {
4127 48586 : last_field = iter;
4128 48586 : iter = DECL_CHAIN (iter);
4129 : }
4130 :
4131 3307 : if (last_field != NULL_TREE
4132 3307 : && TREE_CODE (TREE_TYPE (last_field)) == ARRAY_TYPE)
4133 : return true;
4134 :
4135 : return false;
4136 : }
4137 :
4138 : /* Return true if the lhs and rhs of an assignment have different types. */
4139 :
static bool
is_any_cast_p (const gimple *stmt)
{
  /* Assignment: either an explicit cast, or an lhs whose type differs
     from that of the first rhs operand.  */
  if (const gassign *assign = dyn_cast <const gassign *> (stmt))
    return gimple_assign_cast_p (assign)
	   || !pending_diagnostic::same_tree_p (
		  TREE_TYPE (gimple_assign_lhs (assign)),
		  TREE_TYPE (gimple_assign_rhs1 (assign)));
  /* Call: an lhs whose type differs from the callee's return type.  */
  else if (const gcall *call = dyn_cast <const gcall *> (stmt))
    {
      tree lhs = gimple_call_lhs (call);
      return lhs != NULL_TREE && !pending_diagnostic::same_tree_p (
				    TREE_TYPE (gimple_call_lhs (call)),
				    gimple_call_return_type (call));
    }

  /* Other statement kinds don't involve a cast.  */
  return false;
}
4158 :
4159 : /* On pointer assignments, check whether the buffer size of
4160 : RHS_SVAL is compatible with the type of the LHS_REG.
4161 : Use a non-null CTXT to report allocation size warnings. */
4162 :
void
region_model::check_region_size (const region *lhs_reg, const svalue *rhs_sval,
				 region_model_context *ctxt) const
{
  /* Fail gracefully without a context or a statement.  */
  if (!ctxt || ctxt->get_stmt () == nullptr)
    return;
  /* Only report warnings on assignments that actually change the type.  */
  if (!is_any_cast_p (ctxt->get_stmt ()))
    return;

  /* The lhs must be a pointer.  */
  tree pointer_type = lhs_reg->get_type ();
  if (pointer_type == NULL_TREE || !POINTER_TYPE_P (pointer_type))
    return;

  tree pointee_type = TREE_TYPE (pointer_type);
  /* Make sure that the type on the left-hand side actually has a size.  */
  if (pointee_type == NULL_TREE || VOID_TYPE_P (pointee_type)
      || TYPE_SIZE_UNIT (pointee_type) == NULL_TREE)
    return;

  /* Bail out early on function pointers.  */
  if (TREE_CODE (pointee_type) == FUNCTION_TYPE)
    return;

  /* Bail out early on pointers to structs where we can
     not deduce whether the buffer size is compatible.  */
  bool is_struct = RECORD_OR_UNION_TYPE_P (pointee_type);
  if (is_struct && struct_or_union_with_inheritance_p (pointee_type))
    return;

  tree pointee_size_tree = size_in_bytes (pointee_type);
  /* We give up if the type size is not known at compile-time or the
     type size is always compatible regardless of the buffer size.  */
  if (TREE_CODE (pointee_size_tree) != INTEGER_CST
      || integer_zerop (pointee_size_tree)
      || integer_onep (pointee_size_tree))
    return;

  const region *rhs_reg = deref_rvalue (rhs_sval, NULL_TREE, ctxt, false);
  const svalue *capacity = get_capacity (rhs_reg);
  switch (capacity->get_kind ())
    {
    case svalue_kind::SK_CONSTANT:
      {
	/* Concrete capacity: compare directly against sizeof (pointee).  */
	const constant_svalue *cst_cap_sval
	  = as_a <const constant_svalue *> (capacity);
	tree cst_cap = cst_cap_sval->get_constant ();
	if (TREE_CODE (cst_cap) == INTEGER_CST
	    && !capacity_compatible_with_type (cst_cap, pointee_size_tree,
					       is_struct))
	  ctxt->warn
	    (std::make_unique <dubious_allocation_size> (lhs_reg, rhs_reg,
							 capacity, cst_cap,
							 ctxt->get_stmt ()));
      }
      break;
    default:
      {
	/* Symbolic capacity: only handled for non-struct pointees.  */
	if (!is_struct)
	  {
	    if (is_dubious_capacity (pointee_size_tree,
				     capacity,
				     m_constraints))
	      {
		tree expr = get_representative_tree (capacity);
		ctxt->warn
		  (std::make_unique <dubious_allocation_size> (lhs_reg,
							       rhs_reg,
							       capacity, expr,
							       ctxt->get_stmt ()));
	      }
	  }
	break;
      }
    }
}
4239 :
4240 : /* Set the value of the region given by LHS_REG to the value given
4241 : by RHS_SVAL.
4242 : Use CTXT to report any warnings associated with writing to LHS_REG. */
4243 :
void
region_model::set_value (const region *lhs_reg, const svalue *rhs_sval,
			 region_model_context *ctxt)
{
  gcc_assert (lhs_reg);
  gcc_assert (rhs_sval);

  /* Setting the value of an empty region is a no-op.  */
  if (lhs_reg->empty_p ())
    return;

  /* Check for -Wanalyzer-allocation-size issues on pointer assignments.  */
  check_region_size (lhs_reg, rhs_sval, ctxt);

  /* Check writability and bounds of the destination.  */
  check_region_for_write (lhs_reg, rhs_sval, ctxt);

  m_store.set_value (m_mgr->get_store_manager(), lhs_reg, rhs_sval,
		     ctxt ? ctxt->get_uncertainty () : nullptr);
}
4262 :
4263 : /* Set the value of the region given by LHS to the value given by RHS. */
4264 :
4265 : void
4266 84 : region_model::set_value (tree lhs, tree rhs, region_model_context *ctxt)
4267 : {
4268 84 : const region *lhs_reg = get_lvalue (lhs, ctxt);
4269 84 : const svalue *rhs_sval = get_rvalue (rhs, ctxt);
4270 84 : gcc_assert (lhs_reg);
4271 84 : gcc_assert (rhs_sval);
4272 84 : set_value (lhs_reg, rhs_sval, ctxt);
4273 84 : }
4274 :
4275 : /* Issue a note specifying that a particular function parameter is expected
4276 : to be a valid null-terminated string. */
4277 :
4278 : static void
4279 147 : inform_about_expected_null_terminated_string_arg (const call_arg_details &ad)
4280 : {
4281 : // TODO: ideally we'd underline the param here
4282 147 : inform (DECL_SOURCE_LOCATION (ad.m_called_fndecl),
4283 : "argument %d of %qD must be a pointer to a null-terminated string",
4284 147 : ad.m_arg_idx + 1, ad.m_called_fndecl);
4285 147 : }
4286 :
4287 : /* A binding of a specific svalue at a concrete byte range. */
4288 :
4289 : struct fragment
4290 : {
4291 3194 : fragment ()
4292 3194 : : m_byte_range (0, 0), m_sval (nullptr)
4293 : {
4294 3194 : }
4295 :
4296 774 : fragment (const byte_range &bytes, const svalue *sval)
4297 774 : : m_byte_range (bytes), m_sval (sval)
4298 : {
4299 : }
4300 :
4301 1810 : static int cmp_ptrs (const void *p1, const void *p2)
4302 : {
4303 1810 : const fragment *f1 = (const fragment *)p1;
4304 1810 : const fragment *f2 = (const fragment *)p2;
4305 1810 : return byte_range::cmp (f1->m_byte_range, f2->m_byte_range);
4306 : }
4307 :
4308 : void
4309 2 : dump_to_pp (pretty_printer *pp) const
4310 : {
4311 2 : pp_string (pp, "fragment(");
4312 2 : m_byte_range.dump_to_pp (pp);
4313 2 : pp_string (pp, ", sval: ");
4314 2 : if (m_sval)
4315 2 : m_sval->dump_to_pp (pp, true);
4316 : else
4317 0 : pp_string (pp, "nullptr");
4318 2 : pp_string (pp, ")");
4319 2 : }
4320 :
4321 : byte_range m_byte_range;
4322 : const svalue *m_sval;
4323 : };
4324 :
4325 : /* Determine if there is a zero terminator somewhere in the
4326 : part of STRING_CST covered by BYTES (where BYTES is relative to the
4327 : start of the constant).
4328 :
4329 : Return a tristate:
4330 : - true if there definitely is a zero byte, writing to *OUT_BYTES_READ
4331 : the number of bytes from that would be read, including the zero byte.
4332 : - false if there definitely isn't a zero byte
4333 : - unknown if we don't know. */
4334 :
4335 : static tristate
4336 284 : string_cst_has_null_terminator (tree string_cst,
4337 : const byte_range &bytes,
4338 : byte_offset_t *out_bytes_read)
4339 : {
4340 284 : gcc_assert (bytes.m_start_byte_offset >= 0);
4341 :
4342 : /* If we're beyond the string_cst, reads are unsuccessful. */
4343 284 : if (tree cst_size = get_string_cst_size (string_cst))
4344 284 : if (TREE_CODE (cst_size) == INTEGER_CST)
4345 284 : if (bytes.m_start_byte_offset >= TREE_INT_CST_LOW (cst_size))
4346 0 : return tristate::unknown ();
4347 :
4348 : /* Assume all bytes after TREE_STRING_LENGTH are zero. This handles
4349 : the case where an array is initialized with a string_cst that isn't
4350 : as long as the array, where the remaining elements are
4351 : empty-initialized and thus zeroed. */
4352 284 : if (bytes.m_start_byte_offset >= TREE_STRING_LENGTH (string_cst))
4353 : {
4354 2 : *out_bytes_read = 1;
4355 2 : return tristate (true);
4356 : }
4357 :
4358 : /* Look for the first 0 byte within STRING_CST
4359 : from START_READ_OFFSET onwards. */
4360 282 : const byte_offset_t num_bytes_to_search
4361 564 : = std::min<byte_offset_t> ((TREE_STRING_LENGTH (string_cst)
4362 282 : - bytes.m_start_byte_offset),
4363 282 : bytes.m_size_in_bytes);
4364 282 : const char *start = (TREE_STRING_POINTER (string_cst)
4365 282 : + bytes.m_start_byte_offset.slow ());
4366 282 : if (num_bytes_to_search >= 0)
4367 282 : if (const void *p = memchr (start, 0, bytes.m_size_in_bytes.slow ()))
4368 : {
4369 162 : *out_bytes_read = (const char *)p - start + 1;
4370 162 : return tristate (true);
4371 : }
4372 :
4373 120 : *out_bytes_read = bytes.m_size_in_bytes;
4374 120 : return tristate (false);
4375 : }
4376 :
4377 : static tristate
4378 : svalue_byte_range_has_null_terminator (const svalue *sval,
4379 : const byte_range &bytes,
4380 : byte_offset_t *out_bytes_read,
4381 : logger *logger);
4382 :
4383 : /* Determine if there is a zero terminator somewhere in the
4384 : part of SVAL covered by BYTES (where BYTES is relative to the svalue).
4385 :
4386 : Return a tristate:
4387 : - true if there definitely is a zero byte, writing to *OUT_BYTES_READ
4388 : the number of bytes from that would be read, including the zero byte.
4389 : - false if there definitely isn't a zero byte
4390 : - unknown if we don't know.
4391 :
4392 : Use LOGGER (if non-null) for any logging. */
4393 :
4394 : static tristate
4395 610 : svalue_byte_range_has_null_terminator_1 (const svalue *sval,
4396 : const byte_range &bytes,
4397 : byte_offset_t *out_bytes_read,
4398 : logger *logger)
4399 : {
4400 610 : if (bytes.m_start_byte_offset == 0
4401 610 : && sval->all_zeroes_p ())
4402 : {
4403 : /* The initial byte of an all-zeroes SVAL is a zero byte. */
4404 22 : *out_bytes_read = 1;
4405 22 : return tristate (true);
4406 : }
4407 :
4408 588 : switch (sval->get_kind ())
4409 : {
4410 181 : case SK_CONSTANT:
4411 181 : {
4412 181 : tree cst
4413 181 : = as_a <const constant_svalue *> (sval)->get_constant ();
4414 181 : switch (TREE_CODE (cst))
4415 : {
4416 166 : case STRING_CST:
4417 166 : return string_cst_has_null_terminator (cst, bytes, out_bytes_read);
4418 15 : case INTEGER_CST:
4419 15 : if (bytes.m_start_byte_offset == 0
4420 15 : && integer_onep (TYPE_SIZE_UNIT (TREE_TYPE (cst))))
4421 : {
4422 : /* Model accesses to the initial byte of a 1-byte
4423 : INTEGER_CST. */
4424 13 : *out_bytes_read = 1;
4425 13 : if (zerop (cst))
4426 0 : return tristate (true);
4427 : else
4428 13 : return tristate (false);
4429 : }
4430 : /* Treat any other access to an INTEGER_CST as unknown. */
4431 2 : return tristate::TS_UNKNOWN;
4432 :
4433 : default:
4434 : break;
4435 : }
4436 : }
4437 : break;
4438 :
4439 127 : case SK_INITIAL:
4440 127 : {
4441 127 : const initial_svalue *initial_sval = (const initial_svalue *)sval;
4442 127 : const region *reg = initial_sval->get_region ();
4443 127 : if (const string_region *string_reg = reg->dyn_cast_string_region ())
4444 : {
4445 118 : tree string_cst = string_reg->get_string_cst ();
4446 118 : return string_cst_has_null_terminator (string_cst,
4447 : bytes,
4448 118 : out_bytes_read);
4449 : }
4450 9 : return tristate::TS_UNKNOWN;
4451 : }
4452 69 : break;
4453 :
4454 69 : case SK_BITS_WITHIN:
4455 69 : {
4456 69 : const bits_within_svalue *bits_within_sval
4457 : = (const bits_within_svalue *)sval;
4458 69 : byte_range bytes_within_inner (0, 0);
4459 69 : if (bits_within_sval->get_bits ().as_byte_range (&bytes_within_inner))
4460 : {
4461 : /* Consider e.g. looking for null terminator of
4462 : bytes 2-4 of BITS_WITHIN(bytes 10-15 of inner_sval)
4463 :
4464 : This is equivalent to looking within bytes 12-14 of
4465 : inner_sval. */
4466 69 : const byte_offset_t start_byte_relative_to_inner
4467 69 : = (bytes.m_start_byte_offset
4468 69 : + bytes_within_inner.m_start_byte_offset);
4469 69 : const byte_offset_t next_byte_relative_to_inner
4470 69 : = (bytes.get_next_byte_offset ()
4471 69 : + bytes_within_inner.m_start_byte_offset);
4472 69 : if (next_byte_relative_to_inner > start_byte_relative_to_inner)
4473 : {
4474 69 : const byte_range relative_to_inner
4475 : (start_byte_relative_to_inner,
4476 69 : next_byte_relative_to_inner - start_byte_relative_to_inner);
4477 69 : const svalue *inner_sval
4478 69 : = bits_within_sval->get_inner_svalue ();
4479 69 : return svalue_byte_range_has_null_terminator (inner_sval,
4480 : relative_to_inner,
4481 : out_bytes_read,
4482 : logger);
4483 : }
4484 : }
4485 : }
4486 0 : break;
4487 :
4488 : default:
4489 : // TODO: it may be possible to handle other cases here.
4490 : break;
4491 : }
4492 211 : return tristate::TS_UNKNOWN;
4493 : }
4494 :
4495 : /* Like svalue_byte_range_has_null_terminator_1, but add logging. */
4496 :
static tristate
svalue_byte_range_has_null_terminator (const svalue *sval,
				       const byte_range &bytes,
				       byte_offset_t *out_bytes_read,
				       logger *logger)
{
  LOG_SCOPE (logger);
  if (logger)
    {
      /* Log the query before evaluating it.  */
      pretty_printer *pp = logger->get_printer ();
      logger->start_log_line ();
      bytes.dump_to_pp (pp);
      logger->log_partial (" of sval: ");
      sval->dump_to_pp (pp, true);
      logger->end_log_line ();
    }
  tristate ts
    = svalue_byte_range_has_null_terminator_1 (sval, bytes,
					       out_bytes_read, logger);
  if (logger)
    {
      /* Log the result; *OUT_BYTES_READ is only meaningful when the
	 result is "true".  */
      pretty_printer *pp = logger->get_printer ();
      logger->start_log_line ();
      pp_printf (pp, "has null terminator: %s", ts.as_string ());
      if (ts.is_true ())
	{
	  pp_string (pp, "; bytes read: ");
	  pp_wide_int (pp, *out_bytes_read, SIGNED);
	}
      logger->end_log_line ();
    }
  return ts;
}
4530 :
4531 : /* A frozen copy of a single base region's binding_cluster within a store,
4532 : optimized for traversal of the concrete parts in byte order.
4533 : This only captures concrete bindings, and is an implementation detail
4534 : of region_model::scan_for_null_terminator. */
4535 :
class iterable_cluster
{
public:
  /* Snapshot CLUSTER (which may be null, meaning "no bindings") into
     a byte-range-sorted vector of concrete fragments, plus the list
     of symbolic bindings.  */
  iterable_cluster (const binding_cluster *cluster)
  {
    if (!cluster)
      return;
    for (auto iter : cluster->get_map ().get_concrete_bindings ())
      {
	const bit_range &bits = iter.first;
	const svalue *sval = iter.second;

	byte_range fragment_bytes (0, 0);
	/* Bindings that aren't byte-aligned are dropped here.  */
	if (bits.as_byte_range (&fragment_bytes))
	  m_fragments.safe_push (fragment (fragment_bytes, sval));
      }
    for (auto iter : cluster->get_map ().get_symbolic_bindings ())
      m_symbolic_bindings.safe_push (iter);
    m_fragments.qsort (fragment::cmp_ptrs);
  }

  /* Find the fragment (if any) whose byte range contains BYTE,
     writing it to *OUT_FRAG and returning true if found.  */
  bool
  get_fragment_for_byte (byte_offset_t byte, fragment *out_frag) const
  {
    /* TODO: binary search rather than linear.  */
    unsigned iter_idx;
    for (iter_idx = 0; iter_idx < m_fragments.length (); iter_idx++)
      {
	if (m_fragments[iter_idx].m_byte_range.contains_p (byte))
	  {
	    *out_frag = m_fragments[iter_idx];
	    return true;
	  }
      }
    return false;
  }

  bool has_symbolic_bindings_p () const
  {
    return !m_symbolic_bindings.is_empty ();
  }

  void dump_to_pp (pretty_printer *pp) const
  {
    pp_string (pp, "iterable_cluster (fragments: [");
    for (auto const &iter : &m_fragments)
      {
	if (&iter != m_fragments.begin ())
	  pp_string (pp, ", ");
	iter.dump_to_pp (pp);
      }
    pp_printf (pp, "], symbolic bindings: [");
    for (auto const &iter : m_symbolic_bindings)
      {
	if (&iter != m_symbolic_bindings.begin ())
	  pp_string (pp, ", ");
	iter.m_region->dump_to_pp (pp, true);
      }
    pp_string (pp, "])");
  }

private:
  /* Concrete bindings, sorted by byte range.  */
  auto_vec<fragment> m_fragments;
  auto_vec<binding_map::symbolic_binding> m_symbolic_bindings;
};
4601 :
4602 : /* Simulate reading the bytes at BYTES from BASE_REG.
4603 : Complain to CTXT about any issues with the read e.g. out-of-bounds. */
4604 :
const svalue *
region_model::get_store_bytes (const region *base_reg,
			       const byte_range &bytes,
			       region_model_context *ctxt) const
{
  /* Shortcut reading all of a string_region.  */
  if (bytes.get_start_byte_offset () == 0)
    if (const string_region *string_reg = base_reg->dyn_cast_string_region ())
      if (bytes.m_size_in_bytes
	  == TREE_STRING_LENGTH (string_reg->get_string_cst ()))
	return m_mgr->get_or_create_initial_value (base_reg);

  /* Otherwise, build a sized region covering BYTES within BASE_REG.  */
  const svalue *index_sval
    = m_mgr->get_or_create_int_cst (size_type_node,
				    bytes.get_start_byte_offset ());
  const region *offset_reg = m_mgr->get_offset_region (base_reg,
						       NULL_TREE,
						       index_sval);
  const svalue *byte_size_sval
    = m_mgr->get_or_create_int_cst (size_type_node, bytes.m_size_in_bytes);
  const region *read_reg = m_mgr->get_sized_region (offset_reg,
						    NULL_TREE,
						    byte_size_sval);

  /* Simulate reading those bytes from the store.  */
  const svalue *sval = get_store_value (read_reg, ctxt);
  return sval;
}
4632 : }
4633 :
4634 : static tree
4635 2417 : get_tree_for_byte_offset (tree ptr_expr, byte_offset_t byte_offset)
4636 : {
4637 2417 : gcc_assert (ptr_expr);
4638 2417 : tree ptype = build_pointer_type_for_mode (char_type_node, ptr_mode, true);
4639 2417 : return fold_build2 (MEM_REF,
4640 : char_type_node,
4641 : ptr_expr, wide_int_to_tree (ptype, byte_offset));
4642 : }
4643 :
4644 : /* Simulate a series of reads of REG until we find a 0 byte
4645 : (equivalent to calling strlen).
4646 :
4647 : Complain to CTXT and return NULL if:
4648 : - the buffer pointed to isn't null-terminated
4649 : - the buffer pointed to has any uninitialized bytes before any 0-terminator
4650 : - any of the reads aren't within the bounds of the underlying base region
4651 :
4652 : Otherwise, return a svalue for the number of bytes read (strlen + 1),
4653 : and, if OUT_SVAL is non-NULL, write to *OUT_SVAL with an svalue
4654 : representing the content of REG up to and including the terminator.
4655 :
4656 : Algorithm
4657 : =========
4658 :
4659 : Get offset for first byte to read.
4660 : Find the binding (if any) that contains it.
4661 : Find the size in bits of that binding.
4662 : Round to the nearest byte (which way???)
4663 : Or maybe give up if we have a partial binding there.
4664 : Get the svalue from the binding.
4665 : Determine the strlen (if any) of that svalue.
4666 : Does it have a 0-terminator within it?
4667 : If so, we have a partial read up to and including that terminator
4668 : Read those bytes from the store; add to the result in the correct place.
4669 : Finish
4670 : If not, we have a full read of that svalue
4671 : Read those bytes from the store; add to the result in the correct place.
4672 : Update read/write offsets
4673 : Continue
4674 : If unknown:
4675 : Result is unknown
4676 : Finish
4677 : */
4678 :
const svalue *
region_model::scan_for_null_terminator_1 (const region *reg,
					  tree expr,
					  const svalue **out_sval,
					  region_model_context *ctxt) const
{
  logger *logger = ctxt ? ctxt->get_logger () : nullptr;
  store_manager *store_mgr = m_mgr->get_store_manager ();

  /* We need a concrete byte offset for REG within its base region;
     give up (conservatively returning "unknown") if it's symbolic.  */
  region_offset offset = reg->get_offset (m_mgr);
  if (offset.symbolic_p ())
    {
      if (out_sval)
	*out_sval = get_store_value (reg, nullptr);
      if (logger)
	logger->log ("offset is symbolic");
      return m_mgr->get_or_create_unknown_svalue (size_type_node);
    }
  byte_offset_t src_byte_offset;
  if (!offset.get_concrete_byte_offset (&src_byte_offset))
    {
      if (out_sval)
	*out_sval = get_store_value (reg, nullptr);
      if (logger)
	logger->log ("can't get concrete byte offset");
      return m_mgr->get_or_create_unknown_svalue (size_type_node);
    }
  /* SRC_BYTE_OFFSET advances through the source as we scan;
     DST_BYTE_OFFSET tracks how many bytes have been "copied" into the
     result so far.  */
  const byte_offset_t initial_src_byte_offset = src_byte_offset;
  byte_offset_t dst_byte_offset = 0;

  const region *base_reg = reg->get_base_region ();

  /* Special-case scanning within a string literal: search its bytes
     directly with memchr rather than simulating per-fragment reads.  */
  if (const string_region *str_reg = base_reg->dyn_cast_string_region ())
    {
      tree string_cst = str_reg->get_string_cst ();
      if (src_byte_offset >= 0
	  && src_byte_offset < TREE_STRING_LENGTH (string_cst)
	  && wi::fits_shwi_p (src_byte_offset))
	{
	  HOST_WIDE_INT str_byte_offset = src_byte_offset.to_shwi ();
	  const char *effective_start
	    = TREE_STRING_POINTER (string_cst) + str_byte_offset;
	  size_t effective_len
	    = TREE_STRING_LENGTH (string_cst) - str_byte_offset;
	  if (const void *p = memchr (effective_start, 0, effective_len))
	    {
	      /* Include the terminator in the count of bytes read.  */
	      size_t num_bytes_read
		= (const char *)p - effective_start + 1;
	      /* Simulate the read.  */
	      byte_range bytes_to_read (0, num_bytes_read);
	      const svalue *sval = get_store_bytes (reg, bytes_to_read, ctxt);
	      if (out_sval)
		*out_sval = sval;
	      if (logger)
		logger->log ("using string_cst");
	      return m_mgr->get_or_create_int_cst (size_type_node,
						   num_bytes_read);
	    }
	}
    }
  /* General case: walk the concrete fragments bound within the base
     region's cluster.  */
  const binding_cluster *cluster = m_store.get_cluster (base_reg);
  iterable_cluster c (cluster);
  if (logger)
    {
      pretty_printer *pp = logger->get_printer ();
      logger->start_log_line ();
      c.dump_to_pp (pp);
      logger->end_log_line ();
    }

  /* Accumulates the bytes read so far, for building OUT_SVAL.  */
  binding_map result (*store_mgr);

  while (1)
    {
      fragment f;
      if (c.get_fragment_for_byte (src_byte_offset, &f))
	{
	  if (logger)
	    {
	      logger->start_log_line ();
	      pretty_printer *pp = logger->get_printer ();
	      pp_printf (pp, "src_byte_offset: ");
	      pp_wide_int (pp, src_byte_offset, SIGNED);
	      pp_string (pp, ": ");
	      f.dump_to_pp (pp);
	      logger->end_log_line ();
	    }
	  gcc_assert (f.m_byte_range.contains_p (src_byte_offset));
	  /* src_byte_offset and f.m_byte_range are both expressed relative to
	     the base region.
	     Convert to a byte_range relative to the svalue.  */
	  const byte_range bytes_relative_to_svalue
	    (src_byte_offset - f.m_byte_range.get_start_byte_offset (),
	     f.m_byte_range.get_next_byte_offset () - src_byte_offset);
	  byte_offset_t fragment_bytes_read;
	  tristate is_terminated
	    = svalue_byte_range_has_null_terminator (f.m_sval,
						     bytes_relative_to_svalue,
						     &fragment_bytes_read,
						     logger);
	  /* If we can't tell whether the fragment terminates, the
	     overall length is unknown.  */
	  if (is_terminated.is_unknown ())
	    {
	      if (out_sval)
		*out_sval = get_store_value (reg, nullptr);
	      return m_mgr->get_or_create_unknown_svalue (size_type_node);
	    }

	  /* Simulate reading those bytes from the store.  */
	  byte_range bytes_to_read (src_byte_offset, fragment_bytes_read);
	  const svalue *sval = get_store_bytes (base_reg, bytes_to_read, ctxt);
	  check_for_poison (sval, expr, nullptr, ctxt);

	  if (out_sval)
	    {
	      /* Record this fragment's bytes at the correct place
		 within the result.  */
	      byte_range bytes_to_write (dst_byte_offset, fragment_bytes_read);
	      const binding_key *key
		= store_mgr->get_concrete_binding (bytes_to_write);
	      result.put (key, sval);
	    }

	  src_byte_offset += fragment_bytes_read;
	  dst_byte_offset += fragment_bytes_read;

	  /* Found the 0-terminator: DST_BYTE_OFFSET is now the total
	     number of bytes read, including the terminator.  */
	  if (is_terminated.is_true ())
	    {
	      if (out_sval)
		*out_sval = m_mgr->get_or_create_compound_svalue (NULL_TREE,
								  result);
	      if (logger)
		logger->log ("got terminator");
	      return m_mgr->get_or_create_int_cst (size_type_node,
						   dst_byte_offset);
	    }
	}
      else
	break;
    }

  /* No binding for this base_region, or no binding at src_byte_offset
     (or a symbolic binding).  */

  if (c.has_symbolic_bindings_p ())
    {
      if (out_sval)
	*out_sval = get_store_value (reg, nullptr);
      if (logger)
	logger->log ("got symbolic binding");
      return m_mgr->get_or_create_unknown_svalue (size_type_node);
    }

  /* TODO: the various special-cases seen in
     region_model::get_store_value.  */

  /* Simulate reading from this byte, then give up.  */
  byte_range bytes_to_read (src_byte_offset, 1);
  const svalue *sval = get_store_bytes (base_reg, bytes_to_read, ctxt);
  tree byte_expr
    = (expr
       ? get_tree_for_byte_offset (expr,
				   src_byte_offset - initial_src_byte_offset)
       : NULL_TREE);
  check_for_poison (sval, byte_expr, nullptr, ctxt);
  if (base_reg->can_have_initial_svalue_p ())
    {
      if (out_sval)
	*out_sval = get_store_value (reg, nullptr);
      return m_mgr->get_or_create_unknown_svalue (size_type_node);
    }
  else
    return nullptr;
}
4850 :
4851 : /* Like region_model::scan_for_null_terminator_1, but add logging. */
4852 :
const svalue *
region_model::scan_for_null_terminator (const region *reg,
					tree expr,
					const svalue **out_sval,
					region_model_context *ctxt) const
{
  logger *logger = ctxt ? ctxt->get_logger () : nullptr;
  LOG_SCOPE (logger);
  /* Log the region being scanned.  */
  if (logger)
    {
      pretty_printer *pp = logger->get_printer ();
      logger->start_log_line ();
      logger->log_partial ("region: ");
      reg->dump_to_pp (pp, true);
      logger->end_log_line ();
    }
  if (out_sval)
    *out_sval = nullptr;
  const svalue *sval = scan_for_null_terminator_1 (reg, expr, out_sval, ctxt);
  /* Invariant: if a length is returned and content was requested,
     content must have been written.  */
  if (sval && out_sval)
    gcc_assert (*out_sval);
  /* Log both the length result and (if requested) the content result.  */
  if (logger)
    {
      pretty_printer *pp = logger->get_printer ();
      logger->start_log_line ();
      logger->log_partial ("length result: ");
      if (sval)
	sval->dump_to_pp (pp, true);
      else
	pp_printf (pp, "NULL");
      logger->end_log_line ();
      if (out_sval)
	{
	  logger->start_log_line ();
	  logger->log_partial ("content result: ");
	  if (*out_sval)
	    (*out_sval)->dump_to_pp (pp, true);
	  else
	    pp_printf (pp, "NULL");
	  logger->end_log_line ();
	}
    }
  return sval;
}
4897 :
4898 : /* Check that argument ARG_IDX (0-based) to the call described by CD
4899 : is a pointer to a valid null-terminated string.
4900 :
4901 : Simulate scanning through the buffer, reading until we find a 0 byte
4902 : (equivalent to calling strlen).
4903 :
4904 : Complain and return nullptr if:
   - the buffer pointed to has any uninitialized bytes before any 0-terminator
4906 : - the buffer pointed to has any uninitalized bytes before any 0-terminator
4907 : - any of the reads aren't within the bounds of the underlying base region
4908 :
4909 : Otherwise, return a svalue for strlen of the buffer (*not* including
4910 : the null terminator).
4911 :
4912 : TODO: we should also complain if:
4913 : - the pointer is NULL (or could be). */
4914 :
const svalue *
region_model::check_for_null_terminated_string_arg (const call_details &cd,
						    unsigned arg_idx) const
{
  /* Delegate to the 4-argument overload: exclude the terminator from
     the returned length, and don't request the buffer's content.  */
  return check_for_null_terminated_string_arg (cd,
					       arg_idx,
					       false, /* include_terminator */
					       nullptr); // out_sval
}
4924 :
4925 :
4926 : /* Check that argument ARG_IDX (0-based) to the call described by CD
4927 : is a pointer to a valid null-terminated string.
4928 :
4929 : Simulate scanning through the buffer, reading until we find a 0 byte
4930 : (equivalent to calling strlen).
4931 :
4932 : Complain and return nullptr if:
   - the buffer pointed to has any uninitialized bytes before any 0-terminator
4934 : - the buffer pointed to has any uninitalized bytes before any 0-terminator
4935 : - any of the reads aren't within the bounds of the underlying base region
4936 :
4937 : Otherwise, return a svalue. This will be the number of bytes read
4938 : (including the null terminator) if INCLUDE_TERMINATOR is true, or strlen
4939 : of the buffer (not including the null terminator) if it is false.
4940 :
4941 : Also, when returning an svalue, if OUT_SVAL is non-nullptr, write to
4942 : *OUT_SVAL with an svalue representing the content of the buffer up to
4943 : and including the terminator.
4944 :
4945 : TODO: we should also complain if:
4946 : - the pointer is NULL (or could be). */
4947 :
const svalue *
region_model::check_for_null_terminated_string_arg (const call_details &cd,
						    unsigned arg_idx,
						    bool include_terminator,
						    const svalue **out_sval) const
{
  /* Custom event added to diagnostic paths, describing that we were
     scanning for a null terminator in the given argument.  */
  class null_terminator_check_event : public custom_event
  {
  public:
    null_terminator_check_event (const event_loc_info &loc_info,
				 const call_arg_details &arg_details)
    : custom_event (loc_info),
      m_arg_details (arg_details)
    {
    }

    void print_desc (pretty_printer &pp) const final override
    {
      /* Mention the argument expression if we have one; arg indices
	 are reported 1-based to the user.  */
      if (m_arg_details.m_arg_expr)
	pp_printf (&pp,
		   "while looking for null terminator"
		   " for argument %i (%qE) of %qD...",
		   m_arg_details.m_arg_idx + 1,
		   m_arg_details.m_arg_expr,
		   m_arg_details.m_called_fndecl);
      else
	pp_printf (&pp,
		   "while looking for null terminator"
		   " for argument %i of %qD...",
		   m_arg_details.m_arg_idx + 1,
		   m_arg_details.m_called_fndecl);
    }

  private:
    const call_arg_details m_arg_details;
  };

  /* Note added to diagnostics, pointing at the declaration that
     expects a null-terminated string.  */
  class null_terminator_check_decl_note
    : public pending_note_subclass<null_terminator_check_decl_note>
  {
  public:
    null_terminator_check_decl_note (const call_arg_details &arg_details)
    : m_arg_details (arg_details)
    {
    }

    const char *get_kind () const final override
    {
      return "null_terminator_check_decl_note";
    }

    void emit () const final override
    {
      inform_about_expected_null_terminated_string_arg (m_arg_details);
    }

    bool operator== (const null_terminator_check_decl_note &other) const
    {
      return m_arg_details == other.m_arg_details;
    }

  private:
    const call_arg_details m_arg_details;
  };

  /* Subclass of decorated_region_model_context that
     adds the above event and note to any saved diagnostics.  */
  class annotating_ctxt : public annotating_context
  {
  public:
    annotating_ctxt (const call_details &cd,
		     unsigned arg_idx)
    : annotating_context (cd.get_ctxt ()),
      m_cd (cd),
      m_arg_idx (arg_idx)
    {
    }
    void add_annotations () final override
    {
      call_arg_details arg_details (m_cd, m_arg_idx);
      event_loc_info loc_info (m_cd.get_location (),
			       m_cd.get_model ()->get_current_function ()->decl,
			       m_cd.get_model ()->get_stack_depth ());

      add_event
	(std::make_unique<null_terminator_check_event> (loc_info,
							arg_details));
      add_note
	(std::make_unique <null_terminator_check_decl_note> (arg_details));
    }
  private:
    const call_details &m_cd;
    unsigned m_arg_idx;
  };

  /* Use this ctxt below so that any diagnostics that get added
     get annotated.  */
  annotating_ctxt my_ctxt (cd, arg_idx);

  /* Dereference the argument to get the buffer region to scan.  */
  const svalue *arg_sval = cd.get_arg_svalue (arg_idx);
  const region *buf_reg
    = deref_rvalue (arg_sval, cd.get_arg_tree (arg_idx), &my_ctxt);

  if (const svalue *num_bytes_read_sval
	= scan_for_null_terminator (buf_reg,
				    cd.get_arg_tree (arg_idx),
				    out_sval,
				    &my_ctxt))
    {
      if (out_sval)
	gcc_assert (*out_sval);
      if (include_terminator)
	return num_bytes_read_sval;
      else
	{
	  /* strlen is (bytes_read - 1).  */
	  const svalue *one = m_mgr->get_or_create_int_cst (size_type_node, 1);
	  return m_mgr->get_or_create_binop (size_type_node,
					     MINUS_EXPR,
					     num_bytes_read_sval,
					     one);
	}
    }
  else
    return nullptr;
}
5074 :
5075 : /* Remove all bindings overlapping REG within the store. */
5076 :
5077 : void
5078 6131 : region_model::clobber_region (const region *reg)
5079 : {
5080 6131 : m_store.clobber_region (m_mgr->get_store_manager(), reg);
5081 6131 : }
5082 :
5083 : /* Remove any bindings for REG within the store. */
5084 :
5085 : void
5086 216671 : region_model::purge_region (const region *reg)
5087 : {
5088 216671 : m_store.purge_region (m_mgr->get_store_manager(), reg);
5089 216671 : }
5090 :
5091 : /* Fill REG with SVAL.
5092 : Use CTXT to report any warnings associated with the write
5093 : (e.g. out-of-bounds). */
5094 :
5095 : void
5096 640 : region_model::fill_region (const region *reg,
5097 : const svalue *sval,
5098 : region_model_context *ctxt)
5099 : {
5100 640 : check_region_for_write (reg, nullptr, ctxt);
5101 640 : m_store.fill_region (m_mgr->get_store_manager(), reg, sval);
5102 640 : }
5103 :
5104 : /* Zero-fill REG.
5105 : Use CTXT to report any warnings associated with the write
5106 : (e.g. out-of-bounds). */
5107 :
5108 : void
5109 705 : region_model::zero_fill_region (const region *reg,
5110 : region_model_context *ctxt)
5111 : {
5112 705 : check_region_for_write (reg, nullptr, ctxt);
5113 705 : m_store.zero_fill_region (m_mgr->get_store_manager(), reg);
5114 705 : }
5115 :
5116 : /* Copy NUM_BYTES_SVAL of SVAL to DEST_REG.
5117 : Use CTXT to report any warnings associated with the copy
5118 : (e.g. out-of-bounds writes). */
5119 :
5120 : void
5121 2103 : region_model::write_bytes (const region *dest_reg,
5122 : const svalue *num_bytes_sval,
5123 : const svalue *sval,
5124 : region_model_context *ctxt)
5125 : {
5126 2103 : const region *sized_dest_reg
5127 2103 : = m_mgr->get_sized_region (dest_reg, NULL_TREE, num_bytes_sval);
5128 2103 : set_value (sized_dest_reg, sval, ctxt);
5129 2103 : }
5130 :
5131 : /* Read NUM_BYTES_SVAL from SRC_REG.
5132 : Use CTXT to report any warnings associated with the copy
5133 : (e.g. out-of-bounds reads, copying of uninitialized values, etc). */
5134 :
5135 : const svalue *
5136 1092 : region_model::read_bytes (const region *src_reg,
5137 : tree src_ptr_expr,
5138 : const svalue *num_bytes_sval,
5139 : region_model_context *ctxt) const
5140 : {
5141 1092 : if (num_bytes_sval->get_kind () == SK_UNKNOWN)
5142 187 : return m_mgr->get_or_create_unknown_svalue (NULL_TREE);
5143 905 : const region *sized_src_reg
5144 905 : = m_mgr->get_sized_region (src_reg, NULL_TREE, num_bytes_sval);
5145 905 : const svalue *src_contents_sval = get_store_value (sized_src_reg, ctxt);
5146 905 : check_for_poison (src_contents_sval, src_ptr_expr,
5147 : sized_src_reg, ctxt);
5148 905 : return src_contents_sval;
5149 : }
5150 :
5151 : /* Copy NUM_BYTES_SVAL bytes from SRC_REG to DEST_REG.
5152 : Use CTXT to report any warnings associated with the copy
5153 : (e.g. out-of-bounds reads/writes, copying of uninitialized values,
5154 : etc). */
5155 :
5156 : void
5157 506 : region_model::copy_bytes (const region *dest_reg,
5158 : const region *src_reg,
5159 : tree src_ptr_expr,
5160 : const svalue *num_bytes_sval,
5161 : region_model_context *ctxt)
5162 : {
5163 506 : const svalue *data_sval
5164 506 : = read_bytes (src_reg, src_ptr_expr, num_bytes_sval, ctxt);
5165 506 : write_bytes (dest_reg, num_bytes_sval, data_sval, ctxt);
5166 506 : }
5167 :
5168 : /* Mark REG as having unknown content. */
5169 :
5170 : void
5171 253 : region_model::mark_region_as_unknown (const region *reg,
5172 : uncertainty_t *uncertainty)
5173 : {
5174 253 : svalue_set maybe_live_values;
5175 253 : m_store.mark_region_as_unknown (m_mgr->get_store_manager(), reg,
5176 : uncertainty, &maybe_live_values);
5177 253 : m_store.on_maybe_live_values (*m_mgr->get_store_manager (),
5178 : maybe_live_values);
5179 253 : }
5180 :
5181 : /* Determine what is known about the condition "LHS_SVAL OP RHS_SVAL" within
5182 : this model. */
5183 :
tristate
region_model::eval_condition (const svalue *lhs,
			      enum tree_code op,
			      const svalue *rhs) const
{
  gcc_assert (lhs);
  gcc_assert (rhs);

  /* For now, make no attempt to capture constraints on floating-point
     values.  */
  if ((lhs->get_type () && FLOAT_TYPE_P (lhs->get_type ()))
      || (rhs->get_type () && FLOAT_TYPE_P (rhs->get_type ())))
    return tristate::unknown ();

  /* See what we know based on the values.  */

  /* Unwrap any unmergeable values.  */
  lhs = lhs->unwrap_any_unmergeable ();
  rhs = rhs->unwrap_any_unmergeable ();

  if (lhs == rhs)
    {
      /* If we have the same svalue, then we have equality
	 (apart from NaN-handling).
	 TODO: should this definitely be the case for poisoned values?  */
      /* Poisoned and unknown values are "unknowable".  */
      if (lhs->get_kind () == SK_POISONED
	  || lhs->get_kind () == SK_UNKNOWN)
	return tristate::TS_UNKNOWN;

      switch (op)
	{
	case EQ_EXPR:
	case GE_EXPR:
	case LE_EXPR:
	  return tristate::TS_TRUE;

	case NE_EXPR:
	case GT_EXPR:
	case LT_EXPR:
	  return tristate::TS_FALSE;

	default:
	  /* For other ops, use the logic below.  */
	  break;
	}
    }

  /* If we have a pair of region_svalues, compare them.  */
  if (const region_svalue *lhs_ptr = lhs->dyn_cast_region_svalue ())
    if (const region_svalue *rhs_ptr = rhs->dyn_cast_region_svalue ())
      {
	tristate res = region_svalue::eval_condition (lhs_ptr, op, rhs_ptr);
	if (res.is_known ())
	  return res;
	/* Otherwise, only known through constraints.  */
      }

  if (const constant_svalue *cst_lhs = lhs->dyn_cast_constant_svalue ())
    {
      /* If we have a pair of constants, compare them.  */
      if (const constant_svalue *cst_rhs = rhs->dyn_cast_constant_svalue ())
	return constant_svalue::eval_condition (cst_lhs, op, cst_rhs);
      else
	{
	  /* When we have one constant, put it on the RHS.  */
	  std::swap (lhs, rhs);
	  op = swap_tree_comparison (op);
	}
    }
  /* Any lone constant LHS was swapped to the RHS above.  */
  gcc_assert (lhs->get_kind () != SK_CONSTANT);

  /* Handle comparison against zero.  */
  if (const constant_svalue *cst_rhs = rhs->dyn_cast_constant_svalue ())
    if (zerop (cst_rhs->get_constant ()))
      {
	if (const region_svalue *ptr = lhs->dyn_cast_region_svalue ())
	  {
	    /* A region_svalue is a non-NULL pointer, except in certain
	       special cases (see the comment for region::non_null_p).  */
	    const region *pointee = ptr->get_pointee ();
	    if (pointee->non_null_p ())
	      {
		switch (op)
		  {
		  default:
		    gcc_unreachable ();

		  case EQ_EXPR:
		  case GE_EXPR:
		  case LE_EXPR:
		    return tristate::TS_FALSE;

		  case NE_EXPR:
		  case GT_EXPR:
		  case LT_EXPR:
		    return tristate::TS_TRUE;
		  }
	      }
	  }
	else if (const binop_svalue *binop = lhs->dyn_cast_binop_svalue ())
	  {
	    /* Treat offsets from a non-NULL pointer as being non-NULL.  This
	       isn't strictly true, in that eventually ptr++ will wrap
	       around and be NULL, but it won't occur in practice and thus
	       can be used to suppress effectively false positives that we
	       shouldn't warn for.  */
	    if (binop->get_op () == POINTER_PLUS_EXPR)
	      {
		tristate lhs_ts = eval_condition (binop->get_arg0 (), op, rhs);
		if (lhs_ts.is_known ())
		  return lhs_ts;
	      }
	  }
	else if (const unaryop_svalue *unaryop
		   = lhs->dyn_cast_unaryop_svalue ())
	  {
	    if (unaryop->get_op () == NEGATE_EXPR)
	      {
		/* e.g. "-X <= 0" is equivalent to X >= 0".  */
		tristate lhs_ts = eval_condition (unaryop->get_arg (),
						  swap_tree_comparison (op),
						  rhs);
		if (lhs_ts.is_known ())
		  return lhs_ts;
	      }
	  }
      }

  /* Handle rejection of equality for comparisons of the initial values of
     "external" values (such as params) with the address of locals.  */
  if (const initial_svalue *init_lhs = lhs->dyn_cast_initial_svalue ())
    if (const region_svalue *rhs_ptr = rhs->dyn_cast_region_svalue ())
      {
	tristate res = compare_initial_and_pointer (init_lhs, rhs_ptr);
	if (res.is_known ())
	  return res;
      }
  /* ...and symmetrically, with the initial value on the RHS.  */
  if (const initial_svalue *init_rhs = rhs->dyn_cast_initial_svalue ())
    if (const region_svalue *lhs_ptr = lhs->dyn_cast_region_svalue ())
      {
	tristate res = compare_initial_and_pointer (init_rhs, lhs_ptr);
	if (res.is_known ())
	  return res;
      }

  /* A widening svalue vs a constant: ask the widening svalue itself,
     without involving the constraint manager.  */
  if (const widening_svalue *widen_lhs = lhs->dyn_cast_widening_svalue ())
    if (tree rhs_cst = rhs->maybe_get_constant ())
      {
	tristate res = widen_lhs->eval_condition_without_cm (op, rhs_cst);
	if (res.is_known ())
	  return res;
      }

  /* Handle comparisons between two svalues with more than one operand.  */
  if (const binop_svalue *binop = lhs->dyn_cast_binop_svalue ())
    {
      switch (op)
	{
	default:
	  break;
	case EQ_EXPR:
	  {
	    /* TODO: binops can be equal even if they are not structurally
	       equal in case of commutative operators.  */
	    tristate res = structural_equality (lhs, rhs);
	    if (res.is_true ())
	      return res;
	  }
	  break;
	case LE_EXPR:
	  {
	    /* Structural equality implies x <= x.  */
	    tristate res = structural_equality (lhs, rhs);
	    if (res.is_true ())
	      return res;
	  }
	  break;
	case GE_EXPR:
	  {
	    /* x >= x via structural equality, or strict "greater than"
	       via the symbolic heuristic.  */
	    tristate res = structural_equality (lhs, rhs);
	    if (res.is_true ())
	      return res;
	    res = symbolic_greater_than (binop, rhs);
	    if (res.is_true ())
	      return res;
	  }
	  break;
	case GT_EXPR:
	  {
	    tristate res = symbolic_greater_than (binop, rhs);
	    if (res.is_true ())
	      return res;
	  }
	  break;
	}
    }

  /* Try range_op, but avoid cases where we have been sloppy about types.  */
  if (lhs->get_type ()
      && rhs->get_type ()
      && range_compatible_p (lhs->get_type (), rhs->get_type ()))
    {
      value_range lhs_vr, rhs_vr;
      if (lhs->maybe_get_value_range (lhs_vr))
	if (rhs->maybe_get_value_range (rhs_vr))
	  {
	    range_op_handler handler (op);
	    if (handler)
	      {
		int_range_max out;
		if (handler.fold_range (out, boolean_type_node, lhs_vr, rhs_vr))
		  {
		    /* A folded range that's all-zero (or all-nonzero)
		       decides the boolean condition.  */
		    if (out.zero_p ())
		      return tristate::TS_FALSE;
		    if (out.nonzero_p ())
		      return tristate::TS_TRUE;
		  }
	      }
	  }
    }

  /* Attempt to unwrap cast if there is one, and the types match.  */
  tree lhs_type = lhs->get_type ();
  tree rhs_type = rhs->get_type ();
  if (lhs_type && rhs_type)
    {
      const unaryop_svalue *lhs_un_op = dyn_cast <const unaryop_svalue *> (lhs);
      const unaryop_svalue *rhs_un_op = dyn_cast <const unaryop_svalue *> (rhs);
      /* Both sides are casts: compare the uncast operands.  */
      if (lhs_un_op && CONVERT_EXPR_CODE_P (lhs_un_op->get_op ())
	  && rhs_un_op && CONVERT_EXPR_CODE_P (rhs_un_op->get_op ())
	  && lhs_type == rhs_type)
	{
	  tristate res = eval_condition (lhs_un_op->get_arg (),
					 op,
					 rhs_un_op->get_arg ());
	  if (res.is_known ())
	    return res;
	}
      /* Only one side is a cast: unwrap just that side.  */
      else if (lhs_un_op && CONVERT_EXPR_CODE_P (lhs_un_op->get_op ())
	       && lhs_type == rhs_type)
	{
	  tristate res = eval_condition (lhs_un_op->get_arg (), op, rhs);
	  if (res.is_known ())
	    return res;
	}
      else if (rhs_un_op && CONVERT_EXPR_CODE_P (rhs_un_op->get_op ())
	       && lhs_type == rhs_type)
	{
	  tristate res = eval_condition (lhs, op, rhs_un_op->get_arg ());
	  if (res.is_known ())
	    return res;
	}
    }

  /* Otherwise, try constraints.
     Cast to const to ensure we don't change the constraint_manager as we
     do this (e.g. by creating equivalence classes).  */
  const constraint_manager *constraints = m_constraints;
  return constraints->eval_condition (lhs, op, rhs);
}
5444 :
5445 : /* Subroutine of region_model::eval_condition, for rejecting
5446 : equality of INIT_VAL(PARM) with &LOCAL. */
5447 :
5448 : tristate
5449 250 : region_model::compare_initial_and_pointer (const initial_svalue *init,
5450 : const region_svalue *ptr) const
5451 : {
5452 250 : const region *pointee = ptr->get_pointee ();
5453 :
5454 : /* If we have a pointer to something within a stack frame, it can't be the
5455 : initial value of a param. */
5456 250 : if (pointee->maybe_get_frame_region ())
5457 32 : if (init->initial_value_of_param_p ())
5458 32 : return tristate::TS_FALSE;
5459 :
5460 218 : return tristate::TS_UNKNOWN;
5461 : }
5462 :
5463 : /* Return true if SVAL is definitely positive. */
5464 :
5465 : static bool
5466 14300 : is_positive_svalue (const svalue *sval)
5467 : {
5468 14300 : if (tree cst = sval->maybe_get_constant ())
5469 14025 : return !zerop (cst) && get_range_pos_neg (cst) == 1;
5470 275 : tree type = sval->get_type ();
5471 275 : if (!type)
5472 : return false;
5473 : /* Consider a binary operation size_t + int. The analyzer wraps the int in
5474 : an unaryop_svalue, converting it to a size_t, but in the dynamic execution
5475 : the result is smaller than the first operand. Thus, we have to look if
5476 : the argument of the unaryop_svalue is also positive. */
5477 216 : if (const unaryop_svalue *un_op = dyn_cast <const unaryop_svalue *> (sval))
5478 10 : return CONVERT_EXPR_CODE_P (un_op->get_op ()) && TYPE_UNSIGNED (type)
5479 18 : && is_positive_svalue (un_op->get_arg ());
5480 206 : return TYPE_UNSIGNED (type);
5481 : }
5482 :
5483 : /* Return true if A is definitely larger than B.
5484 :
5485 : Limitation: does not account for integer overflows and does not try to
5486 : return false, so it can not be used negated. */
5487 :
tristate
region_model::symbolic_greater_than (const binop_svalue *bin_a,
				     const svalue *b) const
{
  /* Only PLUS and MULT are handled; both are monotonic in their
     operands for non-negative values (ignoring overflow, per the
     limitation documented above).  */
  if (bin_a->get_op () == PLUS_EXPR || bin_a->get_op () == MULT_EXPR)
    {
      /* Eliminate the right-hand side of both svalues.  */
      if (const binop_svalue *bin_b = dyn_cast <const binop_svalue *> (b))
	if (bin_a->get_op () == bin_b->get_op ()
	    && eval_condition (bin_a->get_arg1 (),
			       GT_EXPR,
			       bin_b->get_arg1 ()).is_true ()
	    && eval_condition (bin_a->get_arg0 (),
			       GE_EXPR,
			       bin_b->get_arg0 ()).is_true ())
	  return tristate (tristate::TS_TRUE);

      /* Otherwise, try to remove a positive offset or factor from BIN_A.  */
      if (is_positive_svalue (bin_a->get_arg1 ())
	  && eval_condition (bin_a->get_arg0 (),
			     GE_EXPR, b).is_true ())
	return tristate (tristate::TS_TRUE);
    }
  return tristate::unknown ();
}
5513 :
/* Return true if A and B are equal structurally.

   Structural equality means that A and B are equal if the svalues A and B
   have the same nodes at the same positions in the tree and the leafs are
   equal.  Equality for conjured_svalues and initial_svalues is determined
   by comparing the pointers while constants are compared by value.  That
   behavior is useful to check for binop_svalues that evaluate to the same
   concrete value but might use one operand with a different type but the
   same constant value.

   For example,
     binop_svalue (mult_expr,
		   initial_svalue (‘size_t’, decl_region (..., 'some_var')),
		   constant_svalue (‘size_t’, 4))
   and
     binop_svalue (mult_expr,
		   initial_svalue (‘size_t’, decl_region (..., 'some_var')),
		   constant_svalue (‘sizetype’, 4))
   are structurally equal.  A concrete C code example, where this occurs, can
   be found in test7 of out-of-bounds-5.c.  */

tristate
region_model::structural_equality (const svalue *a, const svalue *b) const
{
  /* If A and B are referentially equal, they are also structurally equal.  */
  if (a == b)
    return tristate (tristate::TS_TRUE);

  switch (a->get_kind ())
    {
    default:
      return tristate::unknown ();
    /* SK_CONJURED and SK_INITIAL are already handled
       by the referential equality above.  */
    case SK_CONSTANT:
      {
	/* Constants are compared by value rather than identity, so that
	   e.g. a 'size_t' 4 and a 'sizetype' 4 compare equal.  */
	tree a_cst = a->maybe_get_constant ();
	tree b_cst = b->maybe_get_constant ();
	if (a_cst && b_cst)
	  return tristate (tree_int_cst_equal (a_cst, b_cst));
      }
      return tristate (tristate::TS_FALSE);
    case SK_UNARYOP:
      {
	/* Unary ops match if the result types, the op codes, and the
	   arguments all match (the latter recursively).  */
	const unaryop_svalue *un_a = as_a <const unaryop_svalue *> (a);
	if (const unaryop_svalue *un_b = dyn_cast <const unaryop_svalue *> (b))
	  return tristate (pending_diagnostic::same_tree_p (un_a->get_type (),
							    un_b->get_type ())
			   && un_a->get_op () == un_b->get_op ()
			   && structural_equality (un_a->get_arg (),
						   un_b->get_arg ()));
      }
      return tristate (tristate::TS_FALSE);
    case SK_BINOP:
      {
	/* Binary ops match if the op codes match and both pairs of
	   arguments match recursively; the result type is not compared.  */
	const binop_svalue *bin_a = as_a <const binop_svalue *> (a);
	if (const binop_svalue *bin_b = dyn_cast <const binop_svalue *> (b))
	  return tristate (bin_a->get_op () == bin_b->get_op ()
			   && structural_equality (bin_a->get_arg0 (),
						   bin_b->get_arg0 ())
			   && structural_equality (bin_a->get_arg1 (),
						   bin_b->get_arg1 ()));
      }
      return tristate (tristate::TS_FALSE);
    }
}
5579 :
/* Handle various constraints of the form:
     LHS: ((bool)INNER_LHS INNER_OP INNER_RHS)
     OP : == or !=
     RHS: zero
   and (with a cast):
     LHS: CAST([long]int, ((bool)INNER_LHS INNER_OP INNER_RHS))
     OP : == or !=
     RHS: zero
   by adding constraints for INNER_LHS INNER_OP INNER_RHS.

   Return true if this function can fully handle the constraint; if
   so, add the implied constraint(s) and write true to *OUT if they
   are consistent with existing constraints, or write false to *OUT
   if they contradict existing constraints.

   Return false for cases that this function doesn't know how to handle.

   For example, if we're checking a stored conditional, we'll have
   something like:
     LHS: CAST(long int, (&HEAP_ALLOCATED_REGION(8)!=(int *)0B))
     OP : NE_EXPR
     RHS: zero
   which this function can turn into an add_constraint of:
     (&HEAP_ALLOCATED_REGION(8) != (int *)0B)

   Similarly, optimized && and || conditionals lead to e.g.
     if (p && q)
   becoming gimple like this:
     _1 = p_6 == 0B;
     _2 = q_8 == 0B
     _3 = _1 | _2
   On the "_3 is false" branch we can have constraints of the form:
     ((&HEAP_ALLOCATED_REGION(8)!=(int *)0B)
      | (&HEAP_ALLOCATED_REGION(10)!=(int *)0B))
     == 0
   which implies that both _1 and _2 are false,
   which this function can turn into a pair of add_constraints of
     (&HEAP_ALLOCATED_REGION(8)!=(int *)0B)
   and:
     (&HEAP_ALLOCATED_REGION(10)!=(int *)0B).  */

bool
region_model::add_constraints_from_binop (const svalue *outer_lhs,
					  enum tree_code outer_op,
					  const svalue *outer_rhs,
					  bool *out,
					  region_model_context *ctxt)
{
  /* Strip any casts from the LHS, so that e.g.
     "CAST(long int, A op B)" is handled the same as "A op B".  */
  while (const svalue *cast = outer_lhs->maybe_undo_cast ())
    outer_lhs = cast;
  const binop_svalue *binop_sval = outer_lhs->dyn_cast_binop_svalue ();
  if (!binop_sval)
    return false;
  if (!outer_rhs->all_zeroes_p ())
    return false;

  const svalue *inner_lhs = binop_sval->get_arg0 ();
  enum tree_code inner_op = binop_sval->get_op ();
  const svalue *inner_rhs = binop_sval->get_arg1 ();

  if (outer_op != NE_EXPR && outer_op != EQ_EXPR)
    return false;

  /* We have either
     - "OUTER_LHS != false" (i.e. OUTER is true), or
     - "OUTER_LHS == false" (i.e. OUTER is false).  */
  bool is_true = outer_op == NE_EXPR;

  switch (inner_op)
    {
    default:
      return false;

    case EQ_EXPR:
    case NE_EXPR:
    case GE_EXPR:
    case GT_EXPR:
    case LE_EXPR:
    case LT_EXPR:
      {
	/* ...and "(inner_lhs OP inner_rhs) == 0"
	   then (inner_lhs OP inner_rhs) must have the same
	   logical value as LHS.  */
	if (!is_true)
	  inner_op = invert_tree_comparison (inner_op, false /* honor_nans */);
	*out = add_constraint (inner_lhs, inner_op, inner_rhs, ctxt);
	return true;
      }
      break;

    case BIT_AND_EXPR:
      if (is_true)
	{
	  /* ...and "(inner_lhs & inner_rhs) != 0"
	     then both inner_lhs and inner_rhs must be true.  */
	  const svalue *false_sval
	    = m_mgr->get_or_create_constant_svalue (boolean_false_node);
	  bool sat1 = add_constraint (inner_lhs, NE_EXPR, false_sval, ctxt);
	  bool sat2 = add_constraint (inner_rhs, NE_EXPR, false_sval, ctxt);
	  *out = sat1 && sat2;
	  return true;
	}
      return false;

    case BIT_IOR_EXPR:
      if (!is_true)
	{
	  /* ...and "(inner_lhs | inner_rhs) == 0"
	     i.e. "(inner_lhs | inner_rhs)" is false
	     then both inner_lhs and inner_rhs must be false.  */
	  const svalue *false_sval
	    = m_mgr->get_or_create_constant_svalue (boolean_false_node);
	  bool sat1 = add_constraint (inner_lhs, EQ_EXPR, false_sval, ctxt);
	  bool sat2 = add_constraint (inner_rhs, EQ_EXPR, false_sval, ctxt);
	  *out = sat1 && sat2;
	  return true;
	}
      return false;
    }
}
5700 :
5701 : /* Attempt to add the constraint "LHS OP RHS" to this region_model.
5702 : If it is consistent with existing constraints, add it, and return true.
5703 : Return false if it contradicts existing constraints.
5704 : Use CTXT for reporting any diagnostics associated with the accesses. */
5705 :
5706 : bool
5707 73657 : region_model::add_constraint (tree lhs, enum tree_code op, tree rhs,
5708 : region_model_context *ctxt)
5709 : {
5710 : /* For now, make no attempt to capture constraints on floating-point
5711 : values. */
5712 73657 : if (FLOAT_TYPE_P (TREE_TYPE (lhs)) || FLOAT_TYPE_P (TREE_TYPE (rhs)))
5713 : return true;
5714 :
5715 73287 : const svalue *lhs_sval = get_rvalue (lhs, ctxt);
5716 73287 : const svalue *rhs_sval = get_rvalue (rhs, ctxt);
5717 :
5718 73287 : return add_constraint (lhs_sval, op, rhs_sval, ctxt);
5719 : }
5720 :
5721 : static bool
5722 17090 : unusable_in_infinite_loop_constraint_p (const svalue *sval)
5723 : {
5724 17090 : if (sval->get_kind () == SK_WIDENING)
5725 0 : return true;
5726 : return false;
5727 : }
5728 :
/* Attempt to add the constraint "LHS OP RHS" to this region_model.
   If it is consistent with existing constraints, add it, and return true.
   Return false if it contradicts existing constraints.
   Use CTXT for reporting any diagnostics associated with the accesses.  */

bool
region_model::add_constraint (const svalue *lhs,
			      enum tree_code op,
			      const svalue *rhs,
			      region_model_context *ctxt)
{
  const bool checking_for_infinite_loop
    = ctxt ? ctxt->checking_for_infinite_loop_p () : false;

  /* When checking for infinite loops, reject constraints involving
     svalues that vary per iteration (widening svalues).  */
  if (checking_for_infinite_loop)
    {
      if (unusable_in_infinite_loop_constraint_p (lhs)
	  || unusable_in_infinite_loop_constraint_p (rhs))
	{
	  gcc_assert (ctxt);
	  ctxt->on_unusable_in_infinite_loop ();
	  return false;
	}
    }

  tristate t_cond = eval_condition (lhs, op, rhs);

  /* If we already have the condition, do nothing.  */
  if (t_cond.is_true ())
    return true;

  /* Reject a constraint that would contradict existing knowledge, as
     unsatisfiable.  */
  if (t_cond.is_false ())
    return false;

  if (checking_for_infinite_loop)
    {
      /* Here, we don't have a definite true/false value, so bail out
	 when checking for infinite loops.  */
      gcc_assert (ctxt);
      ctxt->on_unusable_in_infinite_loop ();
      return false;
    }

  /* Try decomposing compound conditions (boolean &/|, stored
     comparisons) into simpler constraints.  */
  bool out;
  if (add_constraints_from_binop (lhs, op, rhs, &out, ctxt))
    return out;

  /* Attempt to store the constraint.  */
  if (!m_constraints->add_constraint (lhs, op, rhs))
    return false;

  /* Notify the context, if any.  This exists so that the state machines
     in a program_state can be notified about the condition, and so can
     set sm-state for e.g. unchecked->checked, both for cfg-edges, and
     when synthesizing constraints as above.  */
  if (ctxt)
    ctxt->on_condition (lhs, op, rhs);

  /* If we have &REGION == NULL, then drop dynamic extents for REGION (for
     the case where REGION is heap-allocated and thus could be NULL).  */
  if (tree rhs_cst = rhs->maybe_get_constant ())
    if (op == EQ_EXPR && zerop (rhs_cst))
      if (const region_svalue *region_sval = lhs->dyn_cast_region_svalue ())
	unset_dynamic_extents (region_sval->get_pointee ());

  return true;
}
5798 :
5799 : /* As above, but when returning false, if OUT is non-NULL, write a
5800 : new rejected_constraint to *OUT. */
5801 :
5802 : bool
5803 72515 : region_model::add_constraint (tree lhs, enum tree_code op, tree rhs,
5804 : region_model_context *ctxt,
5805 : std::unique_ptr<rejected_constraint> *out)
5806 : {
5807 72515 : bool sat = add_constraint (lhs, op, rhs, ctxt);
5808 72515 : if (!sat && out)
5809 : {
5810 2213 : const svalue *lhs_sval = get_rvalue (lhs, nullptr);
5811 2213 : const svalue *rhs_sval = get_rvalue (rhs, nullptr);
5812 4426 : *out = std::make_unique <rejected_op_constraint> (*this,
5813 2213 : lhs_sval, op, rhs_sval);
5814 : }
5815 72515 : return sat;
5816 : }
5817 :
5818 : /* Determine what is known about the condition "LHS OP RHS" within
5819 : this model.
5820 : Use CTXT for reporting any diagnostics associated with the accesses. */
5821 :
5822 : tristate
5823 33285 : region_model::eval_condition (tree lhs,
5824 : enum tree_code op,
5825 : tree rhs,
5826 : region_model_context *ctxt) const
5827 : {
5828 : /* For now, make no attempt to model constraints on floating-point
5829 : values. */
5830 33285 : if (FLOAT_TYPE_P (TREE_TYPE (lhs)) || FLOAT_TYPE_P (TREE_TYPE (rhs)))
5831 16 : return tristate::unknown ();
5832 :
5833 33269 : return eval_condition (get_rvalue (lhs, ctxt), op, get_rvalue (rhs, ctxt));
5834 : }
5835 :
/* Implementation of region_model::get_representative_path_var.
   Attempt to return a path_var that represents SVAL, or return NULL_TREE.
   Use VISITED to prevent infinite mutual recursion with the overload for
   regions.  */

path_var
region_model::get_representative_path_var_1 (const svalue *sval,
					     svalue_set *visited,
					     logger *logger) const
{
  gcc_assert (sval);

  /* Prevent infinite recursion.  Constants can still be represented
     directly; anything else revisited yields the NULL path_var.  */
  if (visited->contains (sval))
    {
      if (sval->get_kind () == SK_CONSTANT)
	return path_var (sval->maybe_get_constant (), 0);
      else
	return path_var (NULL_TREE, 0);
    }
  visited->add (sval);

  /* Handle casts by recursion into get_representative_path_var.  */
  if (const svalue *cast_sval = sval->maybe_undo_cast ())
    {
      path_var result = get_representative_path_var (cast_sval, visited,
						     logger);
      tree orig_type = sval->get_type ();
      /* If necessary, wrap the result in a cast.  */
      if (result.m_tree && orig_type)
	result.m_tree = build1 (NOP_EXPR, orig_type, result.m_tree);
      return result;
    }

  /* Gather candidate path_vars from bindings in the store.  */
  auto_vec<path_var> pvs;
  m_store.get_representative_path_vars (this, visited, sval, logger, &pvs);

  /* A constant is always a candidate representation of itself.  */
  if (tree cst = sval->maybe_get_constant ())
    pvs.safe_push (path_var (cst, 0));

  /* Handle string literals and various other pointers.  */
  if (const region_svalue *ptr_sval = sval->dyn_cast_region_svalue ())
    {
      const region *reg = ptr_sval->get_pointee ();
      if (path_var pv = get_representative_path_var (reg, visited, logger))
	return path_var (build1 (ADDR_EXPR,
				 sval->get_type (),
				 pv.m_tree),
			 pv.m_stack_depth);
    }

  /* If we have a sub_svalue, look for ways to represent the parent.  */
  if (const sub_svalue *sub_sval = sval->dyn_cast_sub_svalue ())
    {
      const svalue *parent_sval = sub_sval->get_parent ();
      const region *subreg = sub_sval->get_subregion ();
      /* Only field accesses are expressible here (as COMPONENT_REFs).  */
      if (path_var parent_pv
	    = get_representative_path_var (parent_sval, visited, logger))
	if (const field_region *field_reg = subreg->dyn_cast_field_region ())
	  return path_var (build3 (COMPONENT_REF,
				   sval->get_type (),
				   parent_pv.m_tree,
				   field_reg->get_field (),
				   NULL_TREE),
			   parent_pv.m_stack_depth);
    }

  /* Handle binops: representable if both operands are.  */
  if (const binop_svalue *binop_sval = sval->dyn_cast_binop_svalue ())
    if (path_var lhs_pv
	  = get_representative_path_var (binop_sval->get_arg0 (), visited,
					 logger))
      if (path_var rhs_pv
	    = get_representative_path_var (binop_sval->get_arg1 (), visited,
					   logger))
	return path_var (build2 (binop_sval->get_op (),
				 sval->get_type (),
				 lhs_pv.m_tree, rhs_pv.m_tree),
			 lhs_pv.m_stack_depth);

  if (pvs.length () < 1)
    return path_var (NULL_TREE, 0);

  /* Of the remaining candidates, pick the most human-readable one.  */
  pvs.qsort (readability_comparator);
  return pvs[0];
}
5922 :
/* Attempt to return a path_var that represents SVAL, or return NULL_TREE.
   Use VISITED to prevent infinite mutual recursion with the overload for
   regions.

   This function defers to get_representative_path_var_1 to do the work;
   it adds verification that get_representative_path_var_1 returned a tree
   of the correct type.  */

path_var
region_model::get_representative_path_var (const svalue *sval,
					   svalue_set *visited,
					   logger *logger) const
{
  /* A null svalue has no representation.  */
  if (sval == nullptr)
    return path_var (NULL_TREE, 0);

  LOG_SCOPE (logger);
  /* Log the svalue on entry, for debugging.  */
  if (logger)
    {
      logger->start_log_line ();
      logger->log_partial ("sval: ");
      sval->dump_to_pp (logger->get_printer (), true);
      logger->end_log_line ();
    }

  tree orig_type = sval->get_type ();

  path_var result = get_representative_path_var_1 (sval, visited, logger);

  /* Verify that the result has the same type as SVAL, if any.  */
  if (result.m_tree && orig_type)
    gcc_assert (TREE_TYPE (result.m_tree) == orig_type);

  /* Log the svalue and resulting tree on exit, for debugging.  */
  if (logger)
    {
      logger->start_log_line ();
      logger->log_partial ("sval: ");
      sval->dump_to_pp (logger->get_printer (), true);
      logger->end_log_line ();

      if (result.m_tree)
	logger->log ("tree: %qE", result.m_tree);
      else
	logger->log ("tree: NULL");
    }

  return result;
}
5971 :
5972 : /* Attempt to return a tree that represents SVAL, or return NULL_TREE.
5973 :
5974 : Strip off any top-level cast, to avoid messages like
5975 : double-free of '(void *)ptr'
5976 : from analyzer diagnostics. */
5977 :
5978 : tree
5979 14252 : region_model::get_representative_tree (const svalue *sval, logger *logger) const
5980 : {
5981 14252 : svalue_set visited;
5982 14252 : tree expr = get_representative_path_var (sval, &visited, logger).m_tree;
5983 :
5984 : /* Strip off any top-level cast. */
5985 14252 : if (expr && TREE_CODE (expr) == NOP_EXPR)
5986 459 : expr = TREE_OPERAND (expr, 0);
5987 :
5988 14252 : return fixup_tree_for_diagnostic (expr);
5989 14252 : }
5990 :
5991 : tree
5992 775 : region_model::get_representative_tree (const region *reg, logger *logger) const
5993 : {
5994 775 : svalue_set visited;
5995 775 : tree expr = get_representative_path_var (reg, &visited, logger).m_tree;
5996 :
5997 : /* Strip off any top-level cast. */
5998 775 : if (expr && TREE_CODE (expr) == NOP_EXPR)
5999 0 : expr = TREE_OPERAND (expr, 0);
6000 :
6001 775 : return fixup_tree_for_diagnostic (expr);
6002 775 : }
6003 :
/* Implementation of region_model::get_representative_path_var.

   Attempt to return a path_var that represents REG, or return
   the NULL path_var.
   For example, a region for a field of a local would be a path_var
   wrapping a COMPONENT_REF.
   Use VISITED to prevent infinite mutual recursion with the overload for
   svalues.  */

path_var
region_model::get_representative_path_var_1 (const region *reg,
					     svalue_set *visited,
					     logger *logger) const
{
  switch (reg->get_kind ())
    {
    default:
      gcc_unreachable ();

    case RK_FRAME:
    case RK_GLOBALS:
    case RK_CODE:
    case RK_HEAP:
    case RK_STACK:
    case RK_THREAD_LOCAL:
    case RK_ROOT:
      /* Regions that represent memory spaces are not expressible as trees.  */
      return path_var (NULL_TREE, 0);

    case RK_FUNCTION:
      {
	/* A function is represented by its FUNCTION_DECL.  */
	const function_region *function_reg
	  = as_a <const function_region *> (reg);
	return path_var (function_reg->get_fndecl (), 0);
      }
    case RK_LABEL:
      {
	/* A label is represented by its LABEL_DECL.  */
	const label_region *label_reg = as_a <const label_region *> (reg);
	return path_var (label_reg->get_label (), 0);
      }

    case RK_SYMBOLIC:
      {
	/* A symbolic region (*PTR) becomes a MEM_REF with zero offset,
	   provided the pointer itself is representable.  */
	const symbolic_region *symbolic_reg
	  = as_a <const symbolic_region *> (reg);
	const svalue *pointer = symbolic_reg->get_pointer ();
	path_var pointer_pv = get_representative_path_var (pointer, visited,
							   logger);
	if (!pointer_pv)
	  return path_var (NULL_TREE, 0);
	tree offset = build_int_cst (pointer->get_type (), 0);
	return path_var (build2 (MEM_REF,
				 reg->get_type (),
				 pointer_pv.m_tree,
				 offset),
			 pointer_pv.m_stack_depth);
      }
    case RK_DECL:
      {
	/* A declaration is represented by the decl itself, at the
	   region's stack depth.  */
	const decl_region *decl_reg = as_a <const decl_region *> (reg);
	return path_var (decl_reg->get_decl (), decl_reg->get_stack_depth ());
      }
    case RK_FIELD:
      {
	/* A field becomes a COMPONENT_REF on its parent's representation.  */
	const field_region *field_reg = as_a <const field_region *> (reg);
	path_var parent_pv
	  = get_representative_path_var (reg->get_parent_region (), visited,
					 logger);
	if (!parent_pv)
	  return path_var (NULL_TREE, 0);
	return path_var (build3 (COMPONENT_REF,
				 reg->get_type (),
				 parent_pv.m_tree,
				 field_reg->get_field (),
				 NULL_TREE),
			 parent_pv.m_stack_depth);
      }

    case RK_ELEMENT:
      {
	/* An array element becomes an ARRAY_REF, provided both the parent
	   and the index are representable.  */
	const element_region *element_reg
	  = as_a <const element_region *> (reg);
	path_var parent_pv
	  = get_representative_path_var (reg->get_parent_region (), visited,
					 logger);
	if (!parent_pv)
	  return path_var (NULL_TREE, 0);
	path_var index_pv
	  = get_representative_path_var (element_reg->get_index (), visited,
					 logger);
	if (!index_pv)
	  return path_var (NULL_TREE, 0);
	return path_var (build4 (ARRAY_REF,
				 reg->get_type (),
				 parent_pv.m_tree, index_pv.m_tree,
				 NULL_TREE, NULL_TREE),
			 parent_pv.m_stack_depth);
      }

    case RK_OFFSET:
      {
	/* A byte-offset region becomes *(char-ptr)(&PARENT + OFFSET);
	   only constant offsets are expressible.  */
	const offset_region *offset_reg
	  = as_a <const offset_region *> (reg);
	path_var parent_pv
	  = get_representative_path_var (reg->get_parent_region (), visited,
					 logger);
	if (!parent_pv)
	  return path_var (NULL_TREE, 0);
	path_var offset_pv
	  = get_representative_path_var (offset_reg->get_byte_offset (),
					 visited, logger);
	if (!offset_pv || TREE_CODE (offset_pv.m_tree) != INTEGER_CST)
	  return path_var (NULL_TREE, 0);
	tree addr_parent = build1 (ADDR_EXPR,
				   build_pointer_type (reg->get_type ()),
				   parent_pv.m_tree);
	tree ptype = build_pointer_type_for_mode (char_type_node, ptr_mode,
						  true);
	return path_var (build2 (MEM_REF, reg->get_type (), addr_parent,
				 fold_convert (ptype, offset_pv.m_tree)),
			 parent_pv.m_stack_depth);
      }

    case RK_SIZED:
      /* Sized subregions have no tree representation.  */
      return path_var (NULL_TREE, 0);

    case RK_CAST:
      {
	/* A cast region wraps its parent's representation in a NOP_EXPR.  */
	path_var parent_pv
	  = get_representative_path_var (reg->get_parent_region (), visited,
					 logger);
	if (!parent_pv)
	  return path_var (NULL_TREE, 0);
	return path_var (build1 (NOP_EXPR,
				 reg->get_type (),
				 parent_pv.m_tree),
			 parent_pv.m_stack_depth);
      }

    case RK_HEAP_ALLOCATED:
    case RK_ALLOCA:
      /* No good way to express heap-allocated/alloca regions as trees.  */
      return path_var (NULL_TREE, 0);

    case RK_STRING:
      {
	/* A string region is represented by its STRING_CST.  */
	const string_region *string_reg = as_a <const string_region *> (reg);
	return path_var (string_reg->get_string_cst (), 0);
      }

    case RK_VAR_ARG:
    case RK_ERRNO:
    case RK_UNKNOWN:
    case RK_PRIVATE:
      /* These internal region kinds have no tree representation.  */
      return path_var (NULL_TREE, 0);
    }
}
6161 :
/* Attempt to return a path_var that represents REG, or return
   the NULL path_var.
   For example, a region for a field of a local would be a path_var
   wrapping a COMPONENT_REF.
   Use VISITED to prevent infinite mutual recursion with the overload for
   svalues.

   This function defers to get_representative_path_var_1 to do the work;
   it adds verification that get_representative_path_var_1 returned a tree
   of the correct type.  */

path_var
region_model::get_representative_path_var (const region *reg,
					   svalue_set *visited,
					   logger *logger) const
{
  LOG_SCOPE (logger);
  /* Log the region on entry, for debugging.  */
  if (logger)
    {
      logger->start_log_line ();
      logger->log_partial ("reg: ");
      reg->dump_to_pp (logger->get_printer (), true);
      logger->end_log_line ();
    }

  path_var result = get_representative_path_var_1 (reg, visited, logger);

  /* Verify that the result has the same type as REG, if any.  */
  if (result.m_tree && reg->get_type ())
    gcc_assert (TREE_TYPE (result.m_tree) == reg->get_type ());

  /* Log the region and resulting tree on exit, for debugging.  */
  if (logger)
    {
      logger->start_log_line ();
      logger->log_partial ("reg: ");
      reg->dump_to_pp (logger->get_printer (), true);
      logger->end_log_line ();

      if (result.m_tree)
	logger->log ("tree: %qE", result.m_tree);
      else
	logger->log ("tree: NULL");
    }

  return result;
}
6208 :
6209 : /* Push a new frame_region on to the stack region.
6210 : Populate the frame_region with child regions for the function call's
6211 : parameters, using values from the arguments at the callsite in the
6212 : caller's frame. */
6213 :
6214 : void
6215 11361 : region_model::update_for_gcall (const gcall &call_stmt,
6216 : region_model_context *ctxt,
6217 : function *callee)
6218 : {
6219 : /* Build a vec of argument svalues, using the current top
6220 : frame for resolving tree expressions. */
6221 11361 : auto_vec<const svalue *> arg_svals (gimple_call_num_args (&call_stmt));
6222 :
6223 24029 : for (unsigned i = 0; i < gimple_call_num_args (&call_stmt); i++)
6224 : {
6225 12668 : tree arg = gimple_call_arg (&call_stmt, i);
6226 12668 : arg_svals.quick_push (get_rvalue (arg, ctxt));
6227 : }
6228 :
6229 11361 : if(!callee)
6230 : {
6231 : /* Get the function * from the gcall. */
6232 0 : tree fn_decl = get_fndecl_for_call (call_stmt, ctxt);
6233 0 : callee = DECL_STRUCT_FUNCTION (fn_decl);
6234 : }
6235 :
6236 0 : gcc_assert (callee);
6237 11361 : push_frame (*callee, &call_stmt, &arg_svals, ctxt);
6238 11361 : }
6239 :
6240 : /* Pop the top-most frame_region from the stack, and copy the return
6241 : region's values (if any) into the region for the lvalue of the LHS of
6242 : the call (if any). */
6243 :
6244 : void
6245 8310 : region_model::update_for_return_gcall (const gcall &call_stmt,
6246 : region_model_context *ctxt)
6247 : {
6248 : /* Get the lvalue for the result of the call, passing it to pop_frame,
6249 : so that pop_frame can determine the region with respect to the
6250 : *caller* frame. */
6251 8310 : tree lhs = gimple_call_lhs (&call_stmt);
6252 8310 : pop_frame (lhs, nullptr, ctxt, &call_stmt);
6253 8310 : }
6254 :
6255 : /* Attempt to use R to replay SUMMARY into this object.
6256 : Return true if it is possible. */
6257 :
6258 : bool
6259 1631 : region_model::replay_call_summary (call_summary_replay &r,
6260 : const region_model &summary)
6261 : {
6262 1631 : gcc_assert (summary.get_stack_depth () == 1);
6263 :
6264 1631 : m_store.replay_call_summary (r, summary.m_store);
6265 :
6266 1631 : if (r.get_ctxt ())
6267 1544 : r.get_ctxt ()->maybe_did_work ();
6268 :
6269 1631 : if (!m_constraints->replay_call_summary (r, *summary.m_constraints))
6270 : return false;
6271 :
6272 4396 : for (auto kv : summary.m_dynamic_extents)
6273 : {
6274 1445 : const region *summary_reg = kv.first;
6275 1445 : const region *caller_reg = r.convert_region_from_summary (summary_reg);
6276 1445 : if (!caller_reg)
6277 2 : continue;
6278 1443 : const svalue *summary_sval = kv.second;
6279 1443 : const svalue *caller_sval = r.convert_svalue_from_summary (summary_sval);
6280 1443 : if (!caller_sval)
6281 0 : continue;
6282 1443 : m_dynamic_extents.put (caller_reg, caller_sval);
6283 : }
6284 :
6285 1506 : return true;
6286 : }
6287 :
6288 : /* For use with push_frame when handling a top-level call within the analysis.
6289 : PARAM has a defined but unknown initial value.
6290 : Anything it points to has escaped, since the calling context "knows"
6291 : the pointer, and thus calls to unknown functions could read/write into
6292 : the region.
6293 : If NONNULL is true, then assume that PARAM must be non-NULL. */
6294 :
6295 : void
6296 18335 : region_model::on_top_level_param (tree param,
6297 : bool nonnull,
6298 : region_model_context *ctxt)
6299 : {
6300 18335 : if (POINTER_TYPE_P (TREE_TYPE (param)))
6301 : {
6302 8709 : const region *param_reg = get_lvalue (param, ctxt);
6303 8709 : const svalue *init_ptr_sval
6304 8709 : = m_mgr->get_or_create_initial_value (param_reg);
6305 8709 : const region *pointee_reg = m_mgr->get_symbolic_region (init_ptr_sval);
6306 8709 : store_manager *store_mgr = m_mgr->get_store_manager ();
6307 8709 : m_store.mark_as_escaped (*store_mgr, pointee_reg);
6308 8709 : if (nonnull)
6309 : {
6310 455 : const svalue *null_ptr_sval
6311 455 : = m_mgr->get_or_create_null_ptr (TREE_TYPE (param));
6312 455 : add_constraint (init_ptr_sval, NE_EXPR, null_ptr_sval, ctxt);
6313 : }
6314 : }
6315 18335 : }
6316 :
6317 : /* Update this region_model to reflect pushing a frame onto the stack
6318 : for a call to FUN.
6319 :
6320 : If CALL_STMT is non-NULL, this is for the interprocedural case where
6321 : we already have an execution path into the caller. It can be NULL for
6322 : top-level entrypoints into the analysis, or in selftests.
6323 :
6324 : If ARG_SVALS is non-NULL, use it to populate the parameters
6325 : in the new frame.
6326 : Otherwise, the params have their initial_svalues.
6327 :
6328 : Return the frame_region for the new frame. */
6329 :
6330 : const region *
6331 31042 : region_model::push_frame (const function &fun,
6332 : const gcall *call_stmt,
6333 : const vec<const svalue *> *arg_svals,
6334 : region_model_context *ctxt)
6335 : {
 : /* Two modes below: if ARG_SVALS is non-null this is an
 : interprocedural call within the analysis, and the argument
 : values are copied from the caller; if null, FUN is a top-level
 : analysis entry point and the params get unknown values. */
6336 31042 : tree fndecl = fun.decl;
6337 31042 : if (arg_svals)
6338 : {
6339 : /* If the result of the callee is DECL_BY_REFERENCE, then
6340 : we'll need to store a reference to the caller's lhs of
6341 : CALL_STMT within callee's result.
6342 : If so, determine the region of CALL_STMT's lhs within
6343 : the caller's frame before updating m_current_frame. */
6344 11361 : const region *caller_return_by_reference_reg = nullptr;
6345 11361 : if (tree result = DECL_RESULT (fndecl))
6346 11361 : if (DECL_BY_REFERENCE (result))
6347 : {
6348 39 : gcc_assert (call_stmt);
6349 39 : tree lhs = gimple_call_lhs (call_stmt);
6350 39 : gcc_assert (lhs);
6351 39 : caller_return_by_reference_reg = get_lvalue (lhs, ctxt);
6352 : }
6353 :
6354 : /* Update m_current_frame. */
6355 11361 : m_current_frame = m_mgr->get_frame_region (m_current_frame, fun);
6356 :
6357 : /* Arguments supplied from a caller frame. */
6358 11361 : unsigned idx = 0;
6359 23611 : for (tree iter_parm = DECL_ARGUMENTS (fndecl); iter_parm;
6360 12250 : iter_parm = DECL_CHAIN (iter_parm), ++idx)
6361 : {
6362 : /* If there's a mismatching declaration, the call stmt might
6363 : not have enough args. Handle this case by leaving the
6364 : rest of the params as uninitialized. */
6365 12253 : if (idx >= arg_svals->length ())
6366 : break;
6367 12250 : tree parm_lval = iter_parm;
 : /* Prefer binding the SSA default-def when one exists, since
 : that is what the function body will read. */
6368 12250 : if (tree parm_default_ssa = get_ssa_default_def (fun, iter_parm))
6369 11262 : parm_lval = parm_default_ssa;
6370 12250 : const region *parm_reg = get_lvalue (parm_lval, ctxt);
6371 12250 : const svalue *arg_sval = (*arg_svals)[idx];
6372 12250 : set_value (parm_reg, arg_sval, ctxt);
6373 : }
6374 :
6375 : /* Handle any variadic args. */
6376 : unsigned va_arg_idx = 0;
6377 11779 : for (; idx < arg_svals->length (); idx++, va_arg_idx++)
6378 : {
6379 418 : const svalue *arg_sval = (*arg_svals)[idx];
6380 418 : const region *var_arg_reg
6381 418 : = m_mgr->get_var_arg_region (m_current_frame,
6382 : va_arg_idx);
6383 418 : set_value (var_arg_reg, arg_sval, ctxt);
6384 : }
6385 :
6386 : /* If the result of the callee is DECL_BY_REFERENCE, then above
6387 : we should have determined the region within the
6388 : caller's frame that the callee will be writing back to.
6389 : Use this now to initialize the reference in callee's frame. */
6390 11361 : if (tree result = DECL_RESULT (fndecl))
6391 11361 : if (DECL_BY_REFERENCE (result))
6392 : {
6393 : /* Get reference to the caller lhs. */
6394 39 : gcc_assert (caller_return_by_reference_reg);
6395 39 : const svalue *ref_sval
6396 39 : = m_mgr->get_ptr_svalue (TREE_TYPE (result),
6397 : caller_return_by_reference_reg);
6398 :
6399 : /* Get region for default val of DECL_RESULT within the
6400 : callee. */
6401 39 : if (tree result_default_ssa = get_ssa_default_def (fun, result))
6402 : {
6403 36 : const region *callee_result_reg
6404 36 : = get_lvalue (result_default_ssa, ctxt);
6405 :
6406 : /* Set the callee's reference to refer to the caller's lhs. */
6407 36 : set_value (callee_result_reg, ref_sval, ctxt);
6408 : }
6409 : }
6410 : }
6411 : else
6412 : {
6413 : /* Otherwise we have a top-level call within the analysis. The params
6414 : have defined but unknown initial values.
6415 : Anything they point to has escaped. */
6416 :
6417 : /* Update m_current_frame. */
6418 19681 : m_current_frame = m_mgr->get_frame_region (m_current_frame, fun);
6419 :
6420 : /* Handle "__attribute__((nonnull))". */
6421 19681 : tree fntype = TREE_TYPE (fndecl);
6422 19681 : bitmap nonnull_args = get_nonnull_args (fntype);
6423 :
6424 19681 : unsigned parm_idx = 0;
6425 38016 : for (tree iter_parm = DECL_ARGUMENTS (fndecl); iter_parm;
6426 18335 : iter_parm = DECL_CHAIN (iter_parm))
6427 : {
 : /* An empty non-null bitmap means every parameter is
 : treated as non-null. */
6428 18335 : bool non_null = (nonnull_args
6429 18335 : ? (bitmap_empty_p (nonnull_args)
6430 514 : || bitmap_bit_p (nonnull_args, parm_idx))
6431 18335 : : false);
6432 18335 : if (tree parm_default_ssa = get_ssa_default_def (fun, iter_parm))
6433 15455 : on_top_level_param (parm_default_ssa, non_null, ctxt);
6434 : else
6435 2880 : on_top_level_param (iter_parm, non_null, ctxt);
6436 18335 : parm_idx++;
6437 : }
6438 :
6439 19681 : BITMAP_FREE (nonnull_args);
6440 : }
6441 :
6442 31042 : return m_current_frame;
6443 : }
6444 :
6445 : /* Get the function of the top-most frame in this region_model's stack.
6446 : There must be such a frame. */
6447 :
6448 : const function *
6449 184 : region_model::get_current_function () const
6450 : {
6451 184 : const frame_region *frame = get_current_frame ();
 : /* Enforce the documented precondition that the stack is non-empty. */
6452 184 : gcc_assert (frame);
6453 184 : return &frame->get_function ();
6454 : }
6455 :
6456 : /* Custom region_model_context for the assignment to the result
6457 : at a call statement when popping a frame (PR analyzer/106203). */
6458 :
6459 : class caller_context : public region_model_context_decorator
6460 : {
6461 : public:
6462 4135 : caller_context (region_model_context *inner,
6463 : const gcall *call_stmt,
6464 : const frame_region &caller_frame)
6465 4135 : : region_model_context_decorator (inner),
6466 4135 : m_call_stmt (call_stmt),
6467 4135 : m_caller_frame (caller_frame)
6468 : {}
6469 :
6470 : pending_location
6471 9 : get_pending_location_for_diag () const override
6472 : {
6473 9 : pending_location ploc
6474 9 : = region_model_context_decorator::get_pending_location_for_diag ();
6475 :
 : /* Override the event location so diagnostics are reported at the
 : call statement within the caller's frame, rather than within
 : the callee frame being popped. */
6476 9 : ploc.m_event_loc_info
6477 9 : = event_loc_info (m_call_stmt->location,
6478 9 : m_caller_frame.get_fndecl (),
6479 9 : m_caller_frame.get_stack_depth ());
6480 :
6481 9 : return ploc;
6482 : }
6483 :
6484 8279 : const gimple *get_stmt () const override
6485 : {
6486 8279 : return m_call_stmt;
6487 : };
6488 :
6489 : private:
 : /* The call statement in the caller; borrowed, not owned. */
6490 : const gcall *m_call_stmt;
 : /* The caller's frame (the frame remaining after the pop). */
6491 : const frame_region &m_caller_frame;
6492 : };
6493 :
6494 :
6495 : /* Pop the topmost frame_region from this region_model's stack;
6496 :
6497 : If RESULT_LVALUE is non-null, copy any return value from the frame
6498 : into the corresponding region (evaluated with respect to the *caller*
6499 : frame, rather than the called frame).
6500 : If OUT_RESULT is non-null, copy any return value from the frame
6501 : into *OUT_RESULT.
6502 :
6503 : If non-null, use CALL_STMT as the location when complaining about
6504 : assignment of the return value to RESULT_LVALUE.
6505 :
6506 : If EVAL_RETURN_SVALUE is false, then don't evaluate the return value.
6507 : This is for use when unwinding frames e.g. due to longjmp, to suppress
6508 : erroneously reporting uninitialized return values.
6509 :
6510 : Purge the frame region and all its descendent regions.
6511 : Convert any pointers that point into such regions into
6512 : poison_kind::popped_stack svalues. */
6513 :
6514 : void
6515 26068 : region_model::pop_frame (tree result_lvalue,
6516 : const svalue **out_result,
6517 : region_model_context *ctxt,
6518 : const gcall *call_stmt,
6519 : bool eval_return_svalue)
6520 : {
6521 26068 : gcc_assert (m_current_frame);
6522 :
 : /* Snapshot the pre-pop state for the analyzer-events channel below. */
6523 26068 : const region_model pre_popped_model = *this;
6524 26068 : const frame_region *frame_reg = m_current_frame;
6525 :
6526 : /* Notify state machines. */
6527 26068 : if (ctxt)
6528 23818 : ctxt->on_pop_frame (frame_reg);
6529 :
6530 : /* Evaluate the result, within the callee frame. */
6531 26068 : tree fndecl = m_current_frame->get_function ().decl;
6532 26068 : tree result = DECL_RESULT (fndecl);
6533 26068 : const svalue *retval = nullptr;
6534 26068 : if (result
6535 26060 : && TREE_TYPE (result) != void_type_node
6536 37998 : && eval_return_svalue)
6537 : {
6538 9706 : retval = get_rvalue (result, ctxt);
6539 9706 : if (out_result)
6540 5211 : *out_result = retval;
6541 : }
6542 :
6543 : /* Pop the frame. */
6544 26068 : m_current_frame = m_current_frame->get_calling_frame ();
6545 :
6546 26068 : if (result_lvalue
6547 26068 : && retval
6548 : /* Don't write back for DECL_BY_REFERENCE; the writes
6549 : should have happened within the callee already. */
6550 26068 : && !DECL_BY_REFERENCE (result))
6551 : {
6552 4135 : gcc_assert (eval_return_svalue);
6553 :
6554 : /* Compute result_dst_reg using RESULT_LVALUE *after* popping
6555 : the frame, but before poisoning pointers into the old frame. */
6556 4135 : const region *result_dst_reg = get_lvalue (result_lvalue, ctxt);
6557 :
6558 : /* Assign retval to result_dst_reg, using caller_context
6559 : to set the call_stmt and the popped_frame for any diagnostics
6560 : due to the assignment. */
6561 4135 : gcc_assert (m_current_frame);
6562 4135 : caller_context caller_ctxt (ctxt, call_stmt, *m_current_frame);
6563 4135 : set_value (result_dst_reg, retval, call_stmt ? &caller_ctxt : ctxt);
6564 : }
6565 :
 : /* Purge the popped frame's bindings and poison any pointers that
 : still point into it. */
6566 26068 : unbind_region_and_descendents (frame_reg, poison_kind::popped_stack);
6567 :
 : /* Publish an on_frame_popped event if the analyzer-events channel
 : is active. */
6568 26068 : if (auto chan = g->get_channels ().analyzer_events_channel.get_if_active ())
6569 : {
6570 236 : gcc::topics::analyzer_events::on_frame_popped msg
6571 236 : {this, &pre_popped_model, retval, ctxt};
6572 236 : chan->publish (msg);
6573 : }
6574 26068 : }
6575 :
6576 : /* Get the number of frames in this region_model's stack,
 : returning 0 if the stack is empty. */
6577 :
6578 : int
6579 5413742 : region_model::get_stack_depth () const
6580 : {
6581 5413742 : const frame_region *frame = get_current_frame ();
6582 5413742 : if (frame)
6583 5396854 : return frame->get_stack_depth ();
6584 : else
6585 : return 0;
6586 : }
6587 :
6588 : /* Get the frame_region with the given index within the stack.
6589 : The frame_region must exist. */
6590 :
6591 : const frame_region *
6592 1613082 : region_model::get_frame_at_index (int index) const
6593 : {
6594 1613082 : const frame_region *frame = get_current_frame ();
6595 1613082 : gcc_assert (frame);
6596 1613082 : gcc_assert (index >= 0);
6597 1613082 : gcc_assert (index <= frame->get_index ());
 : /* Walk from the innermost frame outwards to its callers;
 : frame indices decrease towards the outermost frame. */
6598 1838931 : while (index != frame->get_index ())
6599 : {
6600 225849 : frame = frame->get_calling_frame ();
6601 225849 : gcc_assert (frame);
6602 : }
6603 1613082 : return frame;
6604 : }
6605 :
6606 : /* Unbind svalues for any regions in REG and below.
6607 : Find any pointers to such regions; convert them to
6608 : poisoned values of kind PKIND.
6609 : Also purge any dynamic extents. */
6610 :
6611 : void
6612 36668 : region_model::unbind_region_and_descendents (const region *reg,
6613 : enum poison_kind pkind)
6614 : {
6615 : /* Gather a set of base regions to be unbound. */
 : /* Two phases (gather, then purge) so that the store's cluster map
 : isn't mutated while it is being iterated over. */
6616 36668 : hash_set<const region *> base_regs;
6617 205722 : for (store::cluster_map_t::iterator iter = m_store.begin ();
6618 374776 : iter != m_store.end (); ++iter)
6619 : {
6620 169054 : const region *iter_base_reg = (*iter).first;
6621 169054 : if (iter_base_reg->descendent_of_p (reg))
6622 34994 : base_regs.add (iter_base_reg);
6623 : }
6624 71662 : for (hash_set<const region *>::iterator iter = base_regs.begin ();
6625 106656 : iter != base_regs.end (); ++iter)
6626 34994 : m_store.purge_cluster (*iter);
6627 :
6628 : /* Find any pointers to REG or its descendents; convert to poisoned. */
6629 36668 : poison_any_pointers_to_descendents (reg, pkind);
6630 :
6631 : /* Purge dynamic extents of any base regions in REG and below
6632 : (e.g. VLAs and alloca stack regions). */
6633 110190 : for (auto iter : m_dynamic_extents)
6634 : {
6635 18427 : const region *iter_reg = iter.first;
6636 18427 : if (iter_reg->descendent_of_p (reg))
6637 6013 : unset_dynamic_extents (iter_reg);
6638 : }
6639 36668 : }
6640 :
6641 : /* Find any pointers to REG or its descendents; convert them to
6642 : poisoned values of kind PKIND. */
6643 :
6644 : void
6645 36668 : region_model::poison_any_pointers_to_descendents (const region *reg,
6646 : enum poison_kind pkind)
6647 : {
 : /* Scan every binding of every cluster for region_svalues (pointers)
 : whose pointee lies within REG. */
6648 304788 : for (const auto &cluster_iter : m_store)
6649 : {
6650 134060 : binding_cluster *cluster = cluster_iter.second;
6651 134060 : for (auto iter = cluster->begin ();
6652 269586 : iter != cluster->end ();
6653 135526 : ++iter)
6654 : {
6655 135526 : auto bp = *iter;
6656 135526 : const svalue *sval = bp.m_sval;
6657 135526 : if (const region_svalue *ptr_sval = sval->dyn_cast_region_svalue ())
6658 : {
6659 31061 : const region *ptr_dst = ptr_sval->get_pointee ();
6660 : /* Poison ptrs to descendents of REG, but not to REG itself,
6661 : otherwise double-free detection doesn't work (since sm-state
6662 : for "free" is stored on the original ptr svalue). */
6663 31061 : if (ptr_dst->descendent_of_p (reg)
6664 31061 : && ptr_dst != reg)
6665 : {
6666 133 : const svalue *new_sval
6667 133 : = m_mgr->get_or_create_poisoned_svalue (pkind,
6668 : sval->get_type ());
 : /* Replace the binding in place, preserving the key. */
6669 133 : cluster->get_map ().overwrite (iter, new_sval);
6670 : }
6671 : }
6672 : }
6673 : }
6674 36668 : }
6675 :
6676 : /* Attempt to merge THIS with OTHER_MODEL, writing the result
6677 : to OUT_MODEL. Use POINT to distinguish values created as a
6678 : result of merging. */
 : /* NOTE: on returning false, OUT_MODEL may have been partially
 : written to; callers must not rely on its contents in that case. */
6679 :
6680 : bool
6681 148044 : region_model::can_merge_with_p (const region_model &other_model,
6682 : const program_point &point,
6683 : region_model *out_model,
6684 : const extrinsic_state *ext_state,
6685 : const program_state *state_a,
6686 : const program_state *state_b) const
6687 : {
6688 148044 : gcc_assert (out_model);
6689 148044 : gcc_assert (m_mgr == other_model.m_mgr);
6690 148044 : gcc_assert (m_mgr == out_model->m_mgr);
6691 :
 : /* Models with different call stacks can never merge. */
6692 148044 : if (m_current_frame != other_model.m_current_frame)
6693 : return false;
6694 148044 : out_model->m_current_frame = m_current_frame;
6695 :
6696 148044 : model_merger m (this, &other_model, point, out_model,
6697 148044 : ext_state, state_a, state_b);
6698 :
6699 148044 : if (!store::can_merge_p (&m_store, &other_model.m_store,
6700 148044 : &out_model->m_store, m_mgr->get_store_manager (),
6701 : &m))
6702 : return false;
6703 :
6704 42241 : if (!m_dynamic_extents.can_merge_with_p (other_model.m_dynamic_extents,
6705 : &out_model->m_dynamic_extents))
6706 : return false;
6707 :
6708 : /* Merge constraints. */
6709 40261 : constraint_manager::merge (*m_constraints,
6710 40261 : *other_model.m_constraints,
6711 : out_model->m_constraints);
6712 :
 : /* Drop constraints on svalues whose meaning changed during the merge. */
6713 40989 : for (auto iter : m.m_svals_changing_meaning)
6714 728 : out_model->m_constraints->purge_state_involving (iter);
6715 :
6716 40261 : if (m_thrown_exceptions_stack != other_model.m_thrown_exceptions_stack)
6717 : return false;
6718 40232 : out_model->m_thrown_exceptions_stack = m_thrown_exceptions_stack;
6719 :
6720 40232 : if (m_caught_exceptions_stack != other_model.m_caught_exceptions_stack)
6721 : return false;
6722 40232 : out_model->m_caught_exceptions_stack = m_caught_exceptions_stack;
6723 :
6724 40232 : return true;
6725 148044 : }
6726 :
6727 : /* Attempt to get the fndecl used at CALL, if known, or NULL_TREE
6728 : otherwise. */
6729 :
6730 : tree
6731 925819 : region_model::get_fndecl_for_call (const gcall &call,
6732 : region_model_context *ctxt)
6733 : {
 : /* NULL_TREE for e.g. internal function calls with no fn pointer. */
6734 925819 : tree fn_ptr = gimple_call_fn (&call);
6735 925819 : if (fn_ptr == NULL_TREE)
6736 : return NULL_TREE;
6737 883929 : const svalue *fn_ptr_sval = get_rvalue (fn_ptr, ctxt);
6738 1767858 : if (const region_svalue *fn_ptr_ptr
6739 883929 : = fn_ptr_sval->dyn_cast_region_svalue ())
6740 : {
6741 877921 : const region *reg = fn_ptr_ptr->get_pointee ();
6742 877921 : if (const function_region *fn_reg = reg->dyn_cast_function_region ())
6743 : {
6744 877865 : tree fn_decl = fn_reg->get_fndecl ();
6745 877865 : cgraph_node *node = cgraph_node::get (fn_decl);
6746 877865 : if (!node)
6747 : return NULL_TREE;
 : /* Resolve through any aliases to the ultimate target decl. */
6748 877865 : const cgraph_node *ultimate_node = node->ultimate_alias_target ();
6749 877865 : if (ultimate_node)
6750 877865 : return ultimate_node->decl;
6751 : }
6752 : }
6753 :
6754 : return NULL_TREE;
6755 : }
6756 :
6757 : /* Would be much simpler to use a lambda here, if it were supported. */
 : /* Plumbing for region_model::append_regions_cb: carries the model
 : being queried and the output vector to populate. */
6758 :
6759 : struct append_regions_cb_data
6760 : {
 : /* The model whose current frame is being inspected; not owned. */
6761 : const region_model *model;
 : /* Where matching decl_regions are accumulated; not owned. */
6762 : auto_vec<const decl_region *> *out;
6763 : };
6764 :
6765 : /* Populate *OUT with all decl_regions in the current
6766 : frame that have clusters within the store. */
6767 :
6768 : void
6769 393678 : region_model::
6770 : get_regions_for_current_frame (auto_vec<const decl_region *> *out) const
6771 : {
 : /* Delegate the per-cluster filtering to append_regions_cb. */
6772 393678 : append_regions_cb_data data;
6773 393678 : data.model = this;
6774 393678 : data.out = out;
6775 393678 : m_store.for_each_cluster (append_regions_cb, &data);
6776 393678 : }
6777 :
6778 : /* Implementation detail of get_regions_for_current_frame. */
6779 :
6780 : void
6781 3096853 : region_model::append_regions_cb (const region *base_reg,
6782 : append_regions_cb_data *cb_data)
6783 : {
 : /* Only consider regions that are direct children of the current
 : frame, and of those only decl_regions. */
6784 3096853 : if (base_reg->get_parent_region () != cb_data->model->m_current_frame)
6785 : return;
6786 1759956 : if (const decl_region *decl_reg = base_reg->dyn_cast_decl_region ())
6787 1741623 : cb_data->out->safe_push (decl_reg);
6788 : }
6789 :
6790 :
6791 : /* Abstract class for diagnostics related to the use of
6792 : floating-point arithmetic where precision is needed. */
 : /* Subclasses share the -Wanalyzer-imprecise-fp-arithmetic option. */
6793 :
6794 25 : class imprecise_floating_point_arithmetic : public pending_diagnostic
6795 : {
6796 : public:
6797 50 : int get_controlling_option () const final override
6798 : {
6799 50 : return OPT_Wanalyzer_imprecise_fp_arithmetic;
6800 : }
6801 : };
6802 :
6803 : /* Concrete diagnostic to complain about uses of floating-point arithmetic
6804 : in the size argument of malloc etc. */
6805 :
6806 : class float_as_size_arg : public imprecise_floating_point_arithmetic
6807 : {
6808 : public:
 : /* ARG may be NULL_TREE when no representative tree is available
 : (see describe_final_event's fallback wording). */
6809 25 : float_as_size_arg (tree arg) : m_arg (arg)
6810 : {}
6811 :
6812 305 : const char *get_kind () const final override
6813 : {
6814 305 : return "float_as_size_arg_diagnostic";
6815 : }
6816 :
6817 25 : bool subclass_equal_p (const pending_diagnostic &other) const final override
6818 : {
6819 25 : return same_tree_p (m_arg, ((const float_as_size_arg &) other).m_arg);
6820 : }
6821 :
6822 25 : bool emit (diagnostic_emission_context &ctxt) final override
6823 : {
6824 25 : bool warned = ctxt.warn ("use of floating-point arithmetic here might"
6825 : " yield unexpected results");
6826 25 : if (warned)
6827 25 : inform (ctxt.get_location (),
6828 : "only use operands of an integer type"
6829 : " inside the size argument");
6830 25 : return warned;
6831 : }
6832 :
6833 : bool
6834 50 : describe_final_event (pretty_printer &pp,
6835 : const evdesc::final_event &) final override
6836 : {
6837 50 : if (m_arg)
6838 50 : pp_printf (&pp,
6839 : "operand %qE is of type %qT",
6840 50 : m_arg, TREE_TYPE (m_arg));
6841 : else
6842 0 : pp_printf (&pp,
6843 : "at least one operand of the size argument is"
6844 : " of a floating-point type");
6845 50 : return true;
6846 : }
6847 :
6848 : private:
 : /* The offending operand, or NULL_TREE if unknown. */
6849 : tree m_arg;
6850 : };
6851 :
6852 : /* Visitor to find uses of floating-point variables/constants in an svalue. */
6853 :
6854 : class contains_floating_point_visitor : public visitor
6855 : {
6856 : public:
6857 7818 : contains_floating_point_visitor (const svalue *root_sval) : m_result (nullptr)
6858 : {
6859 7818 : root_sval->accept (this);
6860 : }
6861 :
6862 7818 : const svalue *get_svalue_to_report ()
6863 : {
6864 7818 : return m_result;
6865 : }
6866 :
6867 7600 : void visit_constant_svalue (const constant_svalue *sval) final override
6868 : {
6869 : /* At the point the analyzer runs, constant integer operands in a floating
6870 : point expression are already implicitly converted to floating-points.
6871 : Thus, we do prefer to report non-constants such that the diagnostic
6872 : always reports a floating-point operand. */
 : /* Hence the !m_result guard: a constant is only recorded if no
 : other candidate was found first. */
6873 7600 : tree type = sval->get_type ();
6874 7600 : if (type && FLOAT_TYPE_P (type) && !m_result)
6875 9 : m_result = sval;
6876 7600 : }
6877 :
6878 496 : void visit_conjured_svalue (const conjured_svalue *sval) final override
6879 : {
6880 496 : tree type = sval->get_type ();
6881 496 : if (type && FLOAT_TYPE_P (type))
6882 0 : m_result = sval;
6883 496 : }
6884 :
6885 949 : void visit_initial_svalue (const initial_svalue *sval) final override
6886 : {
6887 949 : tree type = sval->get_type ();
6888 949 : if (type && FLOAT_TYPE_P (type))
6889 16 : m_result = sval;
6890 949 : }
6891 :
6892 : private:
6893 : /* Non-null if at least one floating-point operand was found. */
6894 : const svalue *m_result;
6895 : };
6896 :
6897 : /* May complain about uses of floating-point operands in SIZE_IN_BYTES.
 : CTXT must be non-null. */
6898 :
6899 : void
6900 7818 : region_model::check_dynamic_size_for_floats (const svalue *size_in_bytes,
6901 : region_model_context *ctxt) const
6902 : {
6903 7818 : gcc_assert (ctxt);
6904 :
6905 7818 : contains_floating_point_visitor v (size_in_bytes);
6906 7818 : if (const svalue *float_sval = v.get_svalue_to_report ())
6907 : {
6908 25 : tree diag_arg = get_representative_tree (float_sval);
6909 25 : ctxt->warn (std::make_unique<float_as_size_arg> (diag_arg));
6910 : }
6911 7818 : }
6912 :
6913 : /* Return a region describing a heap-allocated block of memory.
6914 : Use CTXT to complain about tainted sizes.
6915 :
6916 : Reuse an existing heap_allocated_region if it's not being referenced by
6917 : this region_model; otherwise create a new one.
6918 :
6919 : Optionally (update_state_machine) transitions the pointer pointing to the
6920 : heap_allocated_region from start to assumed non-null. */
 : /* SIZE_IN_BYTES may be null, in which case no dynamic extent is
 : recorded for the new region. */
6921 :
6922 : const region *
6923 17376 : region_model::get_or_create_region_for_heap_alloc (const svalue *size_in_bytes,
6924 : region_model_context *ctxt,
6925 : bool update_state_machine,
6926 : const call_details *cd)
6927 : {
6928 : /* Determine which regions are referenced in this region_model, so that
6929 : we can reuse an existing heap_allocated_region if it's not in use on
6930 : this path. */
6931 17376 : auto_bitmap base_regs_in_use;
6932 17376 : get_referenced_base_regions (base_regs_in_use);
6933 :
6934 : /* Don't reuse regions that are marked as TOUCHED. */
6935 105783 : for (store::cluster_map_t::iterator iter = m_store.begin ();
6936 194190 : iter != m_store.end (); ++iter)
6937 88407 : if ((*iter).second->touched_p ())
6938 : {
6939 9346 : const region *base_reg = (*iter).first;
6940 9346 : bitmap_set_bit (base_regs_in_use, base_reg->get_id ());
6941 : }
6942 :
6943 17376 : const region *reg
6944 17376 : = m_mgr->get_or_create_region_for_heap_alloc (base_regs_in_use);
6945 17376 : if (size_in_bytes)
6946 11895 : if (compat_types_p (size_in_bytes->get_type (), size_type_node))
6947 11895 : set_dynamic_extents (reg, size_in_bytes, ctxt);
6948 :
 : /* Optionally mark the resulting pointer as assumed non-null. */
6949 17376 : if (update_state_machine && cd)
6950 : {
6951 0 : const svalue *ptr_sval
6952 0 : = m_mgr->get_ptr_svalue (cd->get_lhs_type (), reg);
6953 0 : transition_ptr_sval_non_null (ctxt, ptr_sval);
6954 : }
6955 :
6956 17376 : return reg;
6957 17376 : }
6958 :
6959 : /* Populate OUT_IDS with the set of IDs of those base regions which are
6960 : reachable in this region_model. */
6961 :
6962 : void
6963 19279 : region_model::get_referenced_base_regions (auto_bitmap &out_ids) const
6964 : {
6965 19279 : reachable_regions reachable_regs (const_cast<region_model *> (this));
6966 19279 : m_store.for_each_cluster (reachable_regions::init_cluster_cb,
6967 : &reachable_regs);
6968 : /* Get regions for locals that have explicitly bound values. */
6969 138100 : for (store::cluster_map_t::iterator iter = m_store.begin ();
6970 256921 : iter != m_store.end (); ++iter)
6971 : {
6972 118821 : const region *base_reg = (*iter).first;
6973 118821 : if (const region *parent = base_reg->get_parent_region ())
6974 118821 : if (parent->get_kind () == RK_FRAME)
6975 74055 : reachable_regs.add (base_reg, false);
6976 : }
6977 :
 : /* Regions referenced via in-flight exceptions are also reachable. */
6978 19283 : for (auto &eh_node : m_thrown_exceptions_stack)
6979 4 : eh_node.add_to_reachable_regions (reachable_regs);
6980 19339 : for (auto &eh_node : m_caught_exceptions_stack)
6981 60 : eh_node.add_to_reachable_regions (reachable_regs);
6982 :
6983 :
 : /* Convert the reachable set into a bitmap of region IDs. */
6984 19279 : bitmap_clear (out_ids);
6985 141744 : for (auto iter_reg : reachable_regs)
6986 122465 : bitmap_set_bit (out_ids, iter_reg->get_id ());
6987 19279 : }
6988 :
6989 : /* Return a new region describing a block of memory allocated within the
6990 : current frame.
6991 : Use CTXT to complain about tainted sizes. */
 : /* If SIZE_IN_BYTES has a type incompatible with size_type_node,
 : no dynamic extent is recorded for the region. */
6992 :
6993 : const region *
6994 426 : region_model::create_region_for_alloca (const svalue *size_in_bytes,
6995 : region_model_context *ctxt)
6996 : {
6997 426 : const region *reg = m_mgr->create_region_for_alloca (m_current_frame);
6998 426 : if (compat_types_p (size_in_bytes->get_type (), size_type_node))
6999 425 : set_dynamic_extents (reg, size_in_bytes, ctxt);
7000 426 : return reg;
7001 : }
7002 :
7003 : /* Record that the size of REG is SIZE_IN_BYTES.
7004 : Use CTXT to complain about tainted sizes. */
 : /* If CTXT is null, the taint and floating-point checks on the size
 : are skipped, but the extent is still recorded. */
7005 :
7006 : void
7007 12778 : region_model::set_dynamic_extents (const region *reg,
7008 : const svalue *size_in_bytes,
7009 : region_model_context *ctxt)
7010 : {
7011 12778 : assert_compat_types (size_in_bytes->get_type (), size_type_node);
7012 12778 : if (ctxt)
7013 : {
7014 7818 : check_dynamic_size_for_taint (reg->get_memory_space (), size_in_bytes,
7015 : ctxt);
7016 7818 : check_dynamic_size_for_floats (size_in_bytes, ctxt);
7017 : }
7018 12778 : m_dynamic_extents.put (reg, size_in_bytes);
7019 12778 : }
7020 :
7021 : /* Get the recorded dynamic size of REG in bytes, or nullptr if no
7022 : dynamic size was recorded. */
7023 :
7024 : const svalue *
7025 61780 : region_model::get_dynamic_extents (const region *reg) const
7026 : {
7027 61780 : if (const svalue * const *slot = m_dynamic_extents.get (reg))
7028 12838 : return *slot;
7029 : return nullptr;
7030 : }
7031 :
7032 : /* Unset any recorded dynamic size of REG; a no-op if REG has no
7033 : recorded extent. */
7034 :
7035 : void
7036 49725 : region_model::unset_dynamic_extents (const region *reg)
7037 : {
7038 49725 : m_dynamic_extents.remove (reg);
7039 49725 : }
7039 :
7040 : /* A subclass of pending_diagnostic for complaining about uninitialized data
7041 : being copied across a trust boundary to an untrusted output
7042 : (e.g. copy_to_user infoleaks in the Linux kernel). */
7043 :
7044 : class exposure_through_uninit_copy
7045 : : public pending_diagnostic_subclass<exposure_through_uninit_copy>
7046 : {
7047 : public:
7048 25 : exposure_through_uninit_copy (const region *src_region,
7049 : const region *dest_region,
7050 : const svalue *copied_sval)
7051 25 : : m_src_region (src_region),
7052 25 : m_dest_region (dest_region),
7053 25 : m_copied_sval (copied_sval)
7054 : {
7055 25 : gcc_assert (m_copied_sval->get_kind () == SK_POISONED
7056 : || m_copied_sval->get_kind () == SK_COMPOUND);
7057 25 : }
7058 :
7059 294 : const char *get_kind () const final override
7060 : {
7061 294 : return "exposure_through_uninit_copy";
7062 : }
7063 :
7064 25 : bool operator== (const exposure_through_uninit_copy &other) const
7065 : {
7066 25 : return (m_src_region == other.m_src_region
7067 25 : && m_dest_region == other.m_dest_region
7068 50 : && m_copied_sval == other.m_copied_sval);
7069 : }
7070 :
7071 50 : int get_controlling_option () const final override
7072 : {
7073 50 : return OPT_Wanalyzer_exposure_through_uninit_copy;
7074 : }
7075 :
7076 25 : bool emit (diagnostic_emission_context &ctxt) final override
7077 : {
7078 : /* CWE-200: Exposure of Sensitive Information to an Unauthorized Actor. */
7079 25 : ctxt.add_cwe (200);
7080 50 : enum memory_space mem_space = get_src_memory_space ();
7081 25 : bool warned;
7082 25 : switch (mem_space)
7083 : {
7084 0 : default:
7085 0 : warned = ctxt.warn ("potential exposure of sensitive information"
7086 : " by copying uninitialized data"
7087 : " across trust boundary");
7088 0 : break;
7089 25 : case MEMSPACE_STACK:
7090 25 : warned = ctxt.warn ("potential exposure of sensitive information"
7091 : " by copying uninitialized data from stack"
7092 : " across trust boundary");
7093 25 : break;
7094 0 : case MEMSPACE_HEAP:
7095 0 : warned = ctxt.warn ("potential exposure of sensitive information"
7096 : " by copying uninitialized data from heap"
7097 : " across trust boundary");
7098 0 : break;
7099 : }
7100 25 : if (warned)
7101 : {
7102 25 : const location_t loc = ctxt.get_location ();
7103 25 : inform_number_of_uninit_bits (loc);
7104 25 : complain_about_uninit_ranges (loc);
7105 :
7106 25 : if (mem_space == MEMSPACE_STACK)
7107 25 : maybe_emit_fixit_hint ();
7108 : }
7109 25 : return warned;
7110 : }
7111 :
7112 : bool
7113 50 : describe_final_event (pretty_printer &pp,
7114 : const evdesc::final_event &) final override
7115 : {
7116 100 : enum memory_space mem_space = get_src_memory_space ();
7117 50 : switch (mem_space)
7118 : {
7119 0 : default:
7120 0 : pp_string (&pp, "uninitialized data copied here");
7121 0 : return true;
7122 :
7123 50 : case MEMSPACE_STACK:
7124 50 : pp_string (&pp, "uninitialized data copied from stack here");
7125 50 : return true;
7126 :
7127 0 : case MEMSPACE_HEAP:
7128 0 : pp_string (&pp, "uninitialized data copied from heap here");
7129 0 : return true;
7130 : }
7131 : }
7132 :
7133 25 : void mark_interesting_stuff (interesting_t *interest) final override
7134 : {
7135 25 : if (m_src_region)
7136 25 : interest->add_region_creation (m_src_region);
7137 25 : }
7138 :
7139 : void
7140 0 : maybe_add_sarif_properties (diagnostics::sarif_object &result_obj)
7141 : const final override
7142 : {
7143 0 : auto &props = result_obj.get_or_create_properties ();
7144 : #define PROPERTY_PREFIX "gcc/-Wanalyzer-exposure-through-uninit-copy/"
7145 0 : props.set (PROPERTY_PREFIX "src_region", m_src_region->to_json ());
7146 0 : props.set (PROPERTY_PREFIX "dest_region", m_dest_region->to_json ());
7147 0 : props.set (PROPERTY_PREFIX "copied_sval", m_copied_sval->to_json ());
7148 : #undef PROPERTY_PREFIX
7149 0 : }
7150 :
7151 : private:
7152 75 : enum memory_space get_src_memory_space () const
7153 : {
7154 75 : return m_src_region ? m_src_region->get_memory_space () : MEMSPACE_UNKNOWN;
7155 : }
7156 :
7157 25 : bit_size_t calc_num_uninit_bits () const
7158 : {
7159 25 : switch (m_copied_sval->get_kind ())
7160 : {
7161 0 : default:
7162 0 : gcc_unreachable ();
7163 4 : break;
7164 4 : case SK_POISONED:
7165 4 : {
7166 4 : const poisoned_svalue *poisoned_sval
7167 4 : = as_a <const poisoned_svalue *> (m_copied_sval);
7168 4 : gcc_assert (poisoned_sval->get_poison_kind () == poison_kind::uninit);
7169 :
7170 : /* Give up if don't have type information. */
7171 4 : if (m_copied_sval->get_type () == NULL_TREE)
7172 0 : return 0;
7173 :
7174 4 : bit_size_t size_in_bits;
7175 4 : if (int_size_in_bits (m_copied_sval->get_type (), &size_in_bits))
7176 4 : return size_in_bits;
7177 :
7178 : /* Give up if we can't get the size of the type. */
7179 0 : return 0;
7180 : }
7181 21 : break;
7182 21 : case SK_COMPOUND:
7183 21 : {
7184 21 : const compound_svalue *compound_sval
7185 21 : = as_a <const compound_svalue *> (m_copied_sval);
7186 21 : bit_size_t result = 0;
7187 : /* Find keys for uninit svals. */
7188 82 : for (auto iter : compound_sval->get_map ().get_concrete_bindings ())
7189 : {
7190 61 : const svalue *sval = iter.second;
7191 122 : if (const poisoned_svalue *psval
7192 61 : = sval->dyn_cast_poisoned_svalue ())
7193 24 : if (psval->get_poison_kind () == poison_kind::uninit)
7194 : {
7195 24 : const bit_range &bits = iter.first;
7196 24 : result += bits.m_size_in_bits;
7197 : }
7198 : }
7199 21 : return result;
7200 : }
7201 : }
7202 : }
7203 :
7204 25 : void inform_number_of_uninit_bits (location_t loc) const
7205 : {
7206 25 : bit_size_t num_uninit_bits = calc_num_uninit_bits ();
7207 25 : if (num_uninit_bits <= 0)
7208 0 : return;
7209 25 : if (num_uninit_bits % BITS_PER_UNIT == 0)
7210 : {
7211 : /* Express in bytes. */
7212 25 : byte_size_t num_uninit_bytes = num_uninit_bits / BITS_PER_UNIT;
7213 25 : if (num_uninit_bytes == 1)
7214 3 : inform (loc, "1 byte is uninitialized");
7215 : else
7216 22 : inform (loc,
7217 : "%wu bytes are uninitialized", num_uninit_bytes.to_uhwi ());
7218 : }
7219 : else
7220 : {
7221 : /* Express in bits. */
7222 0 : if (num_uninit_bits == 1)
7223 0 : inform (loc, "1 bit is uninitialized");
7224 : else
7225 0 : inform (loc,
7226 : "%wu bits are uninitialized", num_uninit_bits.to_uhwi ());
7227 : }
7228 : }
7229 :
7230 25 : void complain_about_uninit_ranges (location_t loc) const
7231 : {
7232 50 : if (const compound_svalue *compound_sval
7233 25 : = m_copied_sval->dyn_cast_compound_svalue ())
7234 : {
7235 : /* Find keys for uninit svals. */
7236 21 : auto_vec<bit_range> uninit_bit_ranges;
7237 82 : for (auto iter : compound_sval->get_map ().get_concrete_bindings ())
7238 : {
7239 61 : const svalue *sval = iter.second;
7240 122 : if (const poisoned_svalue *psval
7241 61 : = sval->dyn_cast_poisoned_svalue ())
7242 24 : if (psval->get_poison_kind () == poison_kind::uninit)
7243 24 : uninit_bit_ranges.safe_push (iter.first);
7244 : }
7245 :
7246 21 : std::unique_ptr<record_layout> layout;
7247 :
7248 21 : tree type = m_copied_sval->get_type ();
7249 21 : if (type && TREE_CODE (type) == RECORD_TYPE)
7250 : {
7251 17 : layout = std::make_unique<record_layout> (type);
7252 :
7253 17 : if (0)
7254 : layout->dump ();
7255 : }
7256 :
7257 : unsigned i;
7258 : bit_range *bits;
7259 45 : FOR_EACH_VEC_ELT (uninit_bit_ranges, i, bits)
7260 : {
7261 24 : bit_offset_t start_bit = bits->get_start_bit_offset ();
7262 24 : bit_offset_t next_bit = bits->get_next_bit_offset ();
7263 24 : complain_about_uninit_range (loc, start_bit, next_bit,
7264 24 : layout.get ());
7265 : }
7266 21 : }
7267 25 : }
7268 :
  /* Emit note(s) at LOC describing the uninitialized bits
     [START_BIT, NEXT_BIT).
     If LAYOUT is non-null, first attribute as much of the range as
     possible to specific fields/padding of the record; any residue
     (and the whole range when LAYOUT is null) is described numerically,
     in bytes when byte-aligned, otherwise in bits.  */
  void complain_about_uninit_range (location_t loc,
				    bit_offset_t start_bit,
				    bit_offset_t next_bit,
				    const record_layout *layout) const
  {
    if (layout)
      {
	/* Consume the range item by item; each iteration advances
	   START_BIT to the end of the item covering it.  */
	while (start_bit < next_bit)
	  {
	    if (const record_layout::item *item
		= layout->get_item_at (start_bit))
	      {
		gcc_assert (start_bit >= item->get_start_bit_offset ());
		gcc_assert (start_bit < item->get_next_bit_offset ());
		/* Fully covered iff the uninit range spans the item
		   from its first bit to (at least) its last.  */
		if (item->get_start_bit_offset () == start_bit
		    && item->get_next_bit_offset () <= next_bit)
		  complain_about_fully_uninit_item (*item);
		else
		  complain_about_partially_uninit_item (*item);
		start_bit = item->get_next_bit_offset ();
		continue;
	      }
	    else
	      break;
	  }
      }

    /* Everything was attributed to layout items; nothing more to say.  */
    if (start_bit >= next_bit)
      return;

    if (start_bit % 8 == 0 && next_bit % 8 == 0)
      {
	/* Express in bytes.  */
	byte_offset_t start_byte = start_bit / 8;
	byte_offset_t last_byte = (next_bit / 8) - 1;
	if (last_byte == start_byte)
	  inform (loc,
		  "byte %wu is uninitialized",
		  start_byte.to_uhwi ());
	else
	  inform (loc,
		  "bytes %wu - %wu are uninitialized",
		  start_byte.to_uhwi (),
		  last_byte.to_uhwi ());
      }
    else
      {
	/* Express in bits.  */
	bit_offset_t last_bit = next_bit - 1;
	if (last_bit == start_bit)
	  inform (loc,
		  "bit %wu is uninitialized",
		  start_bit.to_uhwi ());
	else
	  inform (loc,
		  "bits %wu - %wu are uninitialized",
		  start_bit.to_uhwi (),
		  last_bit.to_uhwi ());
      }
  }
7329 :
  /* Emit a note at ITEM's field decl saying that the whole of ITEM
     (a field, or the padding after a field) is uninitialized.
     The size is reported in bytes when byte-aligned, otherwise in bits,
     with singular/plural wording chosen explicitly for translatability.  */
  static void
  complain_about_fully_uninit_item (const record_layout::item &item)
  {
    const_tree field = item.m_field;
    bit_size_t num_bits = item.m_bit_range.m_size_in_bits;
    if (item.m_is_padding)
      {
	if (num_bits % 8 == 0)
	  {
	    /* Express in bytes.  */
	    byte_size_t num_bytes = num_bits / BITS_PER_UNIT;
	    if (num_bytes == 1)
	      inform (DECL_SOURCE_LOCATION (field),
		      "padding after field %qD is uninitialized (1 byte)",
		      field);
	    else
	      inform (DECL_SOURCE_LOCATION (field),
		      "padding after field %qD is uninitialized (%wu bytes)",
		      field, num_bytes.to_uhwi ());
	  }
	else
	  {
	    /* Express in bits.  */
	    if (num_bits == 1)
	      inform (DECL_SOURCE_LOCATION (field),
		      "padding after field %qD is uninitialized (1 bit)",
		      field);
	    else
	      inform (DECL_SOURCE_LOCATION (field),
		      "padding after field %qD is uninitialized (%wu bits)",
		      field, num_bits.to_uhwi ());
	  }
      }
    else
      {
	if (num_bits % 8 == 0)
	  {
	    /* Express in bytes.  */
	    byte_size_t num_bytes = num_bits / BITS_PER_UNIT;
	    if (num_bytes == 1)
	      inform (DECL_SOURCE_LOCATION (field),
		      "field %qD is uninitialized (1 byte)", field);
	    else
	      inform (DECL_SOURCE_LOCATION (field),
		      "field %qD is uninitialized (%wu bytes)",
		      field, num_bytes.to_uhwi ());
	  }
	else
	  {
	    /* Express in bits (e.g. for bitfields).  */
	    if (num_bits == 1)
	      inform (DECL_SOURCE_LOCATION (field),
		      "field %qD is uninitialized (1 bit)", field);
	    else
	      inform (DECL_SOURCE_LOCATION (field),
		      "field %qD is uninitialized (%wu bits)",
		      field, num_bits.to_uhwi ());
	  }
      }
  }
7390 :
  /* Emit a note at ITEM's field decl saying that ITEM (a field, or the
     padding after a field) is only partially uninitialized.  */
  static void
  complain_about_partially_uninit_item (const record_layout::item &item)
  {
    const_tree field = item.m_field;
    if (item.m_is_padding)
      inform (DECL_SOURCE_LOCATION (field),
	      "padding after field %qD is partially uninitialized",
	      field);
    else
      inform (DECL_SOURCE_LOCATION (field),
	      "field %qD is partially uninitialized",
	      field);
    /* TODO: ideally we'd describe what parts are uninitialized.  */
  }
7405 :
  /* If the source region is backed by a declaration, emit a fix-it hint
     at that decl suggesting zero-initialization via a "{0}" initializer.  */
  void maybe_emit_fixit_hint () const
  {
    if (tree decl = m_src_region->maybe_get_decl ())
      {
	gcc_rich_location hint_richloc (DECL_SOURCE_LOCATION (decl));
	hint_richloc.add_fixit_insert_after (" = {0}");
	inform (&hint_richloc,
		"suggest forcing zero-initialization by"
		" providing a %<{0}%> initializer");
      }
  }

private:
  const region *m_src_region;	/* Where the copied value came from.  */
  const region *m_dest_region;	/* The untrusted destination of the copy.  */
  const svalue *m_copied_sval;	/* The (partly uninitialized) copied value.  */
};
7423 :
7424 : /* Return true if any part of SVAL is uninitialized. */
7425 :
7426 : static bool
7427 80 : contains_uninit_p (const svalue *sval)
7428 : {
7429 80 : switch (sval->get_kind ())
7430 : {
7431 : default:
7432 : return false;
7433 4 : case SK_POISONED:
7434 4 : {
7435 4 : const poisoned_svalue *psval
7436 4 : = as_a <const poisoned_svalue *> (sval);
7437 4 : return psval->get_poison_kind () == poison_kind::uninit;
7438 : }
7439 43 : case SK_COMPOUND:
7440 43 : {
7441 43 : const compound_svalue *compound_sval
7442 43 : = as_a <const compound_svalue *> (sval);
7443 :
7444 43 : for (auto iter = compound_sval->begin ();
7445 141 : iter != compound_sval->end (); ++iter)
7446 : {
7447 119 : const svalue *inner_sval = iter.get_svalue ();
7448 238 : if (const poisoned_svalue *psval
7449 119 : = inner_sval->dyn_cast_poisoned_svalue ())
7450 21 : if (psval->get_poison_kind () == poison_kind::uninit)
7451 21 : return true;
7452 : }
7453 :
7454 22 : return false;
7455 : }
7456 : }
7457 : }
7458 :
7459 : /* Function for use by plugins when simulating writing data through a
7460 : pointer to an "untrusted" region DST_REG (and thus crossing a security
7461 : boundary), such as copying data to user space in an OS kernel.
7462 :
7463 : Check that COPIED_SVAL is fully initialized. If not, complain about
7464 : an infoleak to CTXT.
7465 :
7466 : SRC_REG can be nullptr; if non-NULL it is used as a hint in the diagnostic
7467 : as to where COPIED_SVAL came from. */
7468 :
void
region_model::maybe_complain_about_infoleak (const region *dst_reg,
					     const svalue *copied_sval,
					     const region *src_reg,
					     region_model_context *ctxt)
{
  /* Check for exposure.
     NOTE(review): CTXT is dereferenced unconditionally below, so callers
     must pass a non-null context (SRC_REG, by contrast, may be null).  */
  if (contains_uninit_p (copied_sval))
    ctxt->warn
      (std::make_unique<exposure_through_uninit_copy> (src_reg,
						       dst_reg,
						       copied_sval));
}
7482 :
7483 : /* Set errno to a positive symbolic int, as if some error has occurred. */
7484 :
void
region_model::set_errno (const call_details &cd)
{
  const region *errno_reg = m_mgr->get_errno_region ();
  conjured_purge p (this, cd.get_ctxt ());
  /* Conjure a fresh symbolic value for errno, keyed off the call stmt
     and the errno region.  */
  const svalue *new_errno_sval
    = m_mgr->get_or_create_conjured_svalue (integer_type_node,
					    &cd.get_call_stmt (),
					    errno_reg, p);
  const svalue *zero
    = m_mgr->get_or_create_int_cst (integer_type_node, 0);
  /* Constrain the conjured value to be strictly positive, then store it
     into the errno region.  */
  add_constraint (new_errno_sval, GT_EXPR, zero, cd.get_ctxt ());
  set_value (errno_reg, new_errno_sval, cd.get_ctxt ());
}
7499 :
7500 : // class region_model_context
7501 :
/* Warn using this context's default pending location, attaching
   PLOC_FIXER (if any) so the location can be adjusted with respect to
   the eventual execution path.  Delegates to the warn_at vfunc;
   returns its result.  */

bool
region_model_context::
warn (std::unique_ptr<pending_diagnostic> d,
      std::unique_ptr<pending_location::fixer_for_epath> ploc_fixer)
{
  pending_location ploc (get_pending_location_for_diag ());
  ploc.m_fixer_for_epath = std::move (ploc_fixer);
  return warn_at (std::move (d), std::move (ploc));
}
7511 :
7512 : /* class noop_region_model_context : public region_model_context. */
7513 :
/* No-op implementation: notes passed to this context are discarded.  */
void
noop_region_model_context::add_note (std::unique_ptr<pending_note>)
{
}

/* No-op implementation: events passed to this context are discarded.  */
void
noop_region_model_context::add_event (std::unique_ptr<checker_event>)
{
}

/* No-op implementation: bifurcation requests are ignored.  */
void
noop_region_model_context::bifurcate (std::unique_ptr<custom_edge_info>)
{
}

/* No-op implementation: path-termination requests are ignored.  */
void
noop_region_model_context::terminate_path ()
{
}
7533 :
7534 : /* class region_model_context_decorator : public region_model_context. */
7535 :
7536 : void
7537 167 : region_model_context_decorator::add_event (std::unique_ptr<checker_event> event)
7538 : {
7539 167 : if (m_inner)
7540 167 : m_inner->add_event (std::move (event));
7541 167 : }
7542 :
7543 : /* struct model_merger. */
7544 :
7545 : /* Dump a multiline representation of this merger to PP. */
7546 :
7547 : void
7548 0 : model_merger::dump_to_pp (pretty_printer *pp, bool simple) const
7549 : {
7550 0 : pp_string (pp, "model A:");
7551 0 : pp_newline (pp);
7552 0 : m_model_a->dump_to_pp (pp, simple, true);
7553 0 : pp_newline (pp);
7554 :
7555 0 : pp_string (pp, "model B:");
7556 0 : pp_newline (pp);
7557 0 : m_model_b->dump_to_pp (pp, simple, true);
7558 0 : pp_newline (pp);
7559 :
7560 0 : pp_string (pp, "merged model:");
7561 0 : pp_newline (pp);
7562 0 : m_merged_model->dump_to_pp (pp, simple, true);
7563 0 : pp_newline (pp);
7564 0 : }
7565 :
7566 : /* Dump a multiline representation of this merger to FILE. */
7567 :
void
model_merger::dump (FILE *fp, bool simple) const
{
  /* Route the dump through a tree_dump_pretty_printer bound to FP.  */
  tree_dump_pretty_printer pp (fp);
  dump_to_pp (&pp, simple);
}

/* Dump a multiline representation of this merger to stderr.  */

DEBUG_FUNCTION void
model_merger::dump (bool simple) const
{
  dump (stderr, simple);
}
7582 :
7583 : /* Return true if it's OK to merge SVAL with other svalues. */
7584 :
7585 : bool
7586 578125 : model_merger::mergeable_svalue_p (const svalue *sval) const
7587 : {
7588 578125 : if (m_ext_state)
7589 : {
7590 : /* Reject merging svalues that have non-purgable sm-state,
7591 : to avoid falsely reporting memory leaks by merging them
7592 : with something else. For example, given a local var "p",
7593 : reject the merger of a:
7594 : store_a mapping "p" to a malloc-ed ptr
7595 : with:
7596 : store_b mapping "p" to a NULL ptr. */
7597 578077 : if (m_state_a)
7598 578077 : if (!m_state_a->can_purge_p (*m_ext_state, sval))
7599 : return false;
7600 576133 : if (m_state_b)
7601 576133 : if (!m_state_b->can_purge_p (*m_ext_state, sval))
7602 : return false;
7603 : }
7604 : return true;
7605 : }
7606 :
7607 : /* Mark WIDENING_SVAL as changing meaning during the merge. */
7608 :
void
model_merger::on_widening_reuse (const widening_svalue *widening_sval)
{
  /* Record the svalue so later phases know its meaning differs between
     the merged-in states.  */
  m_svals_changing_meaning.add (widening_sval);
}
7614 :
7615 : } // namespace ana
7616 :
7617 : /* Dump RMODEL fully to stderr (i.e. without summarization). */
7618 :
DEBUG_FUNCTION void
debug (const region_model &rmodel)
{
  /* "false" disables summarization, giving the full dump.  */
  rmodel.dump (false);
}
7624 :
7625 : /* class rejected_op_constraint : public rejected_constraint. */
7626 :
7627 : void
7628 4 : rejected_op_constraint::dump_to_pp (pretty_printer *pp) const
7629 : {
7630 4 : region_model m (m_model);
7631 4 : m_lhs->dump_to_pp (pp, true);
7632 4 : pp_printf (pp, " %s ", op_symbol_code (m_op));
7633 4 : m_rhs->dump_to_pp (pp, true);
7634 4 : }
7635 :
7636 : /* class rejected_default_case : public rejected_constraint. */
7637 :
/* Dump a representation of the rejected constraint to PP: the
   implicit "default:" of a switch over an enum.  */

void
rejected_default_case::dump_to_pp (pretty_printer *pp) const
{
  pp_string (pp, "implicit default for enum");
}
7643 :
7644 : /* class rejected_ranges_constraint : public rejected_constraint. */
7645 :
/* Dump a representation of the rejected constraint to PP,
   as "SVAL in RANGES".  */

void
rejected_ranges_constraint::dump_to_pp (pretty_printer *pp) const
{
  /* Evaluate m_expr on a scratch copy of the stored model
     (presumably because get_rvalue can mutate the model — confirm).  */
  region_model m (m_model);
  const svalue *sval = m.get_rvalue (m_expr, nullptr);
  sval->dump_to_pp (pp, true);
  pp_string (pp, " in ");
  m_ranges->dump_to_pp (pp, true);
}
7655 :
7656 : /* class engine. */
7657 :
7658 : /* engine's ctor. */
7659 :
7660 3416 : engine::engine (region_model_manager &mgr,
7661 3416 : const supergraph *sg)
7662 3416 : : m_mgr (mgr),
7663 3416 : m_sg (sg)
7664 : {
7665 3416 : }
7666 :
7667 : /* Dump the managed objects by class to LOGGER, and the per-class totals. */
7668 :
7669 : void
7670 5 : engine::log_stats (logger *logger) const
7671 : {
7672 5 : m_mgr.log_stats (logger, true);
7673 5 : }
7674 :
7675 : namespace ana {
7676 :
7677 : #if CHECKING_P
7678 :
7679 : namespace selftest {
7680 :
7681 : /* Build a constant tree of the given type from STR. */
7682 :
static tree
build_real_cst_from_string (tree type, const char *str)
{
  REAL_VALUE_TYPE real;
  /* real_from_string accepts special spellings such as "QNaN", "SNaN"
     and "Inf" as well as ordinary decimal values (see callers below).  */
  real_from_string (&real, str);
  return build_real (type, real);
}
7690 :
7691 : /* Append various "interesting" constants to OUT (e.g. NaN). */
7692 :
7693 : static void
7694 8 : append_interesting_constants (auto_vec<tree> *out)
7695 : {
7696 8 : out->safe_push (integer_zero_node);
7697 8 : out->safe_push (build_int_cst (integer_type_node, 42));
7698 8 : out->safe_push (build_int_cst (unsigned_type_node, 0));
7699 8 : out->safe_push (build_int_cst (unsigned_type_node, 42));
7700 8 : out->safe_push (build_real_cst_from_string (float_type_node, "QNaN"));
7701 8 : out->safe_push (build_real_cst_from_string (float_type_node, "-QNaN"));
7702 8 : out->safe_push (build_real_cst_from_string (float_type_node, "SNaN"));
7703 8 : out->safe_push (build_real_cst_from_string (float_type_node, "-SNaN"));
7704 8 : out->safe_push (build_real_cst_from_string (float_type_node, "0.0"));
7705 8 : out->safe_push (build_real_cst_from_string (float_type_node, "-0.0"));
7706 8 : out->safe_push (build_real_cst_from_string (float_type_node, "Inf"));
7707 8 : out->safe_push (build_real_cst_from_string (float_type_node, "-Inf"));
7708 8 : }
7709 :
7710 : /* Verify that tree_cmp is a well-behaved comparator for qsort, even
7711 : if the underlying constants aren't comparable. */
7712 :
static void
test_tree_cmp_on_constants ()
{
  auto_vec<tree> csts;
  append_interesting_constants (&csts);

  /* Try sorting every triple.
     Success is simply "no crash": qsort requires a consistent
     comparator, and this exercises tree_cmp on every combination,
     including pairs (e.g. NaNs) that aren't numerically comparable.  */
  const unsigned num = csts.length ();
  for (unsigned i = 0; i < num; i++)
    for (unsigned j = 0; j < num; j++)
      for (unsigned k = 0; k < num; k++)
	{
	  auto_vec<tree> v (3);
	  v.quick_push (csts[i]);
	  v.quick_push (csts[j]);
	  v.quick_push (csts[k]);
	  v.qsort (tree_cmp);
	}
}
7732 :
7733 : /* Implementation detail of the ASSERT_CONDITION_* macros. */
7734 :
/* Assert at LOC that evaluating "LHS OP RHS" in MODEL yields EXPECTED,
   where LHS and RHS are svalues.  */
void
assert_condition (const location &loc,
		  region_model &model,
		  const svalue *lhs, tree_code op, const svalue *rhs,
		  tristate expected)
{
  tristate actual = model.eval_condition (lhs, op, rhs);
  ASSERT_EQ_AT (loc, actual, expected);
}

/* Implementation detail of the ASSERT_CONDITION_* macros.
   As above, but LHS and RHS are trees, evaluated with a null context.  */

void
assert_condition (const location &loc,
		  region_model &model,
		  tree lhs, tree_code op, tree rhs,
		  tristate expected)
{
  tristate actual = model.eval_condition (lhs, op, rhs, nullptr);
  ASSERT_EQ_AT (loc, actual, expected);
}
7756 :
7757 : /* Implementation detail of ASSERT_DUMP_TREE_EQ. */
7758 :
/* Assert at LOC that dump_tree on T produces exactly EXPECTED.  */
static void
assert_dump_tree_eq (const location &loc, tree t, const char *expected)
{
  /* Normalize quote characters so EXPECTED can use plain ASCII quotes.  */
  auto_fix_quotes sentinel;
  pretty_printer pp;
  pp_format_decoder (&pp) = default_tree_printer;
  dump_tree (&pp, t);
  ASSERT_STREQ_AT (loc, pp_formatted_text (&pp), expected);
}

/* Assert that dump_tree (T) is EXPECTED.  */

#define ASSERT_DUMP_TREE_EQ(T, EXPECTED) \
  SELFTEST_BEGIN_STMT \
  assert_dump_tree_eq ((SELFTEST_LOCATION), (T), (EXPECTED)); \
  SELFTEST_END_STMT
7775 :
7776 : /* Implementation detail of ASSERT_DUMP_EQ. */
7777 :
7778 : static void
7779 8 : assert_dump_eq (const location &loc,
7780 : const region_model &model,
7781 : bool summarize,
7782 : const char *expected)
7783 : {
7784 8 : auto_fix_quotes sentinel;
7785 8 : pretty_printer pp;
7786 8 : pp_format_decoder (&pp) = default_tree_printer;
7787 :
7788 8 : model.dump_to_pp (&pp, summarize, true);
7789 8 : ASSERT_STREQ_AT (loc, pp_formatted_text (&pp), expected);
7790 8 : }
7791 :
7792 : /* Assert that MODEL.dump_to_pp (SUMMARIZE) is EXPECTED. */
7793 :
7794 : #define ASSERT_DUMP_EQ(MODEL, SUMMARIZE, EXPECTED) \
7795 : SELFTEST_BEGIN_STMT \
7796 : assert_dump_eq ((SELFTEST_LOCATION), (MODEL), (SUMMARIZE), (EXPECTED)); \
7797 : SELFTEST_END_STMT
7798 :
7799 : /* Smoketest for region_model::dump_to_pp. */
7800 :
static void
test_dump ()
{
  region_model_manager mgr;
  region_model model (&mgr);

  /* An empty model should dump identically whether or not we
     summarize.  */
  ASSERT_DUMP_EQ (model, false,
		  "stack depth: 0\n"
		  "m_called_unknown_fn: FALSE\n"
		  "constraint_manager:\n"
		  "  equiv classes:\n"
		  "  constraints:\n");
  ASSERT_DUMP_EQ (model, true,
		  "stack depth: 0\n"
		  "m_called_unknown_fn: FALSE\n"
		  "constraint_manager:\n"
		  "  equiv classes:\n"
		  "  constraints:\n");

  /* Also exercise the text-art tree dump of an empty model.  */
  text_art::ascii_theme theme;
  pretty_printer pp;
  dump_to_pp (model, &theme, &pp);
  ASSERT_STREQ ("Region Model\n"
		"`- Store\n"
		"   `- m_called_unknown_fn: false\n",
		pp_formatted_text (&pp));
}
7828 :
7829 : /* Helper function for selftests. Create a struct or union type named NAME,
7830 : with the fields given by the FIELD_DECLS in FIELDS.
7831 : If IS_STRUCT is true create a RECORD_TYPE (aka a struct), otherwise
7832 : create a UNION_TYPE. */
7833 :
static tree
make_test_compound_type (const char *name, bool is_struct,
			 const auto_vec<tree> *fields)
{
  tree t = make_node (is_struct ? RECORD_TYPE : UNION_TYPE);
  TYPE_NAME (t) = get_identifier (name);
  TYPE_SIZE (t) = 0;

  /* Chain the FIELD_DECLs together in source order, setting their
     DECL_CONTEXT to the new type.  chainon appends, so nreverse below
     restores the original order.  */
  tree fieldlist = NULL_TREE;
  int i;
  tree field;
  FOR_EACH_VEC_ELT (*fields, i, field)
    {
      gcc_assert (TREE_CODE (field) == FIELD_DECL);
      DECL_CONTEXT (field) = t;
      fieldlist = chainon (field, fieldlist);
    }
  fieldlist = nreverse (fieldlist);
  TYPE_FIELDS (t) = fieldlist;

  /* Compute size/alignment/field offsets for the new type.  */
  layout_type (t);
  return t;
}
7857 :
7858 : /* Selftest fixture for creating the type "struct coord {int x; int y; };". */
7859 :
7860 : struct coord_test
7861 : {
7862 16 : coord_test ()
7863 16 : {
7864 16 : auto_vec<tree> fields;
7865 16 : m_x_field = build_decl (UNKNOWN_LOCATION, FIELD_DECL,
7866 : get_identifier ("x"), integer_type_node);
7867 16 : fields.safe_push (m_x_field);
7868 16 : m_y_field = build_decl (UNKNOWN_LOCATION, FIELD_DECL,
7869 : get_identifier ("y"), integer_type_node);
7870 16 : fields.safe_push (m_y_field);
7871 16 : m_coord_type = make_test_compound_type ("coord", true, &fields);
7872 16 : }
7873 :
7874 : tree m_x_field;
7875 : tree m_y_field;
7876 : tree m_coord_type;
7877 : };
7878 :
7879 : /* Verify usage of a struct. */
7880 :
static void
test_struct ()
{
  coord_test ct;

  tree c = build_global_decl ("c", ct.m_coord_type);
  tree c_x = build3 (COMPONENT_REF, TREE_TYPE (ct.m_x_field),
		     c, ct.m_x_field, NULL_TREE);
  tree c_y = build3 (COMPONENT_REF, TREE_TYPE (ct.m_y_field),
		     c, ct.m_y_field, NULL_TREE);

  tree int_17 = build_int_cst (integer_type_node, 17);
  tree int_m3 = build_int_cst (integer_type_node, -3);

  region_model_manager mgr;
  region_model model (&mgr);
  /* Set fields in order y, then x, so the iteration-order check below
     proves ordering is by memory layout, not by insertion.  */
  model.set_value (c_y, int_m3, nullptr);
  model.set_value (c_x, int_17, nullptr);

  /* Verify get_offset for "c.x".  */
  {
    const region *c_x_reg = model.get_lvalue (c_x, nullptr);
    region_offset offset = c_x_reg->get_offset (&mgr);
    ASSERT_EQ (offset.get_base_region (), model.get_lvalue (c, nullptr));
    ASSERT_EQ (offset.get_bit_offset (), 0);
  }

  /* Verify get_offset for "c.y".  */
  {
    const region *c_y_reg = model.get_lvalue (c_y, nullptr);
    region_offset offset = c_y_reg->get_offset (&mgr);
    ASSERT_EQ (offset.get_base_region (), model.get_lvalue (c, nullptr));
    ASSERT_EQ (offset.get_bit_offset (), INT_TYPE_SIZE);
  }

  /* Check iteration order of binding_cluster (and thus of binding_map).  */
  {
    std::vector<binding_map::binding_pair> vec;
    auto cluster
      = model.get_store ()->get_cluster (model.get_lvalue (c, nullptr));
    for (auto iter : *cluster)
      vec.push_back (iter);
    ASSERT_EQ (vec.size (), 2);
    /* we should get them back in ascending order in memory (x then y).  */
    /* x */
    ASSERT_EQ (vec[0].m_key->dyn_cast_concrete_binding ()->get_bit_range (),
	       bit_range (0, INT_TYPE_SIZE));
    ASSERT_TRUE (tree_int_cst_equal(vec[0].m_sval->maybe_get_constant (),
				    int_17));
    /* y */
    ASSERT_EQ (vec[1].m_key->dyn_cast_concrete_binding ()->get_bit_range (),
	       bit_range (INT_TYPE_SIZE, INT_TYPE_SIZE));
    ASSERT_TRUE (tree_int_cst_equal(vec[1].m_sval->maybe_get_constant (),
				    int_m3));
  }
}
7938 :
7939 : /* Verify usage of an array element. */
7940 :
static void
test_array_1 ()
{
  /* char a[10]; with a write to a[0].
     Success is simply that set_value on the element doesn't ICE.  */
  tree tlen = size_int (10);
  tree arr_type = build_array_type (char_type_node, build_index_type (tlen));

  tree a = build_global_decl ("a", arr_type);

  region_model_manager mgr;
  region_model model (&mgr);
  tree int_0 = integer_zero_node;
  tree a_0 = build4 (ARRAY_REF, char_type_node,
		     a, int_0, NULL_TREE, NULL_TREE);
  tree char_A = build_int_cst (char_type_node, 'A');
  model.set_value (a_0, char_A, nullptr);
}
7957 :
7958 : /* Verify that region_model::get_representative_tree works as expected. */
7959 :
static void
test_get_representative_tree ()
{
  region_model_manager mgr;

  /* STRING_CST: the representative tree of a string constant's svalue
     should be the constant itself.  */
  {
    tree string_cst = build_string (4, "foo");
    region_model m (&mgr);
    const svalue *str_sval = m.get_rvalue (string_cst, nullptr);
    tree rep = m.get_representative_tree (str_sval);
    ASSERT_EQ (rep, string_cst);
  }

  /* String literal (i.e. a pointer to the string's first char).  */
  {
    tree string_cst_ptr = build_string_literal (4, "foo");
    region_model m (&mgr);
    const svalue *str_sval = m.get_rvalue (string_cst_ptr, nullptr);
    tree rep = m.get_representative_tree (str_sval);
    ASSERT_DUMP_TREE_EQ (rep, "&\"foo\"[0]");
  }

  /* Value of an element within an array: bind a placeholder svalue to
     the element's region, then check the representative tree names the
     element.  */
  {
    tree tlen = size_int (10);
    tree arr_type = build_array_type (char_type_node, build_index_type (tlen));
    tree a = build_global_decl ("a", arr_type);
    placeholder_svalue test_sval (mgr.alloc_symbol_id (),
				  char_type_node, "test value");

    /* Value of a[3].  */
    {
      test_region_model_context ctxt;
      region_model model (&mgr);
      tree int_3 = build_int_cst (integer_type_node, 3);
      tree a_3 = build4 (ARRAY_REF, char_type_node,
			 a, int_3, NULL_TREE, NULL_TREE);
      const region *a_3_reg = model.get_lvalue (a_3, &ctxt);
      model.set_value (a_3_reg, &test_sval, &ctxt);
      tree rep = model.get_representative_tree (&test_sval);
      ASSERT_DUMP_TREE_EQ (rep, "a[3]");
    }

    /* Value of a[0].  */
    {
      test_region_model_context ctxt;
      region_model model (&mgr);
      tree idx = integer_zero_node;
      tree a_0 = build4 (ARRAY_REF, char_type_node,
			 a, idx, NULL_TREE, NULL_TREE);
      const region *a_0_reg = model.get_lvalue (a_0, &ctxt);
      model.set_value (a_0_reg, &test_sval, &ctxt);
      tree rep = model.get_representative_tree (&test_sval);
      ASSERT_DUMP_TREE_EQ (rep, "a[0]");
    }
  }

  /* Value of a field within a struct.  */
  {
    coord_test ct;

    tree c = build_global_decl ("c", ct.m_coord_type);
    tree c_x = build3 (COMPONENT_REF, TREE_TYPE (ct.m_x_field),
		       c, ct.m_x_field, NULL_TREE);
    tree c_y = build3 (COMPONENT_REF, TREE_TYPE (ct.m_y_field),
		       c, ct.m_y_field, NULL_TREE);

    test_region_model_context ctxt;

    /* Value of initial field.  */
    {
      region_model m (&mgr);
      const region *c_x_reg = m.get_lvalue (c_x, &ctxt);
      placeholder_svalue test_sval_x (mgr.alloc_symbol_id (),
				      integer_type_node, "test x val");
      m.set_value (c_x_reg, &test_sval_x, &ctxt);
      tree rep = m.get_representative_tree (&test_sval_x);
      ASSERT_DUMP_TREE_EQ (rep, "c.x");
    }

    /* Value of non-initial field.  */
    {
      region_model m (&mgr);
      const region *c_y_reg = m.get_lvalue (c_y, &ctxt);
      placeholder_svalue test_sval_y (mgr.alloc_symbol_id (),
				      integer_type_node, "test y val");
      m.set_value (c_y_reg, &test_sval_y, &ctxt);
      tree rep = m.get_representative_tree (&test_sval_y);
      ASSERT_DUMP_TREE_EQ (rep, "c.y");
    }
  }
}
8053 :
8054 : /* Verify that calling region_model::get_rvalue repeatedly on the same
8055 : tree constant retrieves the same svalue *. */
8056 :
static void
test_unique_constants ()
{
  tree int_0 = integer_zero_node;
  tree int_42 = build_int_cst (integer_type_node, 42);

  /* Same tree constant => same svalue; different constants differ;
     and none of the lookups should emit diagnostics.  */
  test_region_model_context ctxt;
  region_model_manager mgr;
  region_model model (&mgr);
  ASSERT_EQ (model.get_rvalue (int_0, &ctxt), model.get_rvalue (int_0, &ctxt));
  ASSERT_EQ (model.get_rvalue (int_42, &ctxt),
	     model.get_rvalue (int_42, &ctxt));
  ASSERT_NE (model.get_rvalue (int_0, &ctxt), model.get_rvalue (int_42, &ctxt));
  ASSERT_EQ (ctxt.get_num_diagnostics (), 0);

  /* A "(const int)42" will be a different tree from "(int)42)"...  */
  tree const_int_type_node
    = build_qualified_type (integer_type_node, TYPE_QUAL_CONST);
  tree const_int_42 = build_int_cst (const_int_type_node, 42);
  ASSERT_NE (int_42, const_int_42);
  /* It should have a different const_svalue.  */
  const svalue *int_42_sval = model.get_rvalue (int_42, &ctxt);
  const svalue *const_int_42_sval = model.get_rvalue (const_int_42, &ctxt);
  ASSERT_NE (int_42_sval, const_int_42_sval);
  /* But they should compare as equal.  */
  ASSERT_CONDITION_TRUE (model, int_42_sval, EQ_EXPR, const_int_42_sval);
  ASSERT_CONDITION_FALSE (model, int_42_sval, NE_EXPR, const_int_42_sval);
}
8085 :
8086 : /* Verify that each type gets its own singleton unknown_svalue within a
8087 : region_model_manager, and that NULL_TREE gets its own singleton. */
8088 :
static void
test_unique_unknowns ()
{
  region_model_manager mgr;
  const svalue *unknown_int
    = mgr.get_or_create_unknown_svalue (integer_type_node);
  /* Repeated calls with the same type should get the same "unknown"
     svalue.  */
  const svalue *unknown_int_2
    = mgr.get_or_create_unknown_svalue (integer_type_node);
  ASSERT_EQ (unknown_int, unknown_int_2);

  /* Different types (or the NULL type) should have different
     unknown_svalues.  */
  const svalue *unknown_NULL_type = mgr.get_or_create_unknown_svalue (nullptr);
  ASSERT_NE (unknown_NULL_type, unknown_int);

  /* Repeated calls with NULL for the type should get the same "unknown"
     svalue.  */
  const svalue *unknown_NULL_type_2 = mgr.get_or_create_unknown_svalue (nullptr);
  ASSERT_EQ (unknown_NULL_type, unknown_NULL_type_2);
}
8111 :
8112 : /* Verify that initial_svalue are handled as expected. */
8113 :
static void
test_initial_svalue_folding ()
{
  region_model_manager mgr;
  tree x = build_global_decl ("x", integer_type_node);
  tree y = build_global_decl ("y", integer_type_node);

  /* Initial values of distinct globals must be distinct svalues, and
     a decl's rvalue must be the initial_value of its region.  */
  test_region_model_context ctxt;
  region_model model (&mgr);
  const svalue *x_init = model.get_rvalue (x, &ctxt);
  const svalue *y_init = model.get_rvalue (y, &ctxt);
  ASSERT_NE (x_init, y_init);
  const region *x_reg = model.get_lvalue (x, &ctxt);
  ASSERT_EQ (x_init, mgr.get_or_create_initial_value (x_reg));

}
8130 :
8131 : /* Verify that unary ops are folded as expected. */
8132 :
static void
test_unaryop_svalue_folding ()
{
  region_model_manager mgr;
  tree x = build_global_decl ("x", integer_type_node);
  tree y = build_global_decl ("y", integer_type_node);

  test_region_model_context ctxt;
  region_model model (&mgr);
  const svalue *x_init = model.get_rvalue (x, &ctxt);
  const svalue *y_init = model.get_rvalue (y, &ctxt);
  const region *x_reg = model.get_lvalue (x, &ctxt);
  ASSERT_EQ (x_init, mgr.get_or_create_initial_value (x_reg));

  /* "(int)x" -> "x": casting to the value's own type folds away.  */
  ASSERT_EQ (x_init, mgr.get_or_create_cast (integer_type_node, x_init));

  /* "(void *)x" -> something other than "x".  */
  ASSERT_NE (x_init, mgr.get_or_create_cast (ptr_type_node, x_init));

  /* Logical negation of a comparison folds to the inverse comparison.  */
  /* "!(x == y)" -> "x != y".  */
  ASSERT_EQ (mgr.get_or_create_unaryop
	       (boolean_type_node, TRUTH_NOT_EXPR,
		mgr.get_or_create_binop (boolean_type_node, EQ_EXPR,
					 x_init, y_init)),
	     mgr.get_or_create_binop (boolean_type_node, NE_EXPR,
				      x_init, y_init));
  /* "!(x > y)" -> "x <= y".  */
  ASSERT_EQ (mgr.get_or_create_unaryop
	       (boolean_type_node, TRUTH_NOT_EXPR,
		mgr.get_or_create_binop (boolean_type_node, GT_EXPR,
					 x_init, y_init)),
	     mgr.get_or_create_binop (boolean_type_node, LE_EXPR,
				      x_init, y_init));
}
8168 :
8169 : /* Verify that binops on constant svalues are folded. */
8170 :
static void
test_binop_svalue_folding ()
{
#define NUM_CSTS 10
  tree cst_int[NUM_CSTS];
  region_model_manager mgr;
  const svalue *cst_sval[NUM_CSTS];
  /* Build interned constant svalues for the integers 0..NUM_CSTS-1,
     verifying each is modeled as a constant.  */
  for (int i = 0; i < NUM_CSTS; i++)
    {
      cst_int[i] = build_int_cst (integer_type_node, i);
      cst_sval[i] = mgr.get_or_create_constant_svalue (cst_int[i]);
      ASSERT_EQ (cst_sval[i]->get_kind (), SK_CONSTANT);
      ASSERT_EQ (cst_sval[i]->maybe_get_constant (), cst_int[i]);
    }

  /* Verify that binops on pairs of constants are constant-folded,
     checking only those cases where the result is itself within the
     0..NUM_CSTS-1 table.  */
  for (int i = 0; i < NUM_CSTS; i++)
    for (int j = 0; j < NUM_CSTS; j++)
      {
	/* Distinct constants must yield distinct (interned) svalues.  */
	if (i != j)
	  ASSERT_NE (cst_sval[i], cst_sval[j]);
	if (i + j < NUM_CSTS)
	  {
	    const svalue *sum
	      = mgr.get_or_create_binop (integer_type_node, PLUS_EXPR,
					 cst_sval[i], cst_sval[j]);
	    ASSERT_EQ (sum, cst_sval[i + j]);
	  }
	if (i - j >= 0)
	  {
	    const svalue *difference
	      = mgr.get_or_create_binop (integer_type_node, MINUS_EXPR,
					 cst_sval[i], cst_sval[j]);
	    ASSERT_EQ (difference, cst_sval[i - j]);
	  }
	if (i * j < NUM_CSTS)
	  {
	    const svalue *product
	      = mgr.get_or_create_binop (integer_type_node, MULT_EXPR,
					 cst_sval[i], cst_sval[j]);
	    ASSERT_EQ (product, cst_sval[i * j]);
	  }
	/* Comparisons fold to the constants 1 (true) or 0 (false).  */
	const svalue *eq = mgr.get_or_create_binop (integer_type_node, EQ_EXPR,
				    cst_sval[i], cst_sval[j]);
	ASSERT_EQ (eq, i == j ? cst_sval[1] : cst_sval [0]);
	const svalue *neq = mgr.get_or_create_binop (integer_type_node, NE_EXPR,
				     cst_sval[i], cst_sval[j]);
	ASSERT_EQ (neq, i != j ? cst_sval[1] : cst_sval [0]);
	// etc
      }

  tree x = build_global_decl ("x", integer_type_node);

  test_region_model_context ctxt;
  region_model model (&mgr);
  /* x_init is a symbolic (non-constant) value: the initial value of x.  */
  const svalue *x_init = model.get_rvalue (x, &ctxt);

  /* PLUS_EXPR folding: x + 0 and 0 + x should both fold to x.  */
  const svalue *x_init_plus_zero
    = mgr.get_or_create_binop (integer_type_node, PLUS_EXPR,
			       x_init, cst_sval[0]);
  ASSERT_EQ (x_init_plus_zero, x_init);
  const svalue *zero_plus_x_init
    = mgr.get_or_create_binop (integer_type_node, PLUS_EXPR,
			       cst_sval[0], x_init);
  ASSERT_EQ (zero_plus_x_init, x_init);

  /* MULT_EXPR folding: x * 0 and 0 * x fold to 0; x * 1 and 1 * x
     fold to x.  */
  const svalue *x_init_times_zero
    = mgr.get_or_create_binop (integer_type_node, MULT_EXPR,
			       x_init, cst_sval[0]);
  ASSERT_EQ (x_init_times_zero, cst_sval[0]);
  const svalue *zero_times_x_init
    = mgr.get_or_create_binop (integer_type_node, MULT_EXPR,
			       cst_sval[0], x_init);
  ASSERT_EQ (zero_times_x_init, cst_sval[0]);

  const svalue *x_init_times_one
    = mgr.get_or_create_binop (integer_type_node, MULT_EXPR,
			       x_init, cst_sval[1]);
  ASSERT_EQ (x_init_times_one, x_init);
  const svalue *one_times_x_init
    = mgr.get_or_create_binop (integer_type_node, MULT_EXPR,
			       cst_sval[1], x_init);
  ASSERT_EQ (one_times_x_init, x_init);

  // etc
  // TODO: do we want to use the match-and-simplify DSL for this?

  /* Verify that binops put any constants on the RHS:
     4 * x and x * 4 should be the same interned svalue, with the
     constant as arg1.  */
  const svalue *four_times_x_init
    = mgr.get_or_create_binop (integer_type_node, MULT_EXPR,
			       cst_sval[4], x_init);
  const svalue *x_init_times_four
    = mgr.get_or_create_binop (integer_type_node, MULT_EXPR,
			       x_init, cst_sval[4]);
  ASSERT_EQ (four_times_x_init, x_init_times_four);
  const binop_svalue *binop = four_times_x_init->dyn_cast_binop_svalue ();
  ASSERT_EQ (binop->get_op (), MULT_EXPR);
  ASSERT_EQ (binop->get_arg0 (), x_init);
  ASSERT_EQ (binop->get_arg1 (), cst_sval[4]);

  /* Verify that ((x + 1) + 1) == (x + 2).  */
  const svalue *x_init_plus_one
    = mgr.get_or_create_binop (integer_type_node, PLUS_EXPR,
			       x_init, cst_sval[1]);
  const svalue *x_init_plus_two
    = mgr.get_or_create_binop (integer_type_node, PLUS_EXPR,
			       x_init, cst_sval[2]);
  const svalue *x_init_plus_one_plus_one
    = mgr.get_or_create_binop (integer_type_node, PLUS_EXPR,
			       x_init_plus_one, cst_sval[1]);
  ASSERT_EQ (x_init_plus_one_plus_one, x_init_plus_two);

  /* Verify various binops on booleans.  */
  {
    const svalue *sval_true = mgr.get_or_create_int_cst (boolean_type_node, 1);
    const svalue *sval_false = mgr.get_or_create_int_cst (boolean_type_node, 0);
    const svalue *sval_unknown
      = mgr.get_or_create_unknown_svalue (boolean_type_node);
    const placeholder_svalue sval_placeholder (mgr.alloc_symbol_id (),
					       boolean_type_node, "v");
    /* OR: true dominates the result; false is the identity, so
       (false | v) folds to v even for symbolic v.  */
    for (auto op : {BIT_IOR_EXPR, TRUTH_OR_EXPR})
      {
	ASSERT_EQ (mgr.get_or_create_binop (boolean_type_node, op,
					    sval_true, sval_unknown),
		   sval_true);
	ASSERT_EQ (mgr.get_or_create_binop (boolean_type_node, op,
					    sval_false, sval_unknown),
		   sval_unknown);
	ASSERT_EQ (mgr.get_or_create_binop (boolean_type_node, op,
					    sval_false, &sval_placeholder),
		   &sval_placeholder);
      }
    /* AND: false dominates the result; true is the identity, so
       (true & v) folds to v even for symbolic v.  */
    for (auto op : {BIT_AND_EXPR, TRUTH_AND_EXPR})
      {
	ASSERT_EQ (mgr.get_or_create_binop (boolean_type_node, op,
					    sval_false, sval_unknown),
		   sval_false);
	ASSERT_EQ (mgr.get_or_create_binop (boolean_type_node, op,
					    sval_true, sval_unknown),
		   sval_unknown);
	ASSERT_EQ (mgr.get_or_create_binop (boolean_type_node, op,
					    sval_true, &sval_placeholder),
		   &sval_placeholder);
      }
  }
}
8318 :
8319 : /* Verify that sub_svalues are folded as expected. */
8320 :
8321 : static void
8322 4 : test_sub_svalue_folding ()
8323 : {
8324 4 : coord_test ct;
8325 4 : tree c = build_global_decl ("c", ct.m_coord_type);
8326 4 : tree c_x = build3 (COMPONENT_REF, TREE_TYPE (ct.m_x_field),
8327 : c, ct.m_x_field, NULL_TREE);
8328 :
8329 4 : region_model_manager mgr;
8330 4 : region_model model (&mgr);
8331 4 : test_region_model_context ctxt;
8332 4 : const region *c_x_reg = model.get_lvalue (c_x, &ctxt);
8333 :
8334 : /* Verify that sub_svalue of "unknown" simply
8335 : yields an unknown. */
8336 :
8337 4 : const svalue *unknown = mgr.get_or_create_unknown_svalue (ct.m_coord_type);
8338 4 : const svalue *sub = mgr.get_or_create_sub_svalue (TREE_TYPE (ct.m_x_field),
8339 : unknown, c_x_reg);
8340 4 : ASSERT_EQ (sub->get_kind (), SK_UNKNOWN);
8341 4 : ASSERT_EQ (sub->get_type (), TREE_TYPE (ct.m_x_field));
8342 4 : }
8343 :
8344 : /* Get BIT within VAL as a symbolic value within MGR. */
8345 :
8346 : static const svalue *
8347 256 : get_bit (region_model_manager *mgr,
8348 : bit_offset_t bit,
8349 : unsigned HOST_WIDE_INT val)
8350 : {
8351 256 : const svalue *inner_svalue
8352 256 : = mgr->get_or_create_int_cst (unsigned_type_node, val);
8353 256 : return mgr->get_or_create_bits_within (boolean_type_node,
8354 256 : bit_range (bit, 1),
8355 256 : inner_svalue);
8356 : }
8357 :
8358 : /* Verify that bits_within_svalues are folded as expected. */
8359 :
8360 : static void
8361 4 : test_bits_within_svalue_folding ()
8362 : {
8363 4 : region_model_manager mgr;
8364 :
8365 4 : const svalue *zero = mgr.get_or_create_int_cst (boolean_type_node, 0);
8366 4 : const svalue *one = mgr.get_or_create_int_cst (boolean_type_node, 1);
8367 :
8368 4 : {
8369 4 : const unsigned val = 0x0000;
8370 68 : for (unsigned bit = 0; bit < 16; bit++)
8371 64 : ASSERT_EQ (get_bit (&mgr, bit, val), zero);
8372 : }
8373 :
8374 4 : {
8375 4 : const unsigned val = 0x0001;
8376 4 : ASSERT_EQ (get_bit (&mgr, 0, val), one);
8377 64 : for (unsigned bit = 1; bit < 16; bit++)
8378 60 : ASSERT_EQ (get_bit (&mgr, bit, val), zero);
8379 : }
8380 :
8381 4 : {
8382 4 : const unsigned val = 0x8000;
8383 64 : for (unsigned bit = 0; bit < 15; bit++)
8384 60 : ASSERT_EQ (get_bit (&mgr, bit, val), zero);
8385 4 : ASSERT_EQ (get_bit (&mgr, 15, val), one);
8386 : }
8387 :
8388 4 : {
8389 4 : const unsigned val = 0xFFFF;
8390 68 : for (unsigned bit = 0; bit < 16; bit++)
8391 64 : ASSERT_EQ (get_bit (&mgr, bit, val), one);
8392 : }
8393 4 : }
8394 :
8395 : /* Test that region::descendent_of_p works as expected. */
8396 :
8397 : static void
8398 4 : test_descendent_of_p ()
8399 : {
8400 4 : region_model_manager mgr;
8401 4 : const region *stack = mgr.get_stack_region ();
8402 4 : const region *heap = mgr.get_heap_region ();
8403 4 : const region *code = mgr.get_code_region ();
8404 4 : const region *globals = mgr.get_globals_region ();
8405 :
8406 : /* descendent_of_p should return true when used on the region itself. */
8407 4 : ASSERT_TRUE (stack->descendent_of_p (stack));
8408 4 : ASSERT_FALSE (stack->descendent_of_p (heap));
8409 4 : ASSERT_FALSE (stack->descendent_of_p (code));
8410 4 : ASSERT_FALSE (stack->descendent_of_p (globals));
8411 :
8412 4 : tree x = build_global_decl ("x", integer_type_node);
8413 4 : const region *x_reg = mgr.get_region_for_global (x);
8414 4 : ASSERT_TRUE (x_reg->descendent_of_p (globals));
8415 :
8416 : /* A cast_region should be a descendent of the original region. */
8417 4 : const region *cast_reg = mgr.get_cast_region (x_reg, ptr_type_node);
8418 4 : ASSERT_TRUE (cast_reg->descendent_of_p (x_reg));
8419 4 : }
8420 :
8421 : /* Verify that bit_range_region works as expected. */
8422 :
8423 : static void
8424 4 : test_bit_range_regions ()
8425 : {
8426 4 : tree x = build_global_decl ("x", integer_type_node);
8427 4 : region_model_manager mgr;
8428 4 : const region *x_reg = mgr.get_region_for_global (x);
8429 4 : const region *byte0
8430 4 : = mgr.get_bit_range (x_reg, char_type_node, bit_range (0, 8));
8431 4 : const region *byte1
8432 4 : = mgr.get_bit_range (x_reg, char_type_node, bit_range (8, 8));
8433 4 : ASSERT_TRUE (byte0->descendent_of_p (x_reg));
8434 4 : ASSERT_TRUE (byte1->descendent_of_p (x_reg));
8435 4 : ASSERT_NE (byte0, byte1);
8436 4 : }
8437 :
8438 : /* Verify that simple assignments work as expected. */
8439 :
8440 : static void
8441 4 : test_assignment ()
8442 : {
8443 4 : tree int_0 = integer_zero_node;
8444 4 : tree x = build_global_decl ("x", integer_type_node);
8445 4 : tree y = build_global_decl ("y", integer_type_node);
8446 :
8447 : /* "x == 0", then use of y, then "y = 0;". */
8448 4 : region_model_manager mgr;
8449 4 : region_model model (&mgr);
8450 4 : ADD_SAT_CONSTRAINT (model, x, EQ_EXPR, int_0);
8451 4 : ASSERT_CONDITION_UNKNOWN (model, y, EQ_EXPR, int_0);
8452 4 : model.set_value (model.get_lvalue (y, nullptr),
8453 : model.get_rvalue (int_0, nullptr),
8454 : nullptr);
8455 4 : ASSERT_CONDITION_TRUE (model, y, EQ_EXPR, int_0);
8456 4 : ASSERT_CONDITION_TRUE (model, y, EQ_EXPR, x);
8457 4 : }
8458 :
8459 : /* Verify that compound assignments work as expected. */
8460 :
8461 : static void
8462 4 : test_compound_assignment ()
8463 : {
8464 4 : coord_test ct;
8465 :
8466 4 : tree c = build_global_decl ("c", ct.m_coord_type);
8467 4 : tree c_x = build3 (COMPONENT_REF, TREE_TYPE (ct.m_x_field),
8468 : c, ct.m_x_field, NULL_TREE);
8469 4 : tree c_y = build3 (COMPONENT_REF, TREE_TYPE (ct.m_y_field),
8470 : c, ct.m_y_field, NULL_TREE);
8471 4 : tree d = build_global_decl ("d", ct.m_coord_type);
8472 4 : tree d_x = build3 (COMPONENT_REF, TREE_TYPE (ct.m_x_field),
8473 : d, ct.m_x_field, NULL_TREE);
8474 4 : tree d_y = build3 (COMPONENT_REF, TREE_TYPE (ct.m_y_field),
8475 : d, ct.m_y_field, NULL_TREE);
8476 :
8477 4 : tree int_17 = build_int_cst (integer_type_node, 17);
8478 4 : tree int_m3 = build_int_cst (integer_type_node, -3);
8479 :
8480 4 : region_model_manager mgr;
8481 4 : region_model model (&mgr);
8482 4 : model.set_value (c_x, int_17, nullptr);
8483 4 : model.set_value (c_y, int_m3, nullptr);
8484 :
8485 : /* Copy c to d. */
8486 4 : const svalue *sval = model.get_rvalue (c, nullptr);
8487 4 : model.set_value (model.get_lvalue (d, nullptr), sval, nullptr);
8488 :
8489 : /* Check that the fields have the same svalues. */
8490 4 : ASSERT_EQ (model.get_rvalue (c_x, nullptr), model.get_rvalue (d_x, nullptr));
8491 4 : ASSERT_EQ (model.get_rvalue (c_y, nullptr), model.get_rvalue (d_y, nullptr));
8492 4 : }
8493 :
8494 : /* Verify the details of pushing and popping stack frames. */
8495 :
static void
test_stack_frames ()
{
  /* Constants used as values/constraint bounds below.  */
  tree int_42 = build_int_cst (integer_type_node, 42);
  tree int_10 = build_int_cst (integer_type_node, 10);
  tree int_5 = build_int_cst (integer_type_node, 5);
  tree int_0 = integer_zero_node;

  /* Two fndecls sharing an empty param-type list: a caller and a
     callee.  */
  auto_vec <tree> param_types;
  tree parent_fndecl = make_fndecl (integer_type_node,
				    "parent_fn",
				    param_types);
  allocate_struct_function (parent_fndecl, true);

  tree child_fndecl = make_fndecl (integer_type_node,
				   "child_fn",
				   param_types);
  allocate_struct_function (child_fndecl, true);

  /* "a" and "b" in the parent frame.  */
  tree a = build_decl (UNKNOWN_LOCATION, PARM_DECL,
		       get_identifier ("a"),
		       integer_type_node);
  DECL_CONTEXT (a) = parent_fndecl;
  tree b = build_decl (UNKNOWN_LOCATION, PARM_DECL,
		       get_identifier ("b"),
		       integer_type_node);
  DECL_CONTEXT (b) = parent_fndecl;
  /* "x" and "y" in a child frame.  */
  tree x = build_decl (UNKNOWN_LOCATION, PARM_DECL,
		       get_identifier ("x"),
		       integer_type_node);
  DECL_CONTEXT (x) = child_fndecl;
  tree y = build_decl (UNKNOWN_LOCATION, PARM_DECL,
		       get_identifier ("y"),
		       integer_type_node);
  DECL_CONTEXT (y) = child_fndecl;

  /* "p" global.  */
  tree p = build_global_decl ("p", ptr_type_node);

  /* "q" global.  */
  tree q = build_global_decl ("q", ptr_type_node);

  region_model_manager mgr;
  test_region_model_context ctxt;
  region_model model (&mgr);

  /* Push stack frame for "parent_fn"; set "a" to 42.  */
  const region *parent_frame_reg
    = model.push_frame (*DECL_STRUCT_FUNCTION (parent_fndecl),
			nullptr, nullptr, &ctxt);
  ASSERT_EQ (model.get_current_frame (), parent_frame_reg);
  ASSERT_TRUE (model.region_exists_p (parent_frame_reg));
  const region *a_in_parent_reg = model.get_lvalue (a, &ctxt);
  model.set_value (a_in_parent_reg,
		   model.get_rvalue (int_42, &ctxt),
		   &ctxt);
  ASSERT_EQ (a_in_parent_reg->maybe_get_frame_region (), parent_frame_reg);

  /* Constrain "b < 10" in the parent frame.  */
  model.add_constraint (b, LT_EXPR, int_10, &ctxt);
  ASSERT_EQ (model.eval_condition (b, LT_EXPR, int_10, &ctxt),
	     tristate (tristate::TS_TRUE));

  /* Push stack frame for "child_fn"; set "x" to 0.  */
  const region *child_frame_reg
    = model.push_frame (*DECL_STRUCT_FUNCTION (child_fndecl),
			nullptr, nullptr, &ctxt);
  ASSERT_EQ (model.get_current_frame (), child_frame_reg);
  ASSERT_TRUE (model.region_exists_p (child_frame_reg));
  const region *x_in_child_reg = model.get_lvalue (x, &ctxt);
  model.set_value (x_in_child_reg,
		   model.get_rvalue (int_0, &ctxt),
		   &ctxt);
  ASSERT_EQ (x_in_child_reg->maybe_get_frame_region (), child_frame_reg);

  /* Constrain "y != 5" in the child frame.  */
  model.add_constraint (y, NE_EXPR, int_5, &ctxt);
  ASSERT_EQ (model.eval_condition (y, NE_EXPR, int_5, &ctxt),
	     tristate (tristate::TS_TRUE));

  /* Point a global pointer at a local in the child frame:  p = &x.  */
  const region *p_in_globals_reg = model.get_lvalue (p, &ctxt);
  model.set_value (p_in_globals_reg,
		   mgr.get_ptr_svalue (ptr_type_node, x_in_child_reg),
		   &ctxt);
  ASSERT_EQ (p_in_globals_reg->maybe_get_frame_region (), nullptr);

  /* Point another global pointer at p: q = &p.  */
  const region *q_in_globals_reg = model.get_lvalue (q, &ctxt);
  model.set_value (q_in_globals_reg,
		   mgr.get_ptr_svalue (ptr_type_node, p_in_globals_reg),
		   &ctxt);

  /* Test region::descendent_of_p.  */
  ASSERT_TRUE (child_frame_reg->descendent_of_p (child_frame_reg));
  ASSERT_TRUE (x_in_child_reg->descendent_of_p (child_frame_reg));
  ASSERT_FALSE (a_in_parent_reg->descendent_of_p (child_frame_reg));

  /* Pop the "child_fn" frame from the stack.  */
  model.pop_frame (nullptr, nullptr, &ctxt, nullptr);
  ASSERT_FALSE (model.region_exists_p (child_frame_reg));
  ASSERT_TRUE (model.region_exists_p (parent_frame_reg));

  /* Verify that p (which was pointing at the local "x" in the popped
     frame) has been poisoned.  */
  const svalue *new_p_sval = model.get_rvalue (p, nullptr);
  ASSERT_EQ (new_p_sval->get_kind (), SK_POISONED);
  ASSERT_EQ (new_p_sval->dyn_cast_poisoned_svalue ()->get_poison_kind (),
	     poison_kind::popped_stack);

  /* Verify that q still points to p, in spite of the region
     renumbering.  */
  const svalue *new_q_sval = model.get_rvalue (q, &ctxt);
  ASSERT_EQ (new_q_sval->get_kind (), SK_REGION);
  ASSERT_EQ (new_q_sval->maybe_get_region (),
	     model.get_lvalue (p, &ctxt));

  /* Verify that top of stack has been updated.  */
  ASSERT_EQ (model.get_current_frame (), parent_frame_reg);

  /* Verify locals in parent frame.  */
  /* Verify "a" still has its value.  */
  const svalue *new_a_sval = model.get_rvalue (a, &ctxt);
  ASSERT_EQ (new_a_sval->get_kind (), SK_CONSTANT);
  ASSERT_EQ (new_a_sval->dyn_cast_constant_svalue ()->get_constant (),
	     int_42);
  /* Verify "b" still has its constraint.  */
  ASSERT_EQ (model.eval_condition (b, LT_EXPR, int_10, &ctxt),
	     tristate (tristate::TS_TRUE));
}
8626 :
8627 : /* Verify that get_representative_path_var works as expected, that
8628 : we can map from regions to parms and back within a recursive call
8629 : stack. */
8630 :
static void
test_get_representative_path_var ()
{
  auto_vec <tree> param_types;
  tree fndecl = make_fndecl (integer_type_node,
			     "factorial",
			     param_types);
  allocate_struct_function (fndecl, true);

  /* Parm "n".  */
  tree n = build_decl (UNKNOWN_LOCATION, PARM_DECL,
		       get_identifier ("n"),
		       integer_type_node);
  DECL_CONTEXT (n) = fndecl;

  region_model_manager mgr;
  test_region_model_context ctxt;
  region_model model (&mgr);

  /* Push 5 stack frames for "factorial", each with a param  */
  auto_vec<const region *> parm_regs;
  auto_vec<const svalue *> parm_svals;
  for (int depth = 0; depth < 5; depth++)
    {
      const region *frame_n_reg
	= model.push_frame (*DECL_STRUCT_FUNCTION (fndecl),
			    nullptr, nullptr, &ctxt);
      /* Record the region for "n" within this frame, and its initial
	 (symbolic) value.  */
      const region *parm_n_reg = model.get_lvalue (path_var (n, depth), &ctxt);
      parm_regs.safe_push (parm_n_reg);

      ASSERT_EQ (parm_n_reg->get_parent_region (), frame_n_reg);
      const svalue *sval_n = mgr.get_or_create_initial_value (parm_n_reg);
      parm_svals.safe_push (sval_n);
    }

  /* Verify that we can recognize that the regions are the parms,
     at every depth.
     NOTE(review): the lookups below use "depth" (0-based) but the
     representative path_vars come back as "depth + 1" — presumably
     get_representative_path_var reports the frame's 1-based stack
     depth; confirm against path_var's definition.  */
  for (int depth = 0; depth < 5; depth++)
    {
      {
	svalue_set visited;
	ASSERT_EQ (model.get_representative_path_var (parm_regs[depth],
						      &visited,
						      nullptr),
		   path_var (n, depth + 1));
      }
      /* ...and that we can lookup lvalues for locals for all frames,
	 not just the top.  */
      ASSERT_EQ (model.get_lvalue (path_var (n, depth), nullptr),
		 parm_regs[depth]);
      /* ...and that we can locate the svalues.  */
      {
	svalue_set visited;
	ASSERT_EQ (model.get_representative_path_var (parm_svals[depth],
						      &visited,
						      nullptr),
		   path_var (n, depth + 1));
      }
    }
}
8691 :
8692 : /* Ensure that region_model::operator== works as expected. */
8693 :
static void
test_equality_1 ()
{
  tree int_42 = build_int_cst (integer_type_node, 42);
  tree int_17 = build_int_cst (integer_type_node, 17);

  /* Verify that "empty" region_model instances are equal to each other.  */
  region_model_manager mgr;
  region_model model0 (&mgr);
  region_model model1 (&mgr);
  ASSERT_EQ (model0, model1);

  /* Verify that setting state in model0 makes the models non-equal.  */
  tree x = build_global_decl ("x", integer_type_node);
  model0.set_value (x, int_42, nullptr);
  ASSERT_EQ (model0.get_rvalue (x, nullptr)->maybe_get_constant (), int_42);
  ASSERT_NE (model0, model1);

  /* Verify the copy-ctor.  */
  region_model model2 (model0);
  ASSERT_EQ (model0, model2);
  ASSERT_EQ (model2.get_rvalue (x, nullptr)->maybe_get_constant (), int_42);
  ASSERT_NE (model1, model2);

  /* Verify that models obtained from copy-ctor are independently editable
     w/o affecting the original model.  */
  model2.set_value (x, int_17, nullptr);
  ASSERT_NE (model0, model2);
  ASSERT_EQ (model2.get_rvalue (x, nullptr)->maybe_get_constant (), int_17);
  ASSERT_EQ (model0.get_rvalue (x, nullptr)->maybe_get_constant (), int_42);
}
8725 :
8726 : /* Verify that region models for
8727 : x = 42; y = 113;
8728 : and
8729 : y = 113; x = 42;
8730 : are equal. */
8731 :
8732 : static void
8733 4 : test_canonicalization_2 ()
8734 : {
8735 4 : tree int_42 = build_int_cst (integer_type_node, 42);
8736 4 : tree int_113 = build_int_cst (integer_type_node, 113);
8737 4 : tree x = build_global_decl ("x", integer_type_node);
8738 4 : tree y = build_global_decl ("y", integer_type_node);
8739 :
8740 4 : region_model_manager mgr;
8741 4 : region_model model0 (&mgr);
8742 4 : model0.set_value (model0.get_lvalue (x, nullptr),
8743 : model0.get_rvalue (int_42, nullptr),
8744 : nullptr);
8745 4 : model0.set_value (model0.get_lvalue (y, nullptr),
8746 : model0.get_rvalue (int_113, nullptr),
8747 : nullptr);
8748 :
8749 4 : region_model model1 (&mgr);
8750 4 : model1.set_value (model1.get_lvalue (y, nullptr),
8751 : model1.get_rvalue (int_113, nullptr),
8752 : nullptr);
8753 4 : model1.set_value (model1.get_lvalue (x, nullptr),
8754 : model1.get_rvalue (int_42, nullptr),
8755 : nullptr);
8756 :
8757 4 : ASSERT_EQ (model0, model1);
8758 4 : }
8759 :
8760 : /* Verify that constraints for
8761 : x > 3 && y > 42
8762 : and
8763 : y > 42 && x > 3
8764 : are equal after canonicalization. */
8765 :
8766 : static void
8767 4 : test_canonicalization_3 ()
8768 : {
8769 4 : tree int_3 = build_int_cst (integer_type_node, 3);
8770 4 : tree int_42 = build_int_cst (integer_type_node, 42);
8771 4 : tree x = build_global_decl ("x", integer_type_node);
8772 4 : tree y = build_global_decl ("y", integer_type_node);
8773 :
8774 4 : region_model_manager mgr;
8775 4 : region_model model0 (&mgr);
8776 4 : model0.add_constraint (x, GT_EXPR, int_3, nullptr);
8777 4 : model0.add_constraint (y, GT_EXPR, int_42, nullptr);
8778 :
8779 4 : region_model model1 (&mgr);
8780 4 : model1.add_constraint (y, GT_EXPR, int_42, nullptr);
8781 4 : model1.add_constraint (x, GT_EXPR, int_3, nullptr);
8782 :
8783 4 : model0.canonicalize ();
8784 4 : model1.canonicalize ();
8785 4 : ASSERT_EQ (model0, model1);
8786 4 : }
8787 :
8788 : /* Verify that we can canonicalize a model containing NaN and other real
8789 : constants. */
8790 :
8791 : static void
8792 4 : test_canonicalization_4 ()
8793 : {
8794 4 : auto_vec<tree> csts;
8795 4 : append_interesting_constants (&csts);
8796 :
8797 4 : region_model_manager mgr;
8798 4 : region_model model (&mgr);
8799 :
8800 60 : for (tree cst : csts)
8801 48 : model.get_rvalue (cst, nullptr);
8802 :
8803 4 : model.canonicalize ();
8804 4 : }
8805 :
8806 : /* Assert that if we have two region_model instances
8807 : with values VAL_A and VAL_B for EXPR that they are
8808 : mergable. Write the merged model to *OUT_MERGED_MODEL,
8809 : and the merged svalue ptr to *OUT_MERGED_SVALUE.
   If VAL_A or VAL_B are NULL_TREE, don't populate EXPR
8811 : for that region_model. */
8812 :
static void
assert_region_models_merge (tree expr, tree val_a, tree val_b,
			    region_model *out_merged_model,
			    const svalue **out_merged_svalue)
{
  region_model_manager *mgr = out_merged_model->get_manager ();
  program_point point (program_point::origin (*mgr));
  test_region_model_context ctxt;
  /* Build the two input models, populating EXPR in each only when a
     value was supplied for it.  */
  region_model model0 (mgr);
  region_model model1 (mgr);
  if (val_a)
    model0.set_value (model0.get_lvalue (expr, &ctxt),
		      model0.get_rvalue (val_a, &ctxt),
		      &ctxt);
  if (val_b)
    model1.set_value (model1.get_lvalue (expr, &ctxt),
		      model1.get_rvalue (val_b, &ctxt),
		      &ctxt);

  /* They should be mergeable.  */
  ASSERT_TRUE (model0.can_merge_with_p (model1, point, out_merged_model));
  /* Expose the merged value of EXPR for the caller's assertions.  */
  *out_merged_svalue = out_merged_model->get_rvalue (expr, &ctxt);
}
8836 :
8837 : /* Verify that we can merge region_model instances. */
8838 :
8839 : static void
8840 4 : test_state_merging ()
8841 : {
8842 4 : tree int_42 = build_int_cst (integer_type_node, 42);
8843 4 : tree int_113 = build_int_cst (integer_type_node, 113);
8844 4 : tree x = build_global_decl ("x", integer_type_node);
8845 4 : tree y = build_global_decl ("y", integer_type_node);
8846 4 : tree z = build_global_decl ("z", integer_type_node);
8847 4 : tree p = build_global_decl ("p", ptr_type_node);
8848 :
8849 4 : tree addr_of_y = build1 (ADDR_EXPR, ptr_type_node, y);
8850 4 : tree addr_of_z = build1 (ADDR_EXPR, ptr_type_node, z);
8851 :
8852 4 : auto_vec <tree> param_types;
8853 4 : tree test_fndecl = make_fndecl (integer_type_node, "test_fn", param_types);
8854 4 : allocate_struct_function (test_fndecl, true);
8855 :
8856 : /* Param "a". */
8857 4 : tree a = build_decl (UNKNOWN_LOCATION, PARM_DECL,
8858 : get_identifier ("a"),
8859 : integer_type_node);
8860 4 : DECL_CONTEXT (a) = test_fndecl;
8861 4 : tree addr_of_a = build1 (ADDR_EXPR, ptr_type_node, a);
8862 :
8863 : /* Param "q", a pointer. */
8864 4 : tree q = build_decl (UNKNOWN_LOCATION, PARM_DECL,
8865 : get_identifier ("q"),
8866 : ptr_type_node);
8867 4 : DECL_CONTEXT (q) = test_fndecl;
8868 :
8869 4 : region_model_manager mgr;
8870 4 : program_point point (program_point::origin (mgr));
8871 :
8872 4 : {
8873 4 : region_model model0 (&mgr);
8874 4 : region_model model1 (&mgr);
8875 4 : region_model merged (&mgr);
8876 : /* Verify empty models can be merged. */
8877 4 : ASSERT_TRUE (model0.can_merge_with_p (model1, point, &merged));
8878 4 : ASSERT_EQ (model0, merged);
8879 4 : }
8880 :
8881 : /* Verify that we can merge two contradictory constraints on the
8882 : value for a global. */
8883 : /* TODO: verify that the merged model doesn't have a value for
8884 : the global */
8885 4 : {
8886 4 : region_model model0 (&mgr);
8887 4 : region_model model1 (&mgr);
8888 4 : region_model merged (&mgr);
8889 4 : test_region_model_context ctxt;
8890 4 : model0.add_constraint (x, EQ_EXPR, int_42, &ctxt);
8891 4 : model1.add_constraint (x, EQ_EXPR, int_113, &ctxt);
8892 4 : ASSERT_TRUE (model0.can_merge_with_p (model1, point, &merged));
8893 4 : ASSERT_NE (model0, merged);
8894 4 : ASSERT_NE (model1, merged);
8895 4 : }
8896 :
8897 : /* Verify handling of a PARM_DECL. */
8898 4 : {
8899 4 : test_region_model_context ctxt;
8900 4 : region_model model0 (&mgr);
8901 4 : region_model model1 (&mgr);
8902 4 : ASSERT_EQ (model0.get_stack_depth (), 0);
8903 4 : model0.push_frame (*DECL_STRUCT_FUNCTION (test_fndecl),
8904 : nullptr, nullptr, &ctxt);
8905 4 : ASSERT_EQ (model0.get_stack_depth (), 1);
8906 4 : model1.push_frame (*DECL_STRUCT_FUNCTION (test_fndecl),
8907 : nullptr, nullptr, &ctxt);
8908 :
8909 4 : placeholder_svalue test_sval (mgr.alloc_symbol_id (),
8910 4 : integer_type_node, "test sval");
8911 4 : model0.set_value (model0.get_lvalue (a, &ctxt), &test_sval, &ctxt);
8912 4 : model1.set_value (model1.get_lvalue (a, &ctxt), &test_sval, &ctxt);
8913 4 : ASSERT_EQ (model0, model1);
8914 :
8915 : /* They should be mergeable, and the result should be the same. */
8916 4 : region_model merged (&mgr);
8917 4 : ASSERT_TRUE (model0.can_merge_with_p (model1, point, &merged));
8918 4 : ASSERT_EQ (model0, merged);
8919 : /* In particular, "a" should have the placeholder value. */
8920 4 : ASSERT_EQ (merged.get_rvalue (a, &ctxt), &test_sval);
8921 4 : }
8922 :
8923 : /* Verify handling of a global. */
8924 4 : {
8925 4 : test_region_model_context ctxt;
8926 4 : region_model model0 (&mgr);
8927 4 : region_model model1 (&mgr);
8928 :
8929 4 : placeholder_svalue test_sval (mgr.alloc_symbol_id (),
8930 4 : integer_type_node, "test sval");
8931 4 : model0.set_value (model0.get_lvalue (x, &ctxt), &test_sval, &ctxt);
8932 4 : model1.set_value (model1.get_lvalue (x, &ctxt), &test_sval, &ctxt);
8933 4 : ASSERT_EQ (model0, model1);
8934 :
8935 : /* They should be mergeable, and the result should be the same. */
8936 4 : region_model merged (&mgr);
8937 4 : ASSERT_TRUE (model0.can_merge_with_p (model1, point, &merged));
8938 4 : ASSERT_EQ (model0, merged);
8939 : /* In particular, "x" should have the placeholder value. */
8940 4 : ASSERT_EQ (merged.get_rvalue (x, &ctxt), &test_sval);
8941 4 : }
8942 :
8943 : /* Use global-handling to verify various combinations of values. */
8944 :
8945 : /* Two equal constant values. */
8946 4 : {
8947 4 : region_model merged (&mgr);
8948 4 : const svalue *merged_x_sval;
8949 4 : assert_region_models_merge (x, int_42, int_42, &merged, &merged_x_sval);
8950 :
8951 : /* In particular, there should be a constant value for "x". */
8952 4 : ASSERT_EQ (merged_x_sval->get_kind (), SK_CONSTANT);
8953 4 : ASSERT_EQ (merged_x_sval->dyn_cast_constant_svalue ()->get_constant (),
8954 : int_42);
8955 4 : }
8956 :
8957 : /* Two non-equal constant values. */
8958 4 : {
8959 4 : region_model merged (&mgr);
8960 4 : const svalue *merged_x_sval;
8961 4 : assert_region_models_merge (x, int_42, int_113, &merged, &merged_x_sval);
8962 :
8963 : /* In particular, there should be a "widening" value for "x". */
8964 4 : ASSERT_EQ (merged_x_sval->get_kind (), SK_WIDENING);
8965 4 : }
8966 :
8967 : /* Initial and constant. */
8968 4 : {
8969 4 : region_model merged (&mgr);
8970 4 : const svalue *merged_x_sval;
8971 4 : assert_region_models_merge (x, NULL_TREE, int_113, &merged, &merged_x_sval);
8972 :
8973 : /* In particular, there should be an unknown value for "x". */
8974 4 : ASSERT_EQ (merged_x_sval->get_kind (), SK_UNKNOWN);
8975 4 : }
8976 :
8977 : /* Constant and initial. */
8978 4 : {
8979 4 : region_model merged (&mgr);
8980 4 : const svalue *merged_x_sval;
8981 4 : assert_region_models_merge (x, int_42, NULL_TREE, &merged, &merged_x_sval);
8982 :
8983 : /* In particular, there should be an unknown value for "x". */
8984 4 : ASSERT_EQ (merged_x_sval->get_kind (), SK_UNKNOWN);
8985 4 : }
8986 :
8987 : /* Unknown and constant. */
8988 : // TODO
8989 :
8990 : /* Pointers: NULL and NULL. */
8991 : // TODO
8992 :
8993 : /* Pointers: NULL and non-NULL. */
8994 : // TODO
8995 :
8996 : /* Pointers: non-NULL and non-NULL: ptr to a local. */
8997 4 : {
8998 4 : region_model model0 (&mgr);
8999 4 : model0.push_frame (*DECL_STRUCT_FUNCTION (test_fndecl),
9000 : nullptr, nullptr, nullptr);
9001 4 : model0.set_value (model0.get_lvalue (p, nullptr),
9002 : model0.get_rvalue (addr_of_a, nullptr), nullptr);
9003 :
9004 4 : region_model model1 (model0);
9005 4 : ASSERT_EQ (model0, model1);
9006 :
9007 : /* They should be mergeable, and the result should be the same. */
9008 4 : region_model merged (&mgr);
9009 4 : ASSERT_TRUE (model0.can_merge_with_p (model1, point, &merged));
9010 4 : ASSERT_EQ (model0, merged);
9011 4 : }
9012 :
9013 : /* Pointers: non-NULL and non-NULL: ptr to a global. */
9014 4 : {
9015 4 : region_model merged (&mgr);
9016 : /* p == &y in both input models. */
9017 4 : const svalue *merged_p_sval;
9018 4 : assert_region_models_merge (p, addr_of_y, addr_of_y, &merged,
9019 : &merged_p_sval);
9020 :
9021 : /* We should get p == &y in the merged model. */
9022 4 : ASSERT_EQ (merged_p_sval->get_kind (), SK_REGION);
9023 4 : const region_svalue *merged_p_ptr
9024 4 : = merged_p_sval->dyn_cast_region_svalue ();
9025 4 : const region *merged_p_star_reg = merged_p_ptr->get_pointee ();
9026 4 : ASSERT_EQ (merged_p_star_reg, merged.get_lvalue (y, nullptr));
9027 4 : }
9028 :
9029 : /* Pointers: non-NULL ptrs to different globals should not merge;
9030 : see e.g. gcc.dg/analyzer/torture/uninit-pr108725.c */
9031 4 : {
9032 4 : region_model merged_model (&mgr);
9033 4 : program_point point (program_point::origin (mgr));
9034 4 : test_region_model_context ctxt;
9035 : /* x == &y vs x == &z in the input models; these are actually casts
9036 : of the ptrs to "int". */
9037 4 : region_model model0 (&mgr);
9038 4 : region_model model1 (&mgr);
9039 4 : model0.set_value (model0.get_lvalue (x, &ctxt),
9040 : model0.get_rvalue (addr_of_y, &ctxt),
9041 : &ctxt);
9042 4 : model1.set_value (model1.get_lvalue (x, &ctxt),
9043 : model1.get_rvalue (addr_of_z, &ctxt),
9044 : &ctxt);
9045 : /* They should not be mergeable. */
9046 4 : ASSERT_FALSE (model0.can_merge_with_p (model1, point, &merged_model));
9047 4 : }
9048 :
9049 : /* Pointers: non-NULL and non-NULL: ptr to a heap region. */
9050 4 : {
9051 4 : test_region_model_context ctxt;
9052 4 : region_model model0 (&mgr);
9053 4 : tree size = build_int_cst (size_type_node, 1024);
9054 4 : const svalue *size_sval = mgr.get_or_create_constant_svalue (size);
9055 4 : const region *new_reg
9056 4 : = model0.get_or_create_region_for_heap_alloc (size_sval, &ctxt);
9057 4 : const svalue *ptr_sval = mgr.get_ptr_svalue (ptr_type_node, new_reg);
9058 4 : model0.set_value (model0.get_lvalue (p, &ctxt),
9059 : ptr_sval, &ctxt);
9060 :
9061 4 : region_model model1 (model0);
9062 :
9063 4 : ASSERT_EQ (model0, model1);
9064 :
9065 4 : region_model merged (&mgr);
9066 4 : ASSERT_TRUE (model0.can_merge_with_p (model1, point, &merged));
9067 :
9068 : /* The merged model ought to be identical. */
9069 4 : ASSERT_EQ (model0, merged);
9070 4 : }
9071 :
9072 : /* Two regions sharing the same placeholder svalue should continue sharing
9073 : it after self-merger. */
9074 4 : {
9075 4 : test_region_model_context ctxt;
9076 4 : region_model model0 (&mgr);
9077 4 : placeholder_svalue placeholder_sval (mgr.alloc_symbol_id (),
9078 4 : integer_type_node, "test");
9079 4 : model0.set_value (model0.get_lvalue (x, &ctxt),
9080 : &placeholder_sval, &ctxt);
9081 4 : model0.set_value (model0.get_lvalue (y, &ctxt), &placeholder_sval, &ctxt);
9082 4 : region_model model1 (model0);
9083 :
9084 : /* They should be mergeable, and the result should be the same. */
9085 4 : region_model merged (&mgr);
9086 4 : ASSERT_TRUE (model0.can_merge_with_p (model1, point, &merged));
9087 4 : ASSERT_EQ (model0, merged);
9088 :
9089 : /* In particular, we should have x == y. */
9090 4 : ASSERT_EQ (merged.eval_condition (x, EQ_EXPR, y, &ctxt),
9091 : tristate (tristate::TS_TRUE));
9092 4 : }
9093 :
9094 4 : {
9095 4 : region_model model0 (&mgr);
9096 4 : region_model model1 (&mgr);
9097 4 : test_region_model_context ctxt;
9098 4 : model0.add_constraint (x, EQ_EXPR, int_42, &ctxt);
9099 4 : model1.add_constraint (x, NE_EXPR, int_42, &ctxt);
9100 4 : region_model merged (&mgr);
9101 4 : ASSERT_TRUE (model0.can_merge_with_p (model1, point, &merged));
9102 4 : }
9103 :
9104 4 : {
9105 4 : region_model model0 (&mgr);
9106 4 : region_model model1 (&mgr);
9107 4 : test_region_model_context ctxt;
9108 4 : model0.add_constraint (x, EQ_EXPR, int_42, &ctxt);
9109 4 : model1.add_constraint (x, NE_EXPR, int_42, &ctxt);
9110 4 : model1.add_constraint (x, EQ_EXPR, int_113, &ctxt);
9111 4 : region_model merged (&mgr);
9112 4 : ASSERT_TRUE (model0.can_merge_with_p (model1, point, &merged));
9113 4 : }
9114 :
9115 : // TODO: what can't we merge? need at least one such test
9116 :
9117 : /* TODO: various things
9118 : - heap regions
9119 : - value merging:
9120 : - every combination, but in particular
9121 : - pairs of regions
9122 : */
9123 :
9124 : /* Views. */
9125 4 : {
9126 4 : test_region_model_context ctxt;
9127 4 : region_model model0 (&mgr);
9128 :
9129 4 : const region *x_reg = model0.get_lvalue (x, &ctxt);
9130 4 : const region *x_as_ptr = mgr.get_cast_region (x_reg, ptr_type_node);
9131 4 : model0.set_value (x_as_ptr, model0.get_rvalue (addr_of_y, &ctxt), &ctxt);
9132 :
9133 4 : region_model model1 (model0);
9134 4 : ASSERT_EQ (model1, model0);
9135 :
9136 : /* They should be mergeable, and the result should be the same. */
9137 4 : region_model merged (&mgr);
9138 4 : ASSERT_TRUE (model0.can_merge_with_p (model1, point, &merged));
9139 4 : }
9140 :
9141 : /* Verify that we can merge a model in which a local in an older stack
9142 : frame points to a local in a more recent stack frame. */
9143 4 : {
9144 4 : region_model model0 (&mgr);
9145 4 : model0.push_frame (*DECL_STRUCT_FUNCTION (test_fndecl),
9146 : nullptr, nullptr, nullptr);
9147 4 : const region *q_in_first_frame = model0.get_lvalue (q, nullptr);
9148 :
9149 : /* Push a second frame. */
9150 4 : const region *reg_2nd_frame
9151 4 : = model0.push_frame (*DECL_STRUCT_FUNCTION (test_fndecl),
9152 : nullptr, nullptr, nullptr);
9153 :
9154 : /* Have a pointer in the older frame point to a local in the
9155 : more recent frame. */
9156 4 : const svalue *sval_ptr = model0.get_rvalue (addr_of_a, nullptr);
9157 4 : model0.set_value (q_in_first_frame, sval_ptr, nullptr);
9158 :
9159 : /* Verify that it's pointing at the newer frame. */
9160 4 : const region *reg_pointee = sval_ptr->maybe_get_region ();
9161 4 : ASSERT_EQ (reg_pointee->get_parent_region (), reg_2nd_frame);
9162 :
9163 4 : model0.canonicalize ();
9164 :
9165 4 : region_model model1 (model0);
9166 4 : ASSERT_EQ (model0, model1);
9167 :
9168 : /* They should be mergeable, and the result should be the same
9169 : (after canonicalization, at least). */
9170 4 : region_model merged (&mgr);
9171 4 : ASSERT_TRUE (model0.can_merge_with_p (model1, point, &merged));
9172 4 : merged.canonicalize ();
9173 4 : ASSERT_EQ (model0, merged);
9174 4 : }
9175 :
9176 : /* Verify that we can merge a model in which a local points to a global. */
9177 4 : {
9178 4 : region_model model0 (&mgr);
9179 4 : model0.push_frame (*DECL_STRUCT_FUNCTION (test_fndecl),
9180 : nullptr, nullptr, nullptr);
9181 4 : model0.set_value (model0.get_lvalue (q, nullptr),
9182 : model0.get_rvalue (addr_of_y, nullptr), nullptr);
9183 :
9184 4 : region_model model1 (model0);
9185 4 : ASSERT_EQ (model0, model1);
9186 :
9187 : /* They should be mergeable, and the result should be the same
9188 : (after canonicalization, at least). */
9189 4 : region_model merged (&mgr);
9190 4 : ASSERT_TRUE (model0.can_merge_with_p (model1, point, &merged));
9191 4 : ASSERT_EQ (model0, merged);
9192 4 : }
9193 4 : }
9194 :
9195 : /* Verify that constraints are correctly merged when merging region_model
9196 : instances. */
9197 :
9198 : static void
9199 4 : test_constraint_merging ()
9200 : {
9201 4 : tree int_0 = integer_zero_node;
9202 4 : tree int_5 = build_int_cst (integer_type_node, 5);
9203 4 : tree x = build_global_decl ("x", integer_type_node);
9204 4 : tree y = build_global_decl ("y", integer_type_node);
9205 4 : tree z = build_global_decl ("z", integer_type_node);
9206 4 : tree n = build_global_decl ("n", integer_type_node);
9207 :
9208 4 : region_model_manager mgr;
9209 4 : test_region_model_context ctxt;
9210 :
9211 : /* model0: 0 <= (x == y) < n. */
9212 4 : region_model model0 (&mgr);
9213 4 : model0.add_constraint (x, EQ_EXPR, y, &ctxt);
9214 4 : model0.add_constraint (x, GE_EXPR, int_0, nullptr);
9215 4 : model0.add_constraint (x, LT_EXPR, n, nullptr);
9216 :
9217 : /* model1: z != 5 && (0 <= x < n). */
9218 4 : region_model model1 (&mgr);
9219 4 : model1.add_constraint (z, NE_EXPR, int_5, nullptr);
9220 4 : model1.add_constraint (x, GE_EXPR, int_0, nullptr);
9221 4 : model1.add_constraint (x, LT_EXPR, n, nullptr);
9222 :
9223 : /* They should be mergeable; the merged constraints should
9224 : be: (0 <= x < n). */
9225 4 : program_point point (program_point::origin (mgr));
9226 4 : region_model merged (&mgr);
9227 4 : ASSERT_TRUE (model0.can_merge_with_p (model1, point, &merged));
9228 :
9229 4 : ASSERT_EQ (merged.eval_condition (x, GE_EXPR, int_0, &ctxt),
9230 : tristate (tristate::TS_TRUE));
9231 4 : ASSERT_EQ (merged.eval_condition (x, LT_EXPR, n, &ctxt),
9232 : tristate (tristate::TS_TRUE));
9233 :
9234 4 : ASSERT_EQ (merged.eval_condition (z, NE_EXPR, int_5, &ctxt),
9235 : tristate (tristate::TS_UNKNOWN));
9236 4 : ASSERT_EQ (merged.eval_condition (x, LT_EXPR, y, &ctxt),
9237 : tristate (tristate::TS_UNKNOWN));
9238 4 : }
9239 :
9240 : /* Verify that widening_svalue::eval_condition_without_cm works as
9241 : expected. */
9242 :
9243 : static void
9244 4 : test_widening_constraints ()
9245 : {
9246 4 : region_model_manager mgr;
9247 4 : const supernode *snode = nullptr;
9248 4 : tree int_0 = integer_zero_node;
9249 4 : tree int_m1 = build_int_cst (integer_type_node, -1);
9250 4 : tree int_1 = integer_one_node;
9251 4 : tree int_256 = build_int_cst (integer_type_node, 256);
9252 4 : test_region_model_context ctxt;
9253 4 : const svalue *int_0_sval = mgr.get_or_create_constant_svalue (int_0);
9254 4 : const svalue *int_1_sval = mgr.get_or_create_constant_svalue (int_1);
9255 4 : const svalue *w_zero_then_one_sval
9256 4 : = mgr.get_or_create_widening_svalue (integer_type_node, snode,
9257 : int_0_sval, int_1_sval);
9258 4 : const widening_svalue *w_zero_then_one
9259 4 : = w_zero_then_one_sval->dyn_cast_widening_svalue ();
9260 4 : ASSERT_EQ (w_zero_then_one->get_direction (),
9261 : widening_svalue::DIR_ASCENDING);
9262 4 : ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (LT_EXPR, int_m1),
9263 : tristate::TS_FALSE);
9264 4 : ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (LT_EXPR, int_0),
9265 : tristate::TS_FALSE);
9266 4 : ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (LT_EXPR, int_1),
9267 : tristate::TS_UNKNOWN);
9268 4 : ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (LT_EXPR, int_256),
9269 : tristate::TS_UNKNOWN);
9270 :
9271 4 : ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (LE_EXPR, int_m1),
9272 : tristate::TS_FALSE);
9273 4 : ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (LE_EXPR, int_0),
9274 : tristate::TS_UNKNOWN);
9275 4 : ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (LE_EXPR, int_1),
9276 : tristate::TS_UNKNOWN);
9277 4 : ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (LE_EXPR, int_256),
9278 : tristate::TS_UNKNOWN);
9279 :
9280 4 : ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (GT_EXPR, int_m1),
9281 : tristate::TS_TRUE);
9282 4 : ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (GT_EXPR, int_0),
9283 : tristate::TS_UNKNOWN);
9284 4 : ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (GT_EXPR, int_1),
9285 : tristate::TS_UNKNOWN);
9286 4 : ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (GT_EXPR, int_256),
9287 : tristate::TS_UNKNOWN);
9288 :
9289 4 : ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (GE_EXPR, int_m1),
9290 : tristate::TS_TRUE);
9291 4 : ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (GE_EXPR, int_0),
9292 : tristate::TS_TRUE);
9293 4 : ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (GE_EXPR, int_1),
9294 : tristate::TS_UNKNOWN);
9295 4 : ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (GE_EXPR, int_256),
9296 : tristate::TS_UNKNOWN);
9297 :
9298 4 : ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (EQ_EXPR, int_m1),
9299 : tristate::TS_FALSE);
9300 4 : ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (EQ_EXPR, int_0),
9301 : tristate::TS_UNKNOWN);
9302 4 : ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (EQ_EXPR, int_1),
9303 : tristate::TS_UNKNOWN);
9304 4 : ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (EQ_EXPR, int_256),
9305 : tristate::TS_UNKNOWN);
9306 :
9307 4 : ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (NE_EXPR, int_m1),
9308 : tristate::TS_TRUE);
9309 4 : ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (NE_EXPR, int_0),
9310 : tristate::TS_UNKNOWN);
9311 4 : ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (NE_EXPR, int_1),
9312 : tristate::TS_UNKNOWN);
9313 4 : ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (NE_EXPR, int_256),
9314 : tristate::TS_UNKNOWN);
9315 4 : }
9316 :
9317 : /* Verify merging constraints for states simulating successive iterations
9318 : of a loop.
9319 : Simulate:
9320 : for (i = 0; i < 256; i++)
9321 : [...body...]
9322 : i.e. this gimple:.
9323 : i_15 = 0;
9324 : goto <bb 4>;
9325 :
9326 : <bb 4> :
9327 : i_11 = PHI <i_15(2), i_23(3)>
9328 : if (i_11 <= 255)
9329 : goto <bb 3>;
9330 : else
9331 : goto [AFTER LOOP]
9332 :
9333 : <bb 3> :
9334 : [LOOP BODY]
9335 : i_23 = i_11 + 1;
9336 :
9337 : and thus these ops (and resultant states):
9338 : i_11 = PHI()
9339 : {i_11: 0}
9340 : add_constraint (i_11 <= 255) [for the true edge]
9341 : {i_11: 0} [constraint was a no-op]
9342 : i_23 = i_11 + 1;
9343 : {i_22: 1}
9344 : i_11 = PHI()
9345 : {i_11: WIDENED (at phi, 0, 1)}
9346 : add_constraint (i_11 <= 255) [for the true edge]
9347 : {i_11: WIDENED (at phi, 0, 1); WIDENED <= 255}
9348 : i_23 = i_11 + 1;
9349 : {i_23: (WIDENED (at phi, 0, 1) + 1); WIDENED <= 255}
9350 : i_11 = PHI(); merge with state at phi above
9351 : {i_11: WIDENED (at phi, 0, 1); WIDENED <= 256}
9352 : [changing meaning of "WIDENED" here]
9353 : if (i_11 <= 255)
9354 : T: {i_11: WIDENED (at phi, 0, 1); WIDENED <= 255}; cache hit
9355 : F: {i_11: 256}
9356 : */
9357 :
static void
test_iteration_1 ()
{
  region_model_manager mgr;
  program_point point (program_point::origin (mgr));

  tree int_0 = integer_zero_node;
  tree int_1 = integer_one_node;
  tree int_256 = build_int_cst (integer_type_node, 256);
  tree i = build_global_decl ("i", integer_type_node);

  test_region_model_context ctxt;

  /* model0: i: 0 (the state after "i_15 = 0;").  */
  region_model model0 (&mgr);
  model0.set_value (i, int_0, &ctxt);

  /* model1: i: 1 (the state after one iteration's increment).  */
  region_model model1 (&mgr);
  model1.set_value (i, int_1, &ctxt);

  /* Should merge "i" to a widened value: the two successive constants
     ought to become an ascending WIDENING svalue.  */
  region_model model2 (&mgr);
  ASSERT_TRUE (model1.can_merge_with_p (model0, point, &model2));
  const svalue *merged_i = model2.get_rvalue (i, &ctxt);
  ASSERT_EQ (merged_i->get_kind (), SK_WIDENING);
  const widening_svalue *w = merged_i->dyn_cast_widening_svalue ();
  ASSERT_EQ (w->get_direction (), widening_svalue::DIR_ASCENDING);

  /* Add constraint: i < 256 (simulating taking the true edge of the
     loop guard); the widened value should now be known to be in
     [0, 256).  */
  model2.add_constraint (i, LT_EXPR, int_256, &ctxt);
  ASSERT_EQ (model2.eval_condition (i, LT_EXPR, int_256, &ctxt),
	     tristate (tristate::TS_TRUE));
  ASSERT_EQ (model2.eval_condition (i, GE_EXPR, int_0, &ctxt),
	     tristate (tristate::TS_TRUE));

  /* Try merging with the initial state.  */
  region_model model3 (&mgr);
  ASSERT_TRUE (model2.can_merge_with_p (model0, point, &model3));
  /* Merging the merged value with the initial value should be idempotent,
     so that the analysis converges.  */
  ASSERT_EQ (model3.get_rvalue (i, &ctxt), merged_i);
  /* Merger of 0 and a widening value with constraint < CST
     should retain the constraint, even though it was implicit
     for the 0 case.  */
  ASSERT_EQ (model3.eval_condition (i, LT_EXPR, int_256, &ctxt),
	     tristate (tristate::TS_TRUE));
  /* ...and we should have equality: the analysis should have converged.  */
  ASSERT_EQ (model3, model2);

  /* "i_23 = i_11 + 1;": incrementing the widened value should yield
     a BINOP svalue (WIDENED + 1), not another constant.  */
  region_model model4 (model3);
  ASSERT_EQ (model4, model2);
  model4.set_value (i, build2 (PLUS_EXPR, integer_type_node, i, int_1), &ctxt);
  const svalue *plus_one = model4.get_rvalue (i, &ctxt);
  ASSERT_EQ (plus_one->get_kind (), SK_BINOP);

  /* Try merging with the "i: 1" state; the BINOP should win, since
     1 is one of the values it already summarizes.  */
  region_model model5 (&mgr);
  ASSERT_TRUE (model4.can_merge_with_p (model1, point, &model5));
  ASSERT_EQ (model5.get_rvalue (i, &ctxt), plus_one);
  ASSERT_EQ (model5, model4);

  /* "i_11 = PHI();" merge with state at phi above.
     For i, we should have a merger of WIDENING with WIDENING + 1,
     and this should be WIDENING again.  */
  region_model model6 (&mgr);
  ASSERT_TRUE (model5.can_merge_with_p (model2, point, &model6));
  const svalue *merged_widening = model6.get_rvalue (i, &ctxt);
  ASSERT_EQ (merged_widening->get_kind (), SK_WIDENING);
}
9429 :
9430 : /* Verify that if we mark a pointer to a malloc-ed region as non-NULL,
9431 : all cast pointers to that region are also known to be non-NULL. */
9432 :
9433 : static void
9434 4 : test_malloc_constraints ()
9435 : {
9436 4 : region_model_manager mgr;
9437 4 : region_model model (&mgr);
9438 4 : tree p = build_global_decl ("p", ptr_type_node);
9439 4 : tree char_star = build_pointer_type (char_type_node);
9440 4 : tree q = build_global_decl ("q", char_star);
9441 4 : tree null_ptr = build_int_cst (ptr_type_node, 0);
9442 :
9443 4 : const svalue *size_in_bytes
9444 4 : = mgr.get_or_create_unknown_svalue (size_type_node);
9445 4 : const region *reg
9446 4 : = model.get_or_create_region_for_heap_alloc (size_in_bytes, nullptr);
9447 4 : const svalue *sval = mgr.get_ptr_svalue (ptr_type_node, reg);
9448 4 : model.set_value (model.get_lvalue (p, nullptr), sval, nullptr);
9449 4 : model.set_value (q, p, nullptr);
9450 :
9451 4 : ASSERT_CONDITION_UNKNOWN (model, p, NE_EXPR, null_ptr);
9452 4 : ASSERT_CONDITION_UNKNOWN (model, p, EQ_EXPR, null_ptr);
9453 4 : ASSERT_CONDITION_UNKNOWN (model, q, NE_EXPR, null_ptr);
9454 4 : ASSERT_CONDITION_UNKNOWN (model, q, EQ_EXPR, null_ptr);
9455 :
9456 4 : model.add_constraint (p, NE_EXPR, null_ptr, nullptr);
9457 :
9458 4 : ASSERT_CONDITION_TRUE (model, p, NE_EXPR, null_ptr);
9459 4 : ASSERT_CONDITION_FALSE (model, p, EQ_EXPR, null_ptr);
9460 4 : ASSERT_CONDITION_TRUE (model, q, NE_EXPR, null_ptr);
9461 4 : ASSERT_CONDITION_FALSE (model, q, EQ_EXPR, null_ptr);
9462 4 : }
9463 :
9464 : /* Smoketest of getting and setting the value of a variable. */
9465 :
9466 : static void
9467 4 : test_var ()
9468 : {
9469 : /* "int i;" */
9470 4 : tree i = build_global_decl ("i", integer_type_node);
9471 :
9472 4 : tree int_17 = build_int_cst (integer_type_node, 17);
9473 4 : tree int_m3 = build_int_cst (integer_type_node, -3);
9474 :
9475 4 : region_model_manager mgr;
9476 4 : region_model model (&mgr);
9477 :
9478 4 : const region *i_reg = model.get_lvalue (i, nullptr);
9479 4 : ASSERT_EQ (i_reg->get_kind (), RK_DECL);
9480 :
9481 : /* Reading "i" should give a symbolic "initial value". */
9482 4 : const svalue *sval_init = model.get_rvalue (i, nullptr);
9483 4 : ASSERT_EQ (sval_init->get_kind (), SK_INITIAL);
9484 4 : ASSERT_EQ (sval_init->dyn_cast_initial_svalue ()->get_region (), i_reg);
9485 : /* ..and doing it again should give the same "initial value". */
9486 4 : ASSERT_EQ (model.get_rvalue (i, nullptr), sval_init);
9487 :
9488 : /* "i = 17;". */
9489 4 : model.set_value (i, int_17, nullptr);
9490 4 : ASSERT_EQ (model.get_rvalue (i, nullptr),
9491 : model.get_rvalue (int_17, nullptr));
9492 :
9493 : /* "i = -3;". */
9494 4 : model.set_value (i, int_m3, nullptr);
9495 4 : ASSERT_EQ (model.get_rvalue (i, nullptr),
9496 : model.get_rvalue (int_m3, nullptr));
9497 :
9498 : /* Verify get_offset for "i". */
9499 4 : {
9500 4 : region_offset offset = i_reg->get_offset (&mgr);
9501 4 : ASSERT_EQ (offset.get_base_region (), i_reg);
9502 4 : ASSERT_EQ (offset.get_bit_offset (), 0);
9503 : }
9504 4 : }
9505 :
9506 : static void
9507 4 : test_array_2 ()
9508 : {
9509 : /* "int arr[10];" */
9510 4 : tree tlen = size_int (10);
9511 4 : tree arr_type
9512 4 : = build_array_type (integer_type_node, build_index_type (tlen));
9513 4 : tree arr = build_global_decl ("arr", arr_type);
9514 :
9515 : /* "int i;" */
9516 4 : tree i = build_global_decl ("i", integer_type_node);
9517 :
9518 4 : tree int_0 = integer_zero_node;
9519 4 : tree int_1 = integer_one_node;
9520 :
9521 4 : tree arr_0 = build4 (ARRAY_REF, integer_type_node,
9522 : arr, int_0, NULL_TREE, NULL_TREE);
9523 4 : tree arr_1 = build4 (ARRAY_REF, integer_type_node,
9524 : arr, int_1, NULL_TREE, NULL_TREE);
9525 4 : tree arr_i = build4 (ARRAY_REF, integer_type_node,
9526 : arr, i, NULL_TREE, NULL_TREE);
9527 :
9528 4 : tree int_17 = build_int_cst (integer_type_node, 17);
9529 4 : tree int_42 = build_int_cst (integer_type_node, 42);
9530 4 : tree int_m3 = build_int_cst (integer_type_node, -3);
9531 :
9532 4 : region_model_manager mgr;
9533 4 : region_model model (&mgr);
9534 : /* "arr[0] = 17;". */
9535 4 : model.set_value (arr_0, int_17, nullptr);
9536 : /* "arr[1] = -3;". */
9537 4 : model.set_value (arr_1, int_m3, nullptr);
9538 :
9539 4 : ASSERT_EQ (model.get_rvalue (arr_0, nullptr),
9540 : model.get_rvalue (int_17, nullptr));
9541 4 : ASSERT_EQ (model.get_rvalue (arr_1, nullptr),
9542 : model.get_rvalue (int_m3, nullptr));
9543 :
9544 : /* Overwrite a pre-existing binding: "arr[1] = 42;". */
9545 4 : model.set_value (arr_1, int_42, nullptr);
9546 4 : ASSERT_EQ (model.get_rvalue (arr_1, nullptr),
9547 : model.get_rvalue (int_42, nullptr));
9548 :
9549 : /* Verify get_offset for "arr[0]". */
9550 4 : {
9551 4 : const region *arr_0_reg = model.get_lvalue (arr_0, nullptr);
9552 4 : region_offset offset = arr_0_reg->get_offset (&mgr);
9553 4 : ASSERT_EQ (offset.get_base_region (), model.get_lvalue (arr, nullptr));
9554 4 : ASSERT_EQ (offset.get_bit_offset (), 0);
9555 : }
9556 :
9557 : /* Verify get_offset for "arr[1]". */
9558 4 : {
9559 4 : const region *arr_1_reg = model.get_lvalue (arr_1, nullptr);
9560 4 : region_offset offset = arr_1_reg->get_offset (&mgr);
9561 4 : ASSERT_EQ (offset.get_base_region (), model.get_lvalue (arr, nullptr));
9562 4 : ASSERT_EQ (offset.get_bit_offset (), INT_TYPE_SIZE);
9563 : }
9564 :
9565 : /* Verify get_offset for "arr[i]". */
9566 4 : {
9567 4 : const region *arr_i_reg = model.get_lvalue (arr_i, nullptr);
9568 4 : region_offset offset = arr_i_reg->get_offset (&mgr);
9569 4 : ASSERT_EQ (offset.get_base_region (), model.get_lvalue (arr, nullptr));
9570 4 : const svalue *offset_sval = offset.get_symbolic_byte_offset ();
9571 4 : if (const svalue *cast = offset_sval->maybe_undo_cast ())
9572 4 : offset_sval = cast;
9573 4 : ASSERT_EQ (offset_sval->get_kind (), SK_BINOP);
9574 : }
9575 :
9576 : /* "arr[i] = i;" - this should remove the earlier bindings. */
9577 4 : model.set_value (arr_i, i, nullptr);
9578 4 : ASSERT_EQ (model.get_rvalue (arr_i, nullptr), model.get_rvalue (i, nullptr));
9579 4 : ASSERT_EQ (model.get_rvalue (arr_0, nullptr)->get_kind (), SK_UNKNOWN);
9580 :
9581 : /* "arr[0] = 17;" - this should remove the arr[i] binding. */
9582 4 : model.set_value (arr_0, int_17, nullptr);
9583 4 : ASSERT_EQ (model.get_rvalue (arr_0, nullptr),
9584 : model.get_rvalue (int_17, nullptr));
9585 4 : ASSERT_EQ (model.get_rvalue (arr_i, nullptr)->get_kind (), SK_UNKNOWN);
9586 4 : }
9587 :
9588 : /* Smoketest of dereferencing a pointer via MEM_REF. */
9589 :
9590 : static void
9591 4 : test_mem_ref ()
9592 : {
9593 : /*
9594 : x = 17;
9595 : p = &x;
9596 : *p;
9597 : */
9598 4 : tree x = build_global_decl ("x", integer_type_node);
9599 4 : tree int_star = build_pointer_type (integer_type_node);
9600 4 : tree p = build_global_decl ("p", int_star);
9601 :
9602 4 : tree int_17 = build_int_cst (integer_type_node, 17);
9603 4 : tree addr_of_x = build1 (ADDR_EXPR, int_star, x);
9604 4 : tree ptype = build_pointer_type_for_mode (char_type_node, ptr_mode, true);
9605 4 : tree offset_0 = build_int_cst (ptype, 0);
9606 4 : tree star_p = build2 (MEM_REF, integer_type_node, p, offset_0);
9607 :
9608 4 : region_model_manager mgr;
9609 4 : region_model model (&mgr);
9610 :
9611 : /* "x = 17;". */
9612 4 : model.set_value (x, int_17, nullptr);
9613 :
9614 : /* "p = &x;". */
9615 4 : model.set_value (p, addr_of_x, nullptr);
9616 :
9617 4 : const svalue *sval = model.get_rvalue (star_p, nullptr);
9618 4 : ASSERT_EQ (sval->maybe_get_constant (), int_17);
9619 4 : }
9620 :
9621 : /* Test for a POINTER_PLUS_EXPR followed by a MEM_REF.
9622 : Analogous to this code:
9623 : void test_6 (int a[10])
9624 : {
9625 : __analyzer_eval (a[3] == 42); [should be UNKNOWN]
9626 : a[3] = 42;
9627 : __analyzer_eval (a[3] == 42); [should be TRUE]
9628 : }
9629 : from data-model-1.c, which looks like this at the gimple level:
9630 : # __analyzer_eval (a[3] == 42); [should be UNKNOWN]
9631 : int *_1 = a_10(D) + 12; # POINTER_PLUS_EXPR
9632 : int _2 = *_1; # MEM_REF
9633 : _Bool _3 = _2 == 42;
9634 : int _4 = (int) _3;
9635 : __analyzer_eval (_4);
9636 :
9637 : # a[3] = 42;
9638 : int *_5 = a_10(D) + 12; # POINTER_PLUS_EXPR
9639 : *_5 = 42; # MEM_REF
9640 :
9641 : # __analyzer_eval (a[3] == 42); [should be TRUE]
9642 : int *_6 = a_10(D) + 12; # POINTER_PLUS_EXPR
9643 : int _7 = *_6; # MEM_REF
9644 : _Bool _8 = _7 == 42;
9645 : int _9 = (int) _8;
9646 : __analyzer_eval (_9); */
9647 :
9648 : static void
9649 4 : test_POINTER_PLUS_EXPR_then_MEM_REF ()
9650 : {
9651 4 : tree int_star = build_pointer_type (integer_type_node);
9652 4 : tree a = build_global_decl ("a", int_star);
9653 4 : tree offset_12 = build_int_cst (size_type_node, 12);
9654 4 : tree pointer_plus_expr = build2 (POINTER_PLUS_EXPR, int_star, a, offset_12);
9655 4 : tree ptype = build_pointer_type_for_mode (char_type_node, ptr_mode, true);
9656 4 : tree offset_0 = build_int_cst (ptype, 0);
9657 4 : tree mem_ref = build2 (MEM_REF, integer_type_node,
9658 : pointer_plus_expr, offset_0);
9659 4 : region_model_manager mgr;
9660 4 : region_model m (&mgr);
9661 :
9662 4 : tree int_42 = build_int_cst (integer_type_node, 42);
9663 4 : m.set_value (mem_ref, int_42, nullptr);
9664 4 : ASSERT_EQ (m.get_rvalue (mem_ref, nullptr)->maybe_get_constant (), int_42);
9665 4 : }
9666 :
9667 : /* Verify that malloc works. */
9668 :
9669 : static void
9670 4 : test_malloc ()
9671 : {
9672 4 : tree int_star = build_pointer_type (integer_type_node);
9673 4 : tree p = build_global_decl ("p", int_star);
9674 4 : tree n = build_global_decl ("n", integer_type_node);
9675 4 : tree n_times_4 = build2 (MULT_EXPR, size_type_node,
9676 : n, build_int_cst (size_type_node, 4));
9677 :
9678 4 : region_model_manager mgr;
9679 4 : test_region_model_context ctxt;
9680 4 : region_model model (&mgr);
9681 :
9682 : /* "p = malloc (n * 4);". */
9683 4 : const svalue *size_sval = model.get_rvalue (n_times_4, &ctxt);
9684 4 : const region *reg
9685 4 : = model.get_or_create_region_for_heap_alloc (size_sval, &ctxt);
9686 4 : const svalue *ptr = mgr.get_ptr_svalue (int_star, reg);
9687 4 : model.set_value (model.get_lvalue (p, &ctxt), ptr, &ctxt);
9688 4 : ASSERT_EQ (model.get_capacity (reg), size_sval);
9689 4 : }
9690 :
9691 : /* Verify that alloca works. */
9692 :
9693 : static void
9694 4 : test_alloca ()
9695 : {
9696 4 : auto_vec <tree> param_types;
9697 4 : tree fndecl = make_fndecl (integer_type_node,
9698 : "test_fn",
9699 : param_types);
9700 4 : allocate_struct_function (fndecl, true);
9701 :
9702 :
9703 4 : tree int_star = build_pointer_type (integer_type_node);
9704 4 : tree p = build_global_decl ("p", int_star);
9705 4 : tree n = build_global_decl ("n", integer_type_node);
9706 4 : tree n_times_4 = build2 (MULT_EXPR, size_type_node,
9707 : n, build_int_cst (size_type_node, 4));
9708 :
9709 4 : region_model_manager mgr;
9710 4 : test_region_model_context ctxt;
9711 4 : region_model model (&mgr);
9712 :
9713 : /* Push stack frame. */
9714 4 : const region *frame_reg
9715 4 : = model.push_frame (*DECL_STRUCT_FUNCTION (fndecl),
9716 : nullptr, nullptr, &ctxt);
9717 : /* "p = alloca (n * 4);". */
9718 4 : const svalue *size_sval = model.get_rvalue (n_times_4, &ctxt);
9719 4 : const region *reg = model.create_region_for_alloca (size_sval, &ctxt);
9720 4 : ASSERT_EQ (reg->get_parent_region (), frame_reg);
9721 4 : const svalue *ptr = mgr.get_ptr_svalue (int_star, reg);
9722 4 : model.set_value (model.get_lvalue (p, &ctxt), ptr, &ctxt);
9723 4 : ASSERT_EQ (model.get_capacity (reg), size_sval);
9724 :
9725 : /* Verify that the pointers to the alloca region are replaced by
9726 : poisoned values when the frame is popped. */
9727 4 : model.pop_frame (nullptr, nullptr, &ctxt, nullptr);
9728 4 : ASSERT_EQ (model.get_rvalue (p, nullptr)->get_kind (), SK_POISONED);
9729 4 : }
9730 :
9731 : /* Verify that svalue::involves_p works. */
9732 :
9733 : static void
9734 4 : test_involves_p ()
9735 : {
9736 4 : region_model_manager mgr;
9737 4 : tree int_star = build_pointer_type (integer_type_node);
9738 4 : tree p = build_global_decl ("p", int_star);
9739 4 : tree q = build_global_decl ("q", int_star);
9740 :
9741 4 : test_region_model_context ctxt;
9742 4 : region_model model (&mgr);
9743 4 : const svalue *p_init = model.get_rvalue (p, &ctxt);
9744 4 : const svalue *q_init = model.get_rvalue (q, &ctxt);
9745 :
9746 4 : ASSERT_TRUE (p_init->involves_p (p_init));
9747 4 : ASSERT_FALSE (p_init->involves_p (q_init));
9748 :
9749 4 : const region *star_p_reg = mgr.get_symbolic_region (p_init);
9750 4 : const region *star_q_reg = mgr.get_symbolic_region (q_init);
9751 :
9752 4 : const svalue *init_star_p = mgr.get_or_create_initial_value (star_p_reg);
9753 4 : const svalue *init_star_q = mgr.get_or_create_initial_value (star_q_reg);
9754 :
9755 4 : ASSERT_TRUE (init_star_p->involves_p (p_init));
9756 4 : ASSERT_FALSE (p_init->involves_p (init_star_p));
9757 4 : ASSERT_FALSE (init_star_p->involves_p (q_init));
9758 4 : ASSERT_TRUE (init_star_q->involves_p (q_init));
9759 4 : ASSERT_FALSE (init_star_q->involves_p (p_init));
9760 4 : }
9761 :
9762 : /* Run all of the selftests within this file. */
9763 :
void
analyzer_region_model_cc_tests ()
{
  /* Basic helpers, dumping, and compound-value plumbing.  */
  test_tree_cmp_on_constants ();
  test_dump ();
  test_struct ();
  test_array_1 ();
  test_get_representative_tree ();
  /* Uniqueness and folding of svalues.  */
  test_unique_constants ();
  test_unique_unknowns ();
  test_initial_svalue_folding ();
  test_unaryop_svalue_folding ();
  test_binop_svalue_folding ();
  test_sub_svalue_folding ();
  test_bits_within_svalue_folding ();
  /* Region hierarchy.  */
  test_descendent_of_p ();
  test_bit_range_regions ();
  /* Assignment and stack frames.  */
  test_assignment ();
  test_compound_assignment ();
  test_stack_frames ();
  test_get_representative_path_var ();
  /* Equality, canonicalization, and state merging.  */
  test_equality_1 ();
  test_canonicalization_2 ();
  test_canonicalization_3 ();
  test_canonicalization_4 ();
  test_state_merging ();
  test_constraint_merging ();
  test_widening_constraints ();
  test_iteration_1 ();
  /* Modeling of specific constructs.  */
  test_malloc_constraints ();
  test_var ();
  test_array_2 ();
  test_mem_ref ();
  test_POINTER_PLUS_EXPR_then_MEM_REF ();
  test_malloc ();
  test_alloca ();
  test_involves_p ();
}
9802 :
9803 : } // namespace selftest
9804 :
9805 : #endif /* CHECKING_P */
9806 :
9807 : } // namespace ana
9808 :
9809 : #endif /* #if ENABLE_ANALYZER */
|