Line data Source code
1 : /* C++-specific tree lowering bits; see also c-gimplify.cc and gimple.cc.
2 :
3 : Copyright (C) 2002-2023 Free Software Foundation, Inc.
4 : Contributed by Jason Merrill <jason@redhat.com>
5 :
6 : This file is part of GCC.
7 :
8 : GCC is free software; you can redistribute it and/or modify it under
9 : the terms of the GNU General Public License as published by the Free
10 : Software Foundation; either version 3, or (at your option) any later
11 : version.
12 :
13 : GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 : WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 : FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 : for more details.
17 :
18 : You should have received a copy of the GNU General Public License
19 : along with GCC; see the file COPYING3. If not see
20 : <http://www.gnu.org/licenses/>. */
21 :
22 : #include "config.h"
23 : #include "system.h"
24 : #include "coretypes.h"
25 : #include "target.h"
26 : #include "basic-block.h"
27 : #include "cp-tree.h"
28 : #include "gimple.h"
29 : #include "predict.h"
30 : #include "stor-layout.h"
31 : #include "tree-iterator.h"
32 : #include "gimplify.h"
33 : #include "c-family/c-ubsan.h"
34 : #include "stringpool.h"
35 : #include "attribs.h"
36 : #include "asan.h"
37 : #include "gcc-rich-location.h"
38 : #include "memmodel.h"
39 : #include "tm_p.h"
40 : #include "output.h"
41 : #include "file-prefix-map.h"
42 : #include "cgraph.h"
43 : #include "omp-general.h"
44 : #include "opts.h"
45 :
46 : /* Flags for cp_fold and cp_fold_r. */
47 :
enum fold_flags {
  ff_none = 0,
  /* Whether we're being called from cp_fold_function.  */
  ff_genericize = 1 << 0,
  /* Whether we're folding a point where we know we're
     definitely not in a manifestly constant-evaluated
     context.  */
  ff_mce_false = 1 << 1,
};

/* A bitmask of the fold_flags values above.  */
using fold_flags_t = int;

/* Context passed (via cp_walk_tree) to cp_fold_r: the active fold flags
   plus PSET, the walk's pointer set (NOTE(review): presumably used to
   avoid revisiting shared subtrees -- confirm in cp_fold_r).  */
struct cp_fold_data
{
  hash_set<tree> pset;
  fold_flags_t flags;
  cp_fold_data (fold_flags_t flags): flags (flags) {}
};
66 :
67 : /* Forward declarations. */
68 :
69 : static tree cp_genericize_r (tree *, int *, void *);
70 : static tree cp_fold_r (tree *, int *, void *);
71 : static void cp_genericize_tree (tree*, bool);
72 : static tree cp_fold (tree, fold_flags_t);
73 :
74 : /* Genericize a TRY_BLOCK. */
75 :
76 : static void
77 17126 : genericize_try_block (tree *stmt_p)
78 : {
79 17126 : tree body = TRY_STMTS (*stmt_p);
80 17126 : tree cleanup = TRY_HANDLERS (*stmt_p);
81 :
82 17126 : *stmt_p = build2 (TRY_CATCH_EXPR, void_type_node, body, cleanup);
83 17126 : }
84 :
85 : /* Genericize a HANDLER by converting to a CATCH_EXPR. */
86 :
87 : static void
88 20263 : genericize_catch_block (tree *stmt_p)
89 : {
90 20263 : tree type = HANDLER_TYPE (*stmt_p);
91 20263 : tree body = HANDLER_BODY (*stmt_p);
92 :
93 : /* FIXME should the caught type go in TREE_TYPE? */
94 20263 : *stmt_p = build2 (CATCH_EXPR, void_type_node, type, body);
95 20263 : }
96 :
97 : /* A terser interface for building a representation of an exception
98 : specification. */
99 :
100 : static tree
101 2687 : build_gimple_eh_filter_tree (tree body, tree allowed, tree failure)
102 : {
103 2687 : tree t;
104 :
105 : /* FIXME should the allowed types go in TREE_TYPE? */
106 2687 : t = build2 (EH_FILTER_EXPR, void_type_node, allowed, NULL_TREE);
107 2687 : append_to_statement_list (failure, &EH_FILTER_FAILURE (t));
108 :
109 2687 : t = build2 (TRY_CATCH_EXPR, void_type_node, NULL_TREE, t);
110 2687 : append_to_statement_list (body, &TREE_OPERAND (t, 0));
111 :
112 2687 : return t;
113 : }
114 :
115 : /* Genericize an EH_SPEC_BLOCK by converting it to a
116 : TRY_CATCH_EXPR/EH_FILTER_EXPR pair. */
117 :
118 : static void
119 2687 : genericize_eh_spec_block (tree *stmt_p)
120 : {
121 2687 : tree body = EH_SPEC_STMTS (*stmt_p);
122 2687 : tree allowed = EH_SPEC_RAISES (*stmt_p);
123 2687 : tree failure = build_call_n (call_unexpected_fn, 1, build_exc_ptr ());
124 :
125 2687 : *stmt_p = build_gimple_eh_filter_tree (body, allowed, failure);
126 2687 : suppress_warning (*stmt_p);
127 2687 : suppress_warning (TREE_OPERAND (*stmt_p, 1));
128 2687 : }
129 :
130 : /* Return the first non-compound statement in STMT. */
131 :
132 : tree
133 6497724 : first_stmt (tree stmt)
134 : {
135 10705104 : switch (TREE_CODE (stmt))
136 : {
137 2231089 : case STATEMENT_LIST:
138 2231089 : if (tree_statement_list_node *p = STATEMENT_LIST_HEAD (stmt))
139 1993937 : return first_stmt (p->stmt);
140 237152 : return void_node;
141 :
142 2213443 : case BIND_EXPR:
143 2213443 : return first_stmt (BIND_EXPR_BODY (stmt));
144 :
145 : default:
146 : return stmt;
147 : }
148 : }
149 :
150 : /* Genericize an IF_STMT by turning it into a COND_EXPR. */
151 :
static void
genericize_if_stmt (tree *stmt_p)
{
  tree stmt, cond, then_, else_;
  location_t locus = EXPR_LOCATION (*stmt_p);

  stmt = *stmt_p;
  cond = IF_COND (stmt);
  then_ = THEN_CLAUSE (stmt);
  else_ = ELSE_CLAUSE (stmt);

  /* Diagnose an 'if' whose two branches both begin with the same hot/cold
     label prediction ([[likely]]/[[unlikely]]) -- the annotations then
     cancel each other out.  */
  if (then_ && else_)
    {
      tree ft = first_stmt (then_);
      tree fe = first_stmt (else_);
      br_predictor pr;
      if (TREE_CODE (ft) == PREDICT_EXPR
	  && TREE_CODE (fe) == PREDICT_EXPR
	  && (pr = PREDICT_EXPR_PREDICTOR (ft)) == PREDICT_EXPR_PREDICTOR (fe)
	  && (pr == PRED_HOT_LABEL || pr == PRED_COLD_LABEL))
	{
	  gcc_rich_location richloc (EXPR_LOC_OR_LOC (ft, locus));
	  richloc.add_range (EXPR_LOC_OR_LOC (fe, locus));
	  warning_at (&richloc, OPT_Wattributes,
		      "both branches of %<if%> statement marked as %qs",
		      pr == PRED_HOT_LABEL ? "likely" : "unlikely");
	}
    }

  /* COND_EXPR needs both arms; materialize empty statements for any
     missing clause.  */
  if (!then_)
    then_ = build_empty_stmt (locus);
  if (!else_)
    else_ = build_empty_stmt (locus);

  /* consteval if has been verified not to have the then_/else_ blocks
     entered by gotos/case labels from elsewhere, and as then_ block
     can contain unfolded immediate function calls, we have to discard
     the then_ block regardless of whether else_ has side-effects or not.  */
  if (IF_STMT_CONSTEVAL_P (stmt))
    {
      if (block_may_fallthru (then_))
	stmt = build3 (COND_EXPR, void_type_node, boolean_false_node,
		       void_node, else_);
      else
	stmt = else_;
    }
  else if (IF_STMT_CONSTEXPR_P (stmt))
    /* 'if constexpr': the condition is already a constant; keep only the
       selected arm.  */
    stmt = integer_nonzerop (cond) ? then_ : else_;
  /* ??? This optimization doesn't seem to belong here, but removing it
     causes -Wreturn-type regressions (e.g. 107310).  */
  else if (integer_nonzerop (cond) && !TREE_SIDE_EFFECTS (else_))
    stmt = then_;
  else if (integer_zerop (cond) && !TREE_SIDE_EFFECTS (then_))
    stmt = else_;
  else
    stmt = build3 (COND_EXPR, void_type_node, cond, then_, else_);
  protected_set_expr_location_if_unset (stmt, locus);
  *stmt_p = stmt;
}
211 :
212 : /* Hook into the middle of gimplifying an OMP_FOR node. */
213 :
214 : static enum gimplify_status
215 45780 : cp_gimplify_omp_for (tree *expr_p, gimple_seq *pre_p)
216 : {
217 45780 : tree for_stmt = *expr_p;
218 45780 : gimple_seq seq = NULL;
219 :
220 : /* Protect ourselves from recursion. */
221 45780 : if (OMP_FOR_GIMPLIFYING_P (for_stmt))
222 : return GS_UNHANDLED;
223 21362 : OMP_FOR_GIMPLIFYING_P (for_stmt) = 1;
224 :
225 21362 : gimplify_and_add (for_stmt, &seq);
226 21362 : gimple_seq_add_seq (pre_p, seq);
227 :
228 21362 : OMP_FOR_GIMPLIFYING_P (for_stmt) = 0;
229 :
230 21362 : return GS_ALL_DONE;
231 : }
232 :
233 : /* Gimplify an EXPR_STMT node. */
234 :
235 : static void
236 3158649 : gimplify_expr_stmt (tree *stmt_p)
237 : {
238 3158649 : tree stmt = EXPR_STMT_EXPR (*stmt_p);
239 :
240 3158649 : if (stmt == error_mark_node)
241 : stmt = NULL;
242 :
243 : /* Gimplification of a statement expression will nullify the
244 : statement if all its side effects are moved to *PRE_P and *POST_P.
245 :
246 : In this case we will not want to emit the gimplified statement.
247 : However, we may still want to emit a warning, so we do that before
248 : gimplification. */
249 3155202 : if (stmt && warn_unused_value)
250 : {
251 296295 : if (!TREE_SIDE_EFFECTS (stmt))
252 : {
253 0 : if (!IS_EMPTY_STMT (stmt)
254 7089 : && !VOID_TYPE_P (TREE_TYPE (stmt))
255 7089 : && !warning_suppressed_p (stmt, OPT_Wunused_value))
256 0 : warning (OPT_Wunused_value, "statement with no effect");
257 : }
258 : else
259 289206 : warn_if_unused_value (stmt, input_location);
260 : }
261 :
262 3158649 : if (stmt == NULL_TREE)
263 3447 : stmt = alloc_stmt_list ();
264 :
265 3158649 : *stmt_p = stmt;
266 3158649 : }
267 :
268 : /* Gimplify initialization from an AGGR_INIT_EXPR. */
269 :
static void
cp_gimplify_init_expr (tree *expr_p)
{
  tree from = TREE_OPERAND (*expr_p, 1);
  tree to = TREE_OPERAND (*expr_p, 0);
  tree t;

  /* If the initializer is a TARGET_EXPR, elide its temporary: initialize
     TO directly from the TARGET_EXPR's initializer.  */
  if (TREE_CODE (from) == TARGET_EXPR)
    if (tree init = TARGET_EXPR_INITIAL (from))
      {
	/* Make sure that we expected to elide this temporary.  But also allow
	   gimplify_modify_expr_rhs to elide temporaries of trivial type.  */
	gcc_checking_assert (TARGET_EXPR_ELIDING_P (from)
			     || !TREE_ADDRESSABLE (TREE_TYPE (from)));
	if (target_expr_needs_replace (from))
	  {
	    /* If this was changed by cp_genericize_target_expr, we need to
	       walk into it to replace uses of the slot.  */
	    replace_decl (&init, TARGET_EXPR_SLOT (from), to);
	    *expr_p = init;
	    return;
	  }
	else
	  from = init;
      }

  /* Look through any COMPOUND_EXPRs, since build_compound_expr pushes them
     inside the TARGET_EXPR.  */
  for (t = from; t; )
    {
      tree sub = TREE_CODE (t) == COMPOUND_EXPR ? TREE_OPERAND (t, 0) : t;

      /* If we are initializing from an AGGR_INIT_EXPR, drop the INIT_EXPR and
	 replace the slot operand with our target.

	 Should we add a target parm to gimplify_expr instead?  No, as in this
	 case we want to replace the INIT_EXPR.  */
      if (TREE_CODE (sub) == AGGR_INIT_EXPR
	  || TREE_CODE (sub) == VEC_INIT_EXPR)
	{
	  if (TREE_CODE (sub) == AGGR_INIT_EXPR)
	    AGGR_INIT_EXPR_SLOT (sub) = to;
	  else
	    VEC_INIT_EXPR_SLOT (sub) = to;
	  *expr_p = from;

	  /* The initialization is now a side-effect, so the container can
	     become void.  */
	  if (from != sub)
	    TREE_TYPE (from) = void_type_node;
	}

      /* Handle aggregate NSDMI.  */
      replace_placeholders (sub, to);

      /* Stop once T is no longer a COMPOUND_EXPR wrapper; otherwise keep
	 walking down its second operand.  */
      if (t == sub)
	break;
      else
	t = TREE_OPERAND (t, 1);
    }

}
332 :
333 : /* Gimplify a MUST_NOT_THROW_EXPR. */
334 :
335 : static enum gimplify_status
336 404277 : gimplify_must_not_throw_expr (tree *expr_p, gimple_seq *pre_p)
337 : {
338 404277 : tree stmt = *expr_p;
339 404277 : tree temp = voidify_wrapper_expr (stmt, NULL);
340 404277 : tree body = TREE_OPERAND (stmt, 0);
341 404277 : gimple_seq try_ = NULL;
342 404277 : gimple_seq catch_ = NULL;
343 404277 : gimple *mnt;
344 :
345 404277 : gimplify_and_add (body, &try_);
346 404277 : mnt = gimple_build_eh_must_not_throw (call_terminate_fn);
347 404277 : gimple_seq_add_stmt_without_update (&catch_, mnt);
348 404277 : mnt = gimple_build_try (try_, catch_, GIMPLE_TRY_CATCH);
349 :
350 404277 : gimple_seq_add_stmt_without_update (pre_p, mnt);
351 404277 : if (temp)
352 : {
353 33 : *expr_p = temp;
354 33 : return GS_OK;
355 : }
356 :
357 404244 : *expr_p = NULL;
358 404244 : return GS_ALL_DONE;
359 : }
360 :
361 : /* Return TRUE if an operand (OP) of a given TYPE being copied is
362 : really just an empty class copy.
363 :
364 : Check that the operand has a simple form so that TARGET_EXPRs and
365 : non-empty CONSTRUCTORs get reduced properly, and we leave the
366 : return slot optimization alone because it isn't a copy. */
367 :
bool
simple_empty_class_p (tree type, tree op, tree_code code)
{
  /* For a COMPOUND_EXPR, only the value operand matters.  */
  if (TREE_CODE (op) == COMPOUND_EXPR)
    return simple_empty_class_p (type, TREE_OPERAND (op, 1), code);
  if (SIMPLE_TARGET_EXPR_P (op)
      && TYPE_HAS_TRIVIAL_DESTRUCTOR (type))
    /* The TARGET_EXPR is itself a simple copy, look through it.  */
    return simple_empty_class_p (type, TARGET_EXPR_INITIAL (op), code);

  if (TREE_CODE (op) == PARM_DECL
      && TREE_ADDRESSABLE (TREE_TYPE (op)))
    {
      tree fn = DECL_CONTEXT (op);
      if (DECL_THUNK_P (fn)
	  || lambda_static_thunk_p (fn))
	/* In a thunk, we pass through invisible reference parms, so this isn't
	   actually a copy.  */
	return false;
    }

  /* OP is an empty-class copy if it has one of the simple forms below,
     is not a clobber, and TYPE really is an empty class.  */
  return
    (TREE_CODE (op) == EMPTY_CLASS_EXPR
     || code == MODIFY_EXPR
     || is_gimple_lvalue (op)
     || INDIRECT_REF_P (op)
     || (TREE_CODE (op) == CONSTRUCTOR
	 && CONSTRUCTOR_NELTS (op) == 0)
     || (TREE_CODE (op) == CALL_EXPR
	 && !CALL_EXPR_RETURN_SLOT_OPT (op)))
    && !TREE_CLOBBER_P (op)
    && is_really_empty_class (type, /*ignore_vptr*/true);
}
401 :
402 : /* Returns true if evaluating E as an lvalue has side-effects;
403 : specifically, a volatile lvalue has TREE_SIDE_EFFECTS, but it doesn't really
404 : have side-effects until there is a read or write through it. */
405 :
406 : static bool
407 1840824 : lvalue_has_side_effects (tree e)
408 : {
409 1840824 : if (!TREE_SIDE_EFFECTS (e))
410 : return false;
411 42299 : while (handled_component_p (e))
412 : {
413 1302 : if (TREE_CODE (e) == ARRAY_REF
414 1302 : && TREE_SIDE_EFFECTS (TREE_OPERAND (e, 1)))
415 : return true;
416 1026 : e = TREE_OPERAND (e, 0);
417 : }
418 40997 : if (DECL_P (e))
419 : /* Just naming a variable has no side-effects. */
420 : return false;
421 25408 : else if (INDIRECT_REF_P (e))
422 : /* Similarly, indirection has no side-effects. */
423 25329 : return TREE_SIDE_EFFECTS (TREE_OPERAND (e, 0));
424 : else
425 : /* For anything else, trust TREE_SIDE_EFFECTS. */
426 79 : return TREE_SIDE_EFFECTS (e);
427 : }
428 :
429 : /* Gimplify *EXPR_P as rvalue into an expression that can't be modified
430 : by expressions with side-effects in other operands. */
431 :
432 : static enum gimplify_status
433 20961 : gimplify_to_rvalue (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
434 : bool (*gimple_test_f) (tree))
435 : {
436 20961 : enum gimplify_status t
437 20961 : = gimplify_expr (expr_p, pre_p, post_p, gimple_test_f, fb_rvalue);
438 20961 : if (t == GS_ERROR)
439 : return GS_ERROR;
440 20961 : else if (is_gimple_variable (*expr_p) && TREE_CODE (*expr_p) != SSA_NAME)
441 1944 : *expr_p = get_initialized_tmp_var (*expr_p, pre_p);
442 : return t;
443 : }
444 :
445 : /* Like gimplify_arg, but if ORDERED is set (which should be set if
446 : any of the arguments this argument is sequenced before has
447 : TREE_SIDE_EFFECTS set, make sure expressions with is_gimple_reg_type type
448 : are gimplified into SSA_NAME or a fresh temporary and for
449 : non-is_gimple_reg_type we don't optimize away TARGET_EXPRs. */
450 :
451 : static enum gimplify_status
452 2579024 : cp_gimplify_arg (tree *arg_p, gimple_seq *pre_p, location_t call_location,
453 : bool ordered)
454 : {
455 2579024 : enum gimplify_status t;
456 2579024 : if (ordered
457 227946 : && !is_gimple_reg_type (TREE_TYPE (*arg_p))
458 2580181 : && TREE_CODE (*arg_p) == TARGET_EXPR)
459 : {
460 : /* gimplify_arg would strip away the TARGET_EXPR, but
461 : that can mean we don't copy the argument and some following
462 : argument with side-effect could modify it. */
463 1029 : protected_set_expr_location (*arg_p, call_location);
464 1029 : return gimplify_expr (arg_p, pre_p, NULL, is_gimple_lvalue, fb_either);
465 : }
466 : else
467 : {
468 2577995 : t = gimplify_arg (arg_p, pre_p, call_location);
469 2577995 : if (t == GS_ERROR)
470 : return GS_ERROR;
471 2577995 : else if (ordered
472 226917 : && is_gimple_reg_type (TREE_TYPE (*arg_p))
473 226789 : && is_gimple_variable (*arg_p)
474 135061 : && TREE_CODE (*arg_p) != SSA_NAME
475 : /* No need to force references into register, references
476 : can't be modified. */
477 77647 : && !TYPE_REF_P (TREE_TYPE (*arg_p))
478 : /* And this can't be modified either. */
479 2635067 : && *arg_p != current_class_ptr)
480 8634 : *arg_p = get_initialized_tmp_var (*arg_p, pre_p);
481 2577995 : return t;
482 : }
483 :
484 : }
485 :
486 : /* Do C++-specific gimplification. Args are as for gimplify_expr. */
487 :
int
cp_gimplify_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
{
  int saved_stmts_are_full_exprs_p = 0;
  location_t loc = cp_expr_loc_or_input_loc (*expr_p);
  enum tree_code code = TREE_CODE (*expr_p);
  enum gimplify_status ret;

  /* For statement codes, install this statement's full-expression flag
     for the duration of its gimplification; restored at the end.  */
  if (STATEMENT_CODE_P (code))
    {
      saved_stmts_are_full_exprs_p = stmts_are_full_exprs_p ();
      current_stmt_tree ()->stmts_are_full_exprs_p
	= STMT_IS_FULL_EXPR_P (*expr_p);
    }

  switch (code)
    {
    case AGGR_INIT_EXPR:
      simplify_aggr_init_expr (expr_p);
      ret = GS_OK;
      break;

    case VEC_INIT_EXPR:
      {
	*expr_p = expand_vec_init_expr (NULL_TREE, *expr_p,
					tf_warning_or_error);

	/* The expansion is fresh GENERIC; fold and genericize it before
	   handing it back to the gimplifier.  */
	cp_fold_data data (ff_genericize | ff_mce_false);
	cp_walk_tree (expr_p, cp_fold_r, &data, NULL);
	cp_genericize_tree (expr_p, false);
	copy_if_shared (expr_p);
	ret = GS_OK;
      }
      break;

    case THROW_EXPR:
      /* FIXME communicate throw type to back end, probably by moving
	 THROW_EXPR into ../tree.def.  */
      *expr_p = TREE_OPERAND (*expr_p, 0);
      ret = GS_OK;
      break;

    case MUST_NOT_THROW_EXPR:
      ret = gimplify_must_not_throw_expr (expr_p, pre_p);
      break;

      /* We used to do this for MODIFY_EXPR as well, but that's unsafe; the
	 LHS of an assignment might also be involved in the RHS, as in bug
	 25979.  */
    case INIT_EXPR:
      cp_gimplify_init_expr (expr_p);
      if (TREE_CODE (*expr_p) != INIT_EXPR)
	return GS_OK;
      /* Fall through.  */
    case MODIFY_EXPR:
    modify_expr_case:
      {
	/* If the back end isn't clever enough to know that the lhs and rhs
	   types are the same, add an explicit conversion.  */
	tree op0 = TREE_OPERAND (*expr_p, 0);
	tree op1 = TREE_OPERAND (*expr_p, 1);

	if (!error_operand_p (op0)
	    && !error_operand_p (op1)
	    && (TYPE_STRUCTURAL_EQUALITY_P (TREE_TYPE (op0))
		|| TYPE_STRUCTURAL_EQUALITY_P (TREE_TYPE (op1)))
	    && !useless_type_conversion_p (TREE_TYPE (op1), TREE_TYPE (op0)))
	  TREE_OPERAND (*expr_p, 1) = build1 (VIEW_CONVERT_EXPR,
					      TREE_TYPE (op0), op1);

	else if (simple_empty_class_p (TREE_TYPE (op0), op1, code))
	  {
	    while (TREE_CODE (op1) == TARGET_EXPR)
	      /* We're disconnecting the initializer from its target,
		 don't create a temporary.  */
	      op1 = TARGET_EXPR_INITIAL (op1);

	    /* Remove any copies of empty classes.  Also drop volatile
	       variables on the RHS to avoid infinite recursion from
	       gimplify_expr trying to load the value.  */
	    if (TREE_SIDE_EFFECTS (op1))
	      {
		if (TREE_THIS_VOLATILE (op1)
		    && (REFERENCE_CLASS_P (op1) || DECL_P (op1)))
		  op1 = build_fold_addr_expr (op1);

		gimplify_and_add (op1, pre_p);
	      }
	    gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
			   is_gimple_lvalue, fb_lvalue);
	    *expr_p = TREE_OPERAND (*expr_p, 0);
	    if (code == RETURN_EXPR && REFERENCE_CLASS_P (*expr_p))
	      /* Avoid 'return *<retval>;'  */
	      *expr_p = TREE_OPERAND (*expr_p, 0);
	  }
	/* P0145 says that the RHS is sequenced before the LHS.
	   gimplify_modify_expr gimplifies the RHS before the LHS, but that
	   isn't quite strong enough in two cases:

	   1) gimplify.cc wants to leave a CALL_EXPR on the RHS, which would
	   mean it's evaluated after the LHS.

	   2) the value calculation of the RHS is also sequenced before the
	   LHS, so for scalar assignment we need to preevaluate if the
	   RHS could be affected by LHS side-effects even if it has no
	   side-effects of its own.  We don't need this for classes because
	   class assignment takes its RHS by reference.  */
	else if (flag_strong_eval_order > 1
		 && TREE_CODE (*expr_p) == MODIFY_EXPR
		 && lvalue_has_side_effects (op0)
		 && (TREE_CODE (op1) == CALL_EXPR
		     || (SCALAR_TYPE_P (TREE_TYPE (op1))
			 && !TREE_CONSTANT (op1))))
	  TREE_OPERAND (*expr_p, 1) = get_initialized_tmp_var (op1, pre_p);
      }
      ret = GS_OK;
      break;

    case EMPTY_CLASS_EXPR:
      /* We create an empty CONSTRUCTOR with RECORD_TYPE.  */
      *expr_p = build_constructor (TREE_TYPE (*expr_p), NULL);
      ret = GS_OK;
      break;

    case BASELINK:
      *expr_p = BASELINK_FUNCTIONS (*expr_p);
      ret = GS_OK;
      break;

    case TRY_BLOCK:
      genericize_try_block (expr_p);
      ret = GS_OK;
      break;

    case HANDLER:
      genericize_catch_block (expr_p);
      ret = GS_OK;
      break;

    case EH_SPEC_BLOCK:
      genericize_eh_spec_block (expr_p);
      ret = GS_OK;
      break;

    case USING_STMT:
      gcc_unreachable ();

      /* These statement forms are lowered during genericization and must
	 not survive to gimplification.  */
    case FOR_STMT:
    case WHILE_STMT:
    case DO_STMT:
    case SWITCH_STMT:
    case CONTINUE_STMT:
    case BREAK_STMT:
      gcc_unreachable ();

    case OMP_FOR:
    case OMP_SIMD:
    case OMP_DISTRIBUTE:
    case OMP_LOOP:
    case OMP_TASKLOOP:
      ret = cp_gimplify_omp_for (expr_p, pre_p);
      break;

    case EXPR_STMT:
      gimplify_expr_stmt (expr_p);
      ret = GS_OK;
      break;

    case UNARY_PLUS_EXPR:
      {
	tree arg = TREE_OPERAND (*expr_p, 0);
	tree type = TREE_TYPE (*expr_p);
	*expr_p = (TREE_TYPE (arg) != type) ? fold_convert (type, arg)
					    : arg;
	ret = GS_OK;
      }
      break;

    case CALL_EXPR:
      ret = GS_OK;
      /* Under C++17 evaluation order, evaluate the callee before the
	 arguments when the call is through a function pointer.  */
      if (flag_strong_eval_order == 2
	  && CALL_EXPR_FN (*expr_p)
	  && !CALL_EXPR_OPERATOR_SYNTAX (*expr_p)
	  && cp_get_callee_fndecl_nofold (*expr_p) == NULL_TREE)
	{
	  tree fnptrtype = TREE_TYPE (CALL_EXPR_FN (*expr_p));
	  enum gimplify_status t
	    = gimplify_to_rvalue (&CALL_EXPR_FN (*expr_p), pre_p, NULL,
				  is_gimple_call_addr);
	  if (t == GS_ERROR)
	    ret = GS_ERROR;
	  /* GIMPLE considers most pointer conversion useless, but for
	     calls we actually care about the exact function pointer type.  */
	  else if (TREE_TYPE (CALL_EXPR_FN (*expr_p)) != fnptrtype)
	    CALL_EXPR_FN (*expr_p)
	      = build1 (NOP_EXPR, fnptrtype, CALL_EXPR_FN (*expr_p));
	}
      if (!CALL_EXPR_FN (*expr_p))
	/* Internal function call.  */;
      else if (CALL_EXPR_REVERSE_ARGS (*expr_p))
	{
	  /* This is a call to a (compound) assignment operator that used
	     the operator syntax; gimplify the RHS first.  */
	  gcc_assert (call_expr_nargs (*expr_p) == 2);
	  gcc_assert (!CALL_EXPR_ORDERED_ARGS (*expr_p));
	  enum gimplify_status t
	    = cp_gimplify_arg (&CALL_EXPR_ARG (*expr_p, 1), pre_p, loc,
			       TREE_SIDE_EFFECTS (CALL_EXPR_ARG (*expr_p, 0)));
	  if (t == GS_ERROR)
	    ret = GS_ERROR;
	}
      else if (CALL_EXPR_ORDERED_ARGS (*expr_p))
	{
	  /* Leave the last argument for gimplify_call_expr, to avoid problems
	     with __builtin_va_arg_pack().  */
	  int nargs = call_expr_nargs (*expr_p) - 1;
	  int last_side_effects_arg = -1;
	  for (int i = nargs; i > 0; --i)
	    if (TREE_SIDE_EFFECTS (CALL_EXPR_ARG (*expr_p, i)))
	      {
		last_side_effects_arg = i;
		break;
	      }
	  for (int i = 0; i < nargs; ++i)
	    {
	      enum gimplify_status t
		= cp_gimplify_arg (&CALL_EXPR_ARG (*expr_p, i), pre_p, loc,
				   i < last_side_effects_arg);
	      if (t == GS_ERROR)
		ret = GS_ERROR;
	    }
	}
      else if (flag_strong_eval_order
	       && !CALL_EXPR_OPERATOR_SYNTAX (*expr_p))
	{
	  /* If flag_strong_eval_order, evaluate the object argument first.  */
	  tree fntype = TREE_TYPE (CALL_EXPR_FN (*expr_p));
	  if (INDIRECT_TYPE_P (fntype))
	    fntype = TREE_TYPE (fntype);
	  if (TREE_CODE (fntype) == METHOD_TYPE)
	    {
	      int nargs = call_expr_nargs (*expr_p);
	      bool side_effects = false;
	      for (int i = 1; i < nargs; ++i)
		if (TREE_SIDE_EFFECTS (CALL_EXPR_ARG (*expr_p, i)))
		  {
		    side_effects = true;
		    break;
		  }
	      enum gimplify_status t
		= cp_gimplify_arg (&CALL_EXPR_ARG (*expr_p, 0), pre_p, loc,
				   side_effects);
	      if (t == GS_ERROR)
		ret = GS_ERROR;
	    }
	}
      /* Fold front-end builtins that must not survive gimplification;
	 notably, by now we know we are definitely not in a manifestly
	 constant-evaluated context, so is_constant_evaluated is false.  */
      if (ret != GS_ERROR)
	{
	  tree decl = cp_get_callee_fndecl_nofold (*expr_p);
	  if (decl && fndecl_built_in_p (decl, BUILT_IN_FRONTEND))
	    switch (DECL_FE_FUNCTION_CODE (decl))
	      {
	      case CP_BUILT_IN_IS_CONSTANT_EVALUATED:
		*expr_p = boolean_false_node;
		break;
	      case CP_BUILT_IN_SOURCE_LOCATION:
		*expr_p
		  = fold_builtin_source_location (*expr_p);
		break;
	      case CP_BUILT_IN_IS_CORRESPONDING_MEMBER:
		*expr_p
		  = fold_builtin_is_corresponding_member
		    (EXPR_LOCATION (*expr_p), call_expr_nargs (*expr_p),
		     &CALL_EXPR_ARG (*expr_p, 0));
		break;
	      case CP_BUILT_IN_IS_POINTER_INTERCONVERTIBLE_WITH_CLASS:
		*expr_p
		  = fold_builtin_is_pointer_inverconvertible_with_class
		    (EXPR_LOCATION (*expr_p), call_expr_nargs (*expr_p),
		     &CALL_EXPR_ARG (*expr_p, 0));
		break;
	      default:
		break;
	      }
	}
      break;

    case TARGET_EXPR:
      /* A TARGET_EXPR that expresses direct-initialization should have been
	 elided by cp_gimplify_init_expr.  */
      gcc_checking_assert (!TARGET_EXPR_DIRECT_INIT_P (*expr_p));
      /* Likewise, but allow extra temps of trivial type so that
	 gimplify_init_ctor_preeval can materialize subobjects of a CONSTRUCTOR
	 on the rhs of an assignment, as in constexpr-aggr1.C.  */
      gcc_checking_assert (!TARGET_EXPR_ELIDING_P (*expr_p)
			   || !TREE_ADDRESSABLE (TREE_TYPE (*expr_p)));
      ret = GS_UNHANDLED;
      break;

    case PTRMEM_CST:
      *expr_p = cplus_expand_constant (*expr_p);
      if (TREE_CODE (*expr_p) == PTRMEM_CST)
	ret = GS_ERROR;
      else
	ret = GS_OK;
      break;

    case RETURN_EXPR:
      if (TREE_OPERAND (*expr_p, 0)
	  && (TREE_CODE (TREE_OPERAND (*expr_p, 0)) == INIT_EXPR
	      || TREE_CODE (TREE_OPERAND (*expr_p, 0)) == MODIFY_EXPR))
	{
	  expr_p = &TREE_OPERAND (*expr_p, 0);
	  /* Avoid going through the INIT_EXPR case, which can
	     degrade INIT_EXPRs into AGGR_INIT_EXPRs.  */
	  goto modify_expr_case;
	}
      /* Fall through.  */

    default:
      ret = (enum gimplify_status) c_gimplify_expr (expr_p, pre_p, post_p);
      break;
    }

  /* Restore saved state.  */
  if (STATEMENT_CODE_P (code))
    current_stmt_tree ()->stmts_are_full_exprs_p
      = saved_stmts_are_full_exprs_p;

  return ret;
}
819 :
820 : static inline bool
821 972429642 : is_invisiref_parm (const_tree t)
822 : {
823 972429642 : return ((TREE_CODE (t) == PARM_DECL || TREE_CODE (t) == RESULT_DECL)
824 972429642 : && DECL_BY_REFERENCE (t));
825 : }
826 :
827 : /* A stable comparison routine for use with splay trees and DECLs. */
828 :
829 : static int
830 59481 : splay_tree_compare_decl_uid (splay_tree_key xa, splay_tree_key xb)
831 : {
832 59481 : tree a = (tree) xa;
833 59481 : tree b = (tree) xb;
834 :
835 59481 : return DECL_UID (a) - DECL_UID (b);
836 : }
837 :
838 : /* OpenMP context during genericization. */
839 :
struct cp_genericize_omp_taskreg
{
  /* True for a parallel region; stops the outward search in
     omp_cxx_notice_variable.  */
  bool is_parallel;
  /* True when the region's data-sharing default is "shared"; if so the
     firstprivate analysis in omp_cxx_notice_variable is skipped.  */
  bool default_shared;
  /* Enclosing task/parallel region, or NULL at the outermost level.  */
  struct cp_genericize_omp_taskreg *outer;
  /* Maps each DECL seen in this region to its OMP_CLAUSE_DEFAULT_* flag
     (NOTE(review): presumably keyed with splay_tree_compare_decl_uid --
     the tree's creation is outside this file chunk).  */
  splay_tree variables;
};
847 :
848 : /* Return true if genericization should try to determine if
849 : DECL is firstprivate or shared within task regions. */
850 :
851 : static bool
852 116510 : omp_var_to_track (tree decl)
853 : {
854 116510 : tree type = TREE_TYPE (decl);
855 116510 : if (is_invisiref_parm (decl))
856 554 : type = TREE_TYPE (type);
857 115956 : else if (TYPE_REF_P (type))
858 5274 : type = TREE_TYPE (type);
859 141593 : while (TREE_CODE (type) == ARRAY_TYPE)
860 25083 : type = TREE_TYPE (type);
861 116510 : if (type == error_mark_node || !CLASS_TYPE_P (type))
862 : return false;
863 13209 : if (VAR_P (decl) && CP_DECL_THREAD_LOCAL_P (decl))
864 : return false;
865 13206 : if (cxx_omp_predetermined_sharing (decl) != OMP_CLAUSE_DEFAULT_UNSPECIFIED)
866 : return false;
867 : return true;
868 : }
869 :
870 : /* Note DECL use in OpenMP region OMP_CTX during genericization. */
871 :
static void
omp_cxx_notice_variable (struct cp_genericize_omp_taskreg *omp_ctx, tree decl)
{
  splay_tree_node n = splay_tree_lookup (omp_ctx->variables,
					 (splay_tree_key) decl);
  /* Only the first use of DECL in this region needs processing.  */
  if (n == NULL)
    {
      int flags = OMP_CLAUSE_DEFAULT_SHARED;
      /* Record the use in all enclosing regions first.  */
      if (omp_ctx->outer)
	omp_cxx_notice_variable (omp_ctx->outer, decl);
      if (!omp_ctx->default_shared)
	{
	  struct cp_genericize_omp_taskreg *octx;

	  /* Walk outward (stopping at a parallel region) looking for an
	     outer region where DECL is already non-shared.  */
	  for (octx = omp_ctx->outer; octx; octx = octx->outer)
	    {
	      n = splay_tree_lookup (octx->variables, (splay_tree_key) decl);
	      if (n && n->value != OMP_CLAUSE_DEFAULT_SHARED)
		{
		  flags = OMP_CLAUSE_DEFAULT_FIRSTPRIVATE;
		  break;
		}
	      if (octx->is_parallel)
		break;
	    }
	  /* If we ran off the top, parms and function-local non-static
	     variables default to firstprivate as well.  */
	  if (octx == NULL
	      && (TREE_CODE (decl) == PARM_DECL
		  || (!(TREE_STATIC (decl) || DECL_EXTERNAL (decl))
		      && DECL_CONTEXT (decl) == current_function_decl)))
	    flags = OMP_CLAUSE_DEFAULT_FIRSTPRIVATE;
	  if (flags == OMP_CLAUSE_DEFAULT_FIRSTPRIVATE)
	    {
	      /* DECL is implicitly determined firstprivate in
		 the current task construct.  Ensure copy ctor and
		 dtor are instantiated, because during gimplification
		 it will be already too late.  */
	      tree type = TREE_TYPE (decl);
	      if (is_invisiref_parm (decl))
		type = TREE_TYPE (type);
	      else if (TYPE_REF_P (type))
		type = TREE_TYPE (type);
	      while (TREE_CODE (type) == ARRAY_TYPE)
		type = TREE_TYPE (type);
	      get_copy_ctor (type, tf_none);
	      get_dtor (type, tf_none);
	    }
	}
      splay_tree_insert (omp_ctx->variables, (splay_tree_key) decl, flags);
    }
}
922 :
923 : /* True if any of the element initializers in CTOR are TARGET_EXPRs that are
924 : not expected to elide, e.g. because unsafe_copy_elision_p is true. */
925 :
926 : static bool
927 19290 : any_non_eliding_target_exprs (tree ctor)
928 : {
929 43828 : for (const constructor_elt &e : *CONSTRUCTOR_ELTS (ctor))
930 : {
931 24540 : if (TREE_CODE (e.value) == TARGET_EXPR
932 24540 : && !TARGET_EXPR_ELIDING_P (e.value))
933 : return true;
934 : }
935 : return false;
936 : }
937 :
/* If we might need to clean up a partially constructed object, break down the
   CONSTRUCTOR with split_nonconstant_init.  Also expand VEC_INIT_EXPR at this
   point.  If initializing TO with FROM is non-trivial, overwrite *REPLACE with
   the result.  */

static void
cp_genericize_init (tree *replace, tree from, tree to)
{
  tree init = NULL_TREE;
  if (TREE_CODE (from) == VEC_INIT_EXPR)
    /* Array initialization: lower the VEC_INIT_EXPR to GENERIC now.  */
    init = expand_vec_init_expr (to, from, tf_warning_or_error);
  else if (TREE_CODE (from) == CONSTRUCTOR
	   && TREE_SIDE_EFFECTS (from)
	   /* Split when a throw mid-construction would need cleanup of the
	      already-built elements, or when some element TARGET_EXPR is
	      not expected to be elided.  */
	   && ((flag_exceptions
		&& TYPE_HAS_NONTRIVIAL_DESTRUCTOR (TREE_TYPE (from)))
	       || any_non_eliding_target_exprs (from)))
    {
      /* TO may be evaluated multiple times by the split init.  */
      to = cp_stabilize_reference (to);
      replace_placeholders (from, to);
      init = split_nonconstant_init (to, from);
    }

  if (init)
    {
      if (*replace == from)
	/* Make cp_gimplify_init_expr call replace_decl on this
	   TARGET_EXPR_INITIAL.  */
	init = fold_convert (void_type_node, init);
      *replace = init;
    }
}
969 :
970 : /* For an INIT_EXPR, replace the INIT_EXPR itself. */
971 :
972 : static void
973 30068385 : cp_genericize_init_expr (tree *stmt_p)
974 : {
975 30068385 : iloc_sentinel ils = EXPR_LOCATION (*stmt_p);
976 30068385 : tree to = TREE_OPERAND (*stmt_p, 0);
977 30068385 : tree from = TREE_OPERAND (*stmt_p, 1);
978 2937619 : if (SIMPLE_TARGET_EXPR_P (from)
979 : /* Return gets confused if we clobber its INIT_EXPR this soon. */
980 32036631 : && TREE_CODE (to) != RESULT_DECL)
981 10441 : from = TARGET_EXPR_INITIAL (from);
982 30068385 : cp_genericize_init (stmt_p, from, to);
983 30068385 : }
984 :
985 : /* For a TARGET_EXPR, change the TARGET_EXPR_INITIAL. We will need to use
986 : replace_decl later when we know what we're initializing. */
987 :
988 : static void
989 6895298 : cp_genericize_target_expr (tree *stmt_p)
990 : {
991 6895298 : iloc_sentinel ils = EXPR_LOCATION (*stmt_p);
992 6895298 : tree slot = TARGET_EXPR_SLOT (*stmt_p);
993 6895298 : cp_genericize_init (&TARGET_EXPR_INITIAL (*stmt_p),
994 6895298 : TARGET_EXPR_INITIAL (*stmt_p), slot);
995 6895298 : gcc_assert (!DECL_INITIAL (slot));
996 6895298 : }
997 :
998 : /* Similar to if (target_expr_needs_replace) replace_decl, but TP is the
999 : TARGET_EXPR_INITIAL, and this also updates *_SLOT. We need this extra
1000 : replacement when cp_folding TARGET_EXPR to preserve the invariant that
1001 : AGGR_INIT_EXPR_SLOT agrees with the enclosing TARGET_EXPR_SLOT. */
1002 :
1003 : bool
1004 120 : maybe_replace_decl (tree *tp, tree decl, tree replacement)
1005 : {
1006 120 : if (!*tp || !VOID_TYPE_P (TREE_TYPE (*tp)))
1007 : return false;
1008 : tree t = *tp;
1009 52 : while (TREE_CODE (t) == COMPOUND_EXPR)
1010 0 : t = TREE_OPERAND (t, 1);
1011 52 : if (TREE_CODE (t) == AGGR_INIT_EXPR)
1012 52 : replace_decl (&AGGR_INIT_EXPR_SLOT (t), decl, replacement);
1013 0 : else if (TREE_CODE (t) == VEC_INIT_EXPR)
1014 0 : replace_decl (&VEC_INIT_EXPR_SLOT (t), decl, replacement);
1015 : else
1016 0 : replace_decl (tp, decl, replacement);
1017 : return true;
1018 : }
1019 :
/* Genericization context.  */

struct cp_genericize_data
{
  /* Trees already visited; used to avoid walking the same tree twice.  */
  hash_set<tree> *p_set;
  /* Stack of enclosing BIND_EXPRs, innermost last.  */
  auto_vec<tree> bind_expr_stack;
  /* Innermost enclosing OpenMP task/parallel region, or NULL.  */
  struct cp_genericize_omp_taskreg *omp_ctx;
  /* Innermost enclosing TRY_BLOCK or MUST_NOT_THROW_EXPR, or NULL.  */
  tree try_block;
  /* True while walking trees that must not be sanitized
     (e.g. static initializers).  */
  bool no_sanitize_p;
  /* Whether invisible reference parameters should be dereferenced.  */
  bool handle_invisiref_parm_p;
};
1031 :
/* Perform any pre-gimplification folding of C++ front end trees to
   GENERIC.
   Note: The folding of non-omp cases is something to move into
     the middle-end.  As for now we have most foldings only on GENERIC
     in fold-const, we need to perform this before transformation to
     GIMPLE-form.

   This is a cp_walk_tree callback; DATA_ points at a cp_fold_data.
   Returns NULL to continue the walk.  */

static tree
cp_fold_r (tree *stmt_p, int *walk_subtrees, void *data_)
{
  cp_fold_data *data = (cp_fold_data*)data_;
  tree stmt = *stmt_p;
  enum tree_code code = TREE_CODE (stmt);

  /* Diagnose taking the address of an immediate (consteval) function,
     and degrade the bad address to a zero constant so later passes have
     something harmless to chew on.  */
  switch (code)
    {
    case PTRMEM_CST:
      if (TREE_CODE (PTRMEM_CST_MEMBER (stmt)) == FUNCTION_DECL
	  && DECL_IMMEDIATE_FUNCTION_P (PTRMEM_CST_MEMBER (stmt)))
	{
	  /* The pset add doubles as "already diagnosed" so the error is
	     emitted only once per PTRMEM_CST.  */
	  if (!data->pset.add (stmt))
	    error_at (PTRMEM_CST_LOCATION (stmt),
		      "taking address of an immediate function %qD",
		      PTRMEM_CST_MEMBER (stmt));
	  stmt = *stmt_p = build_zero_cst (TREE_TYPE (stmt));
	  break;
	}
      break;

    case ADDR_EXPR:
      if (TREE_CODE (TREE_OPERAND (stmt, 0)) == FUNCTION_DECL
	  && DECL_IMMEDIATE_FUNCTION_P (TREE_OPERAND (stmt, 0)))
	{
	  error_at (EXPR_LOCATION (stmt),
		    "taking address of an immediate function %qD",
		    TREE_OPERAND (stmt, 0));
	  stmt = *stmt_p = build_zero_cst (TREE_TYPE (stmt));
	  break;
	}
      break;

    default:
      break;
    }

  *stmt_p = stmt = cp_fold (*stmt_p, data->flags);

  if (data->pset.add (stmt))
    {
      /* Don't walk subtrees of stmts we've already walked once, otherwise
	 we can have exponential complexity with e.g. lots of nested
	 SAVE_EXPRs or TARGET_EXPRs.  cp_fold uses a cache and will return
	 always the same tree, which the first time cp_fold_r has been
	 called on it had the subtrees walked.  */
      *walk_subtrees = 0;
      return NULL;
    }

  /* Re-fetch the code: cp_fold may have replaced STMT altogether.  */
  code = TREE_CODE (stmt);
  switch (code)
    {
      tree x;
      int i, n;
    case OMP_FOR:
    case OMP_SIMD:
    case OMP_DISTRIBUTE:
    case OMP_LOOP:
    case OMP_TASKLOOP:
    case OACC_LOOP:
      /* Walk the pieces of an OMP loop construct by hand, folding only
	 the operands of the cond/incr expressions rather than the
	 expressions themselves, which must keep their shape for
	 gimplification.  */
      cp_walk_tree (&OMP_FOR_BODY (stmt), cp_fold_r, data, NULL);
      cp_walk_tree (&OMP_FOR_CLAUSES (stmt), cp_fold_r, data, NULL);
      cp_walk_tree (&OMP_FOR_INIT (stmt), cp_fold_r, data, NULL);
      x = OMP_FOR_COND (stmt);
      if (x && TREE_CODE_CLASS (TREE_CODE (x)) == tcc_comparison)
	{
	  cp_walk_tree (&TREE_OPERAND (x, 0), cp_fold_r, data, NULL);
	  cp_walk_tree (&TREE_OPERAND (x, 1), cp_fold_r, data, NULL);
	}
      else if (x && TREE_CODE (x) == TREE_VEC)
	{
	  /* Collapsed loops: one comparison per collapsed dimension.  */
	  n = TREE_VEC_LENGTH (x);
	  for (i = 0; i < n; i++)
	    {
	      tree o = TREE_VEC_ELT (x, i);
	      if (o && TREE_CODE_CLASS (TREE_CODE (o)) == tcc_comparison)
		cp_walk_tree (&TREE_OPERAND (o, 1), cp_fold_r, data, NULL);
	    }
	}
      x = OMP_FOR_INCR (stmt);
      if (x && TREE_CODE (x) == TREE_VEC)
	{
	  n = TREE_VEC_LENGTH (x);
	  for (i = 0; i < n; i++)
	    {
	      tree o = TREE_VEC_ELT (x, i);
	      if (o && TREE_CODE (o) == MODIFY_EXPR)
		o = TREE_OPERAND (o, 1);
	      if (o && (TREE_CODE (o) == PLUS_EXPR || TREE_CODE (o) == MINUS_EXPR
			|| TREE_CODE (o) == POINTER_PLUS_EXPR))
		{
		  cp_walk_tree (&TREE_OPERAND (o, 0), cp_fold_r, data, NULL);
		  cp_walk_tree (&TREE_OPERAND (o, 1), cp_fold_r, data, NULL);
		}
	    }
	}
      cp_walk_tree (&OMP_FOR_PRE_BODY (stmt), cp_fold_r, data, NULL);
      *walk_subtrees = 0;
      return NULL;

    case IF_STMT:
      if (IF_STMT_CONSTEVAL_P (stmt))
	{
	  /* Don't walk THEN_CLAUSE (stmt) for consteval if.  IF_COND is always
	     boolean_false_node.  */
	  cp_walk_tree (&ELSE_CLAUSE (stmt), cp_fold_r, data, NULL);
	  cp_walk_tree (&IF_SCOPE (stmt), cp_fold_r, data, NULL);
	  *walk_subtrees = 0;
	  return NULL;
	}
      break;

    /* cp_genericize_{init,target}_expr are only for genericize time; they're
       here rather than in cp_genericize to avoid problems with the invisible
       reference transition.  */
    case INIT_EXPR:
      if (data->flags & ff_genericize)
	cp_genericize_init_expr (stmt_p);
      break;

    case TARGET_EXPR:
      if (data->flags & ff_genericize)
	cp_genericize_target_expr (stmt_p);

      /* Folding might replace e.g. a COND_EXPR with a TARGET_EXPR; in
	 that case, strip it in favor of this one.  */
      if (tree &init = TARGET_EXPR_INITIAL (stmt))
	{
	  cp_walk_tree (&init, cp_fold_r, data, NULL);
	  cp_walk_tree (&TARGET_EXPR_CLEANUP (stmt), cp_fold_r, data, NULL);
	  *walk_subtrees = 0;
	  if (TREE_CODE (init) == TARGET_EXPR)
	    {
	      tree sub = TARGET_EXPR_INITIAL (init);
	      /* Keep the inner AGGR_INIT_EXPR_SLOT consistent with the
		 outer TARGET_EXPR_SLOT we are keeping.  */
	      maybe_replace_decl (&sub, TARGET_EXPR_SLOT (init),
				  TARGET_EXPR_SLOT (stmt));
	      init = sub;
	    }
	}
      break;

    default:
      break;
    }

  return NULL;
}
1188 :
/* Fold ALL the trees!  FIXME we should be able to remove this, but
   apparently that still causes optimization regressions.

   Walks the saved body of FNDECL with cp_fold_r in genericize mode.  */

void
cp_fold_function (tree fndecl)
{
  /* By now all manifestly-constant-evaluated expressions will have
     been constant-evaluated already if possible, so we can safely
     pass ff_mce_false.  */
  cp_fold_data data (ff_genericize | ff_mce_false);
  cp_walk_tree (&DECL_SAVED_TREE (fndecl), cp_fold_r, &data, NULL);
}
1201 :
1202 : /* Turn SPACESHIP_EXPR EXPR into GENERIC. */
1203 :
1204 6808 : static tree genericize_spaceship (tree expr)
1205 : {
1206 6808 : iloc_sentinel s (cp_expr_location (expr));
1207 6808 : tree type = TREE_TYPE (expr);
1208 6808 : tree op0 = TREE_OPERAND (expr, 0);
1209 6808 : tree op1 = TREE_OPERAND (expr, 1);
1210 6808 : return genericize_spaceship (input_location, type, op0, op1);
1211 6808 : }
1212 :
/* If EXPR involves an anonymous VLA type, prepend a DECL_EXPR for that type
   to trigger gimplify_type_sizes; otherwise a cast to pointer-to-VLA confuses
   the middle-end (c++/88256).  If EXPR is a DECL, use add_stmt and return
   NULL_TREE; otherwise return a COMPOUND_STMT of the DECL_EXPR and EXPR.  */

tree
predeclare_vla (tree expr)
{
  tree type = TREE_TYPE (expr);
  if (type == error_mark_node)
    return expr;
  if (is_typedef_decl (expr))
    type = DECL_ORIGINAL_TYPE (expr);

  /* We need to strip pointers for gimplify_type_sizes.  */
  tree vla = type;
  while (POINTER_TYPE_P (vla))
    {
      /* A named pointer type has already been handled.  */
      if (TYPE_NAME (vla))
	return expr;
      vla = TREE_TYPE (vla);
    }
  /* Nothing to do unless we stripped down to an anonymous
     variably-modified type.  */
  if (vla == type || TYPE_NAME (vla)
      || !variably_modified_type_p (vla, NULL_TREE))
    return expr;

  /* Give the VLA type an artificial TYPE_DECL so a DECL_EXPR can name it,
     which makes gimplify_type_sizes evaluate its size expressions.  */
  tree decl = build_decl (input_location, TYPE_DECL, NULL_TREE, vla);
  DECL_ARTIFICIAL (decl) = 1;
  TYPE_NAME (vla) = decl;
  tree dexp = build_stmt (input_location, DECL_EXPR, decl);
  if (DECL_P (expr))
    {
      /* For a declaration, emit the DECL_EXPR separately.  */
      add_stmt (dexp);
      return NULL_TREE;
    }
  else
    {
      /* Otherwise sequence the DECL_EXPR before EXPR itself.  */
      expr = build2 (COMPOUND_EXPR, type, dexp, expr);
      return expr;
    }
}
1254 :
1255 : /* Perform any pre-gimplification lowering of C++ front end trees to
1256 : GENERIC. */
1257 :
1258 : static tree
1259 884775392 : cp_genericize_r (tree *stmt_p, int *walk_subtrees, void *data)
1260 : {
1261 893581424 : tree stmt = *stmt_p;
1262 893581424 : struct cp_genericize_data *wtd = (struct cp_genericize_data *) data;
1263 893581424 : hash_set<tree> *p_set = wtd->p_set;
1264 :
1265 : /* If in an OpenMP context, note var uses. */
1266 893581424 : if (UNLIKELY (wtd->omp_ctx != NULL)
1267 566991 : && (VAR_P (stmt)
1268 472579 : || TREE_CODE (stmt) == PARM_DECL
1269 461814 : || TREE_CODE (stmt) == RESULT_DECL)
1270 893686636 : && omp_var_to_track (stmt))
1271 11666 : omp_cxx_notice_variable (wtd->omp_ctx, stmt);
1272 :
1273 : /* Don't dereference parms in a thunk, pass the references through. */
1274 37334694 : if ((TREE_CODE (stmt) == CALL_EXPR && call_from_lambda_thunk_p (stmt))
1275 930900427 : || (TREE_CODE (stmt) == AGGR_INIT_EXPR && AGGR_INIT_FROM_THUNK_P (stmt)))
1276 : {
1277 15820 : *walk_subtrees = 0;
1278 15820 : return NULL;
1279 : }
1280 :
1281 : /* Dereference invisible reference parms. */
1282 893565604 : if (wtd->handle_invisiref_parm_p && is_invisiref_parm (stmt))
1283 : {
1284 786886 : *stmt_p = convert_from_reference (stmt);
1285 786886 : p_set->add (*stmt_p);
1286 786886 : *walk_subtrees = 0;
1287 786886 : return NULL;
1288 : }
1289 :
1290 : /* Map block scope extern declarations to visible declarations with the
1291 : same name and type in outer scopes if any. */
1292 892778718 : if (VAR_OR_FUNCTION_DECL_P (stmt) && DECL_LOCAL_DECL_P (stmt))
1293 31056 : if (tree alias = DECL_LOCAL_DECL_ALIAS (stmt))
1294 : {
1295 16345 : if (alias != error_mark_node)
1296 : {
1297 16341 : *stmt_p = alias;
1298 16341 : TREE_USED (alias) |= TREE_USED (stmt);
1299 : }
1300 16345 : *walk_subtrees = 0;
1301 16345 : return NULL;
1302 : }
1303 :
1304 892762373 : if (TREE_CODE (stmt) == INTEGER_CST
1305 105037521 : && TYPE_REF_P (TREE_TYPE (stmt))
1306 161 : && (flag_sanitize & (SANITIZE_NULL | SANITIZE_ALIGNMENT))
1307 892762374 : && !wtd->no_sanitize_p)
1308 : {
1309 1 : ubsan_maybe_instrument_reference (stmt_p);
1310 1 : if (*stmt_p != stmt)
1311 : {
1312 1 : *walk_subtrees = 0;
1313 1 : return NULL_TREE;
1314 : }
1315 : }
1316 :
1317 : /* Other than invisiref parms, don't walk the same tree twice. */
1318 892762372 : if (p_set->contains (stmt))
1319 : {
1320 162443095 : *walk_subtrees = 0;
1321 162443095 : return NULL_TREE;
1322 : }
1323 :
1324 730319277 : switch (TREE_CODE (stmt))
1325 : {
1326 56537212 : case ADDR_EXPR:
1327 56537212 : if (is_invisiref_parm (TREE_OPERAND (stmt, 0)))
1328 : {
1329 : /* If in an OpenMP context, note var uses. */
1330 413268 : if (UNLIKELY (wtd->omp_ctx != NULL)
1331 413268 : && omp_var_to_track (TREE_OPERAND (stmt, 0)))
1332 419 : omp_cxx_notice_variable (wtd->omp_ctx, TREE_OPERAND (stmt, 0));
1333 413268 : *stmt_p = fold_convert (TREE_TYPE (stmt), TREE_OPERAND (stmt, 0));
1334 413268 : *walk_subtrees = 0;
1335 : }
1336 : break;
1337 :
1338 21701181 : case RETURN_EXPR:
1339 21701181 : if (TREE_OPERAND (stmt, 0) && is_invisiref_parm (TREE_OPERAND (stmt, 0)))
1340 : /* Don't dereference an invisiref RESULT_DECL inside a RETURN_EXPR. */
1341 491 : *walk_subtrees = 0;
1342 : break;
1343 :
1344 82688 : case OMP_CLAUSE:
1345 82688 : switch (OMP_CLAUSE_CODE (stmt))
1346 : {
1347 2980 : case OMP_CLAUSE_LASTPRIVATE:
1348 : /* Don't dereference an invisiref in OpenMP clauses. */
1349 2980 : if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt)))
1350 : {
1351 54 : *walk_subtrees = 0;
1352 54 : if (OMP_CLAUSE_LASTPRIVATE_STMT (stmt))
1353 48 : cp_walk_tree (&OMP_CLAUSE_LASTPRIVATE_STMT (stmt),
1354 : cp_genericize_r, data, NULL);
1355 : }
1356 : break;
1357 2276 : case OMP_CLAUSE_PRIVATE:
1358 : /* Don't dereference an invisiref in OpenMP clauses. */
1359 2276 : if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt)))
1360 9 : *walk_subtrees = 0;
1361 2267 : else if (wtd->omp_ctx != NULL)
1362 : {
1363 : /* Private clause doesn't cause any references to the
1364 : var in outer contexts, avoid calling
1365 : omp_cxx_notice_variable for it. */
1366 643 : struct cp_genericize_omp_taskreg *old = wtd->omp_ctx;
1367 643 : wtd->omp_ctx = NULL;
1368 643 : cp_walk_tree (&OMP_CLAUSE_DECL (stmt), cp_genericize_r,
1369 : data, NULL);
1370 643 : wtd->omp_ctx = old;
1371 643 : *walk_subtrees = 0;
1372 : }
1373 : break;
1374 6706 : case OMP_CLAUSE_SHARED:
1375 6706 : case OMP_CLAUSE_FIRSTPRIVATE:
1376 6706 : case OMP_CLAUSE_COPYIN:
1377 6706 : case OMP_CLAUSE_COPYPRIVATE:
1378 6706 : case OMP_CLAUSE_INCLUSIVE:
1379 6706 : case OMP_CLAUSE_EXCLUSIVE:
1380 : /* Don't dereference an invisiref in OpenMP clauses. */
1381 6706 : if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt)))
1382 97 : *walk_subtrees = 0;
1383 : break;
1384 8925 : case OMP_CLAUSE_REDUCTION:
1385 8925 : case OMP_CLAUSE_IN_REDUCTION:
1386 8925 : case OMP_CLAUSE_TASK_REDUCTION:
1387 : /* Don't dereference an invisiref in reduction clause's
1388 : OMP_CLAUSE_DECL either. OMP_CLAUSE_REDUCTION_{INIT,MERGE}
1389 : still needs to be genericized. */
1390 8925 : if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt)))
1391 : {
1392 44 : *walk_subtrees = 0;
1393 44 : if (OMP_CLAUSE_REDUCTION_INIT (stmt))
1394 44 : cp_walk_tree (&OMP_CLAUSE_REDUCTION_INIT (stmt),
1395 : cp_genericize_r, data, NULL);
1396 44 : if (OMP_CLAUSE_REDUCTION_MERGE (stmt))
1397 44 : cp_walk_tree (&OMP_CLAUSE_REDUCTION_MERGE (stmt),
1398 : cp_genericize_r, data, NULL);
1399 : }
1400 : break;
1401 : default:
1402 : break;
1403 : }
1404 : break;
1405 :
1406 : /* Due to the way voidify_wrapper_expr is written, we don't get a chance
1407 : to lower this construct before scanning it, so we need to lower these
1408 : before doing anything else. */
1409 3418399 : case CLEANUP_STMT:
1410 3418399 : *stmt_p = build2_loc (EXPR_LOCATION (stmt),
1411 3418399 : CLEANUP_EH_ONLY (stmt) ? TRY_CATCH_EXPR
1412 : : TRY_FINALLY_EXPR,
1413 : void_type_node,
1414 3418399 : CLEANUP_BODY (stmt),
1415 3418399 : CLEANUP_EXPR (stmt));
1416 3418399 : break;
1417 :
1418 8805298 : case IF_STMT:
1419 8805298 : genericize_if_stmt (stmt_p);
1420 : /* *stmt_p has changed, tail recurse to handle it again. */
1421 8805298 : return cp_genericize_r (stmt_p, walk_subtrees, data);
1422 :
1423 : /* COND_EXPR might have incompatible types in branches if one or both
1424 : arms are bitfields. Fix it up now. */
1425 9416010 : case COND_EXPR:
1426 9416010 : {
1427 9416010 : tree type_left
1428 9416010 : = (TREE_OPERAND (stmt, 1)
1429 9416010 : ? is_bitfield_expr_with_lowered_type (TREE_OPERAND (stmt, 1))
1430 9416010 : : NULL_TREE);
1431 9416010 : tree type_right
1432 9416010 : = (TREE_OPERAND (stmt, 2)
1433 9416010 : ? is_bitfield_expr_with_lowered_type (TREE_OPERAND (stmt, 2))
1434 9416010 : : NULL_TREE);
1435 9416010 : if (type_left
1436 9416047 : && !useless_type_conversion_p (TREE_TYPE (stmt),
1437 37 : TREE_TYPE (TREE_OPERAND (stmt, 1))))
1438 : {
1439 33 : TREE_OPERAND (stmt, 1)
1440 33 : = fold_convert (type_left, TREE_OPERAND (stmt, 1));
1441 33 : gcc_assert (useless_type_conversion_p (TREE_TYPE (stmt),
1442 : type_left));
1443 : }
1444 9416010 : if (type_right
1445 9416028 : && !useless_type_conversion_p (TREE_TYPE (stmt),
1446 18 : TREE_TYPE (TREE_OPERAND (stmt, 2))))
1447 : {
1448 18 : TREE_OPERAND (stmt, 2)
1449 18 : = fold_convert (type_right, TREE_OPERAND (stmt, 2));
1450 18 : gcc_assert (useless_type_conversion_p (TREE_TYPE (stmt),
1451 : type_right));
1452 : }
1453 : }
1454 : break;
1455 :
1456 12797976 : case BIND_EXPR:
1457 12797976 : if (UNLIKELY (wtd->omp_ctx != NULL))
1458 : {
1459 24691 : tree decl;
1460 30333 : for (decl = BIND_EXPR_VARS (stmt); decl; decl = DECL_CHAIN (decl))
1461 5642 : if (VAR_P (decl)
1462 5591 : && !DECL_EXTERNAL (decl)
1463 11233 : && omp_var_to_track (decl))
1464 : {
1465 455 : splay_tree_node n
1466 455 : = splay_tree_lookup (wtd->omp_ctx->variables,
1467 : (splay_tree_key) decl);
1468 455 : if (n == NULL)
1469 455 : splay_tree_insert (wtd->omp_ctx->variables,
1470 : (splay_tree_key) decl,
1471 455 : TREE_STATIC (decl)
1472 : ? OMP_CLAUSE_DEFAULT_SHARED
1473 : : OMP_CLAUSE_DEFAULT_PRIVATE);
1474 : }
1475 : }
1476 12797976 : if (sanitize_flags_p (SANITIZE_NULL | SANITIZE_ALIGNMENT | SANITIZE_VPTR))
1477 : {
1478 : /* The point here is to not sanitize static initializers. */
1479 1975 : bool no_sanitize_p = wtd->no_sanitize_p;
1480 1975 : wtd->no_sanitize_p = true;
1481 1975 : for (tree decl = BIND_EXPR_VARS (stmt);
1482 3613 : decl;
1483 1638 : decl = DECL_CHAIN (decl))
1484 1638 : if (VAR_P (decl)
1485 1638 : && TREE_STATIC (decl)
1486 1638 : && DECL_INITIAL (decl))
1487 8 : cp_walk_tree (&DECL_INITIAL (decl), cp_genericize_r, data, NULL);
1488 1975 : wtd->no_sanitize_p = no_sanitize_p;
1489 : }
1490 12797976 : wtd->bind_expr_stack.safe_push (stmt);
1491 12797976 : cp_walk_tree (&BIND_EXPR_BODY (stmt),
1492 : cp_genericize_r, data, NULL);
1493 12797976 : wtd->bind_expr_stack.pop ();
1494 : break;
1495 :
1496 734 : case ASSERTION_STMT:
1497 734 : case PRECONDITION_STMT:
1498 734 : case POSTCONDITION_STMT:
1499 734 : {
1500 734 : if (tree check = build_contract_check (stmt))
1501 : {
1502 734 : *stmt_p = check;
1503 734 : return cp_genericize_r (stmt_p, walk_subtrees, data);
1504 : }
1505 :
1506 : /* If we didn't build a check, replace it with void_node so we don't
1507 : leak contracts into GENERIC. */
1508 0 : *stmt_p = void_node;
1509 0 : *walk_subtrees = 0;
1510 : }
1511 0 : break;
1512 :
1513 3938 : case USING_STMT:
1514 3938 : {
1515 3938 : tree block = NULL_TREE;
1516 :
1517 : /* Get the innermost inclosing GIMPLE_BIND that has a non NULL
1518 : BLOCK, and append an IMPORTED_DECL to its
1519 : BLOCK_VARS chained list. */
1520 3938 : if (wtd->bind_expr_stack.exists ())
1521 : {
1522 3938 : int i;
1523 3938 : for (i = wtd->bind_expr_stack.length () - 1; i >= 0; i--)
1524 3938 : if ((block = BIND_EXPR_BLOCK (wtd->bind_expr_stack[i])))
1525 : break;
1526 : }
1527 3938 : if (block)
1528 : {
1529 3938 : tree decl = TREE_OPERAND (stmt, 0);
1530 3938 : gcc_assert (decl);
1531 :
1532 3938 : if (undeduced_auto_decl (decl))
1533 : /* Omit from the GENERIC, the back-end can't handle it. */;
1534 : else
1535 : {
1536 3935 : tree using_directive = make_node (IMPORTED_DECL);
1537 3935 : TREE_TYPE (using_directive) = void_type_node;
1538 3935 : DECL_CONTEXT (using_directive) = current_function_decl;
1539 7870 : DECL_SOURCE_LOCATION (using_directive)
1540 3935 : = cp_expr_loc_or_input_loc (stmt);
1541 :
1542 3935 : IMPORTED_DECL_ASSOCIATED_DECL (using_directive) = decl;
1543 3935 : DECL_CHAIN (using_directive) = BLOCK_VARS (block);
1544 3935 : BLOCK_VARS (block) = using_directive;
1545 : }
1546 : }
1547 : /* The USING_STMT won't appear in GENERIC. */
1548 3938 : *stmt_p = build1 (NOP_EXPR, void_type_node, integer_zero_node);
1549 3938 : *walk_subtrees = 0;
1550 : }
1551 3938 : break;
1552 :
1553 14252448 : case DECL_EXPR:
1554 14252448 : if (TREE_CODE (DECL_EXPR_DECL (stmt)) == USING_DECL)
1555 : {
1556 : /* Using decls inside DECL_EXPRs are just dropped on the floor. */
1557 8263 : *stmt_p = build1 (NOP_EXPR, void_type_node, integer_zero_node);
1558 8263 : *walk_subtrees = 0;
1559 : }
1560 : else
1561 : {
1562 14244185 : tree d = DECL_EXPR_DECL (stmt);
1563 14244185 : if (VAR_P (d))
1564 28487258 : gcc_assert (CP_DECL_THREAD_LOCAL_P (d) == DECL_THREAD_LOCAL_P (d));
1565 : }
1566 : break;
1567 :
1568 12604 : case OMP_PARALLEL:
1569 12604 : case OMP_TASK:
1570 12604 : case OMP_TASKLOOP:
1571 12604 : {
1572 12604 : struct cp_genericize_omp_taskreg omp_ctx;
1573 12604 : tree c, decl;
1574 12604 : splay_tree_node n;
1575 :
1576 12604 : *walk_subtrees = 0;
1577 12604 : cp_walk_tree (&OMP_CLAUSES (stmt), cp_genericize_r, data, NULL);
1578 12604 : omp_ctx.is_parallel = TREE_CODE (stmt) == OMP_PARALLEL;
1579 12604 : omp_ctx.default_shared = omp_ctx.is_parallel;
1580 12604 : omp_ctx.outer = wtd->omp_ctx;
1581 12604 : omp_ctx.variables = splay_tree_new (splay_tree_compare_decl_uid, 0, 0);
1582 12604 : wtd->omp_ctx = &omp_ctx;
1583 30369 : for (c = OMP_CLAUSES (stmt); c; c = OMP_CLAUSE_CHAIN (c))
1584 17765 : switch (OMP_CLAUSE_CODE (c))
1585 : {
1586 5288 : case OMP_CLAUSE_SHARED:
1587 5288 : case OMP_CLAUSE_PRIVATE:
1588 5288 : case OMP_CLAUSE_FIRSTPRIVATE:
1589 5288 : case OMP_CLAUSE_LASTPRIVATE:
1590 5288 : decl = OMP_CLAUSE_DECL (c);
1591 5288 : if (decl == error_mark_node || !omp_var_to_track (decl))
1592 : break;
1593 594 : n = splay_tree_lookup (omp_ctx.variables, (splay_tree_key) decl);
1594 594 : if (n != NULL)
1595 : break;
1596 1166 : splay_tree_insert (omp_ctx.variables, (splay_tree_key) decl,
1597 583 : OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
1598 : ? OMP_CLAUSE_DEFAULT_SHARED
1599 : : OMP_CLAUSE_DEFAULT_PRIVATE);
1600 583 : if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_PRIVATE && omp_ctx.outer)
1601 91 : omp_cxx_notice_variable (omp_ctx.outer, decl);
1602 : break;
1603 1951 : case OMP_CLAUSE_DEFAULT:
1604 1951 : if (OMP_CLAUSE_DEFAULT_KIND (c) == OMP_CLAUSE_DEFAULT_SHARED)
1605 796 : omp_ctx.default_shared = true;
1606 : default:
1607 : break;
1608 : }
1609 12604 : if (TREE_CODE (stmt) == OMP_TASKLOOP)
1610 1123 : c_genericize_control_stmt (stmt_p, walk_subtrees, data,
1611 : cp_genericize_r, cp_walk_subtrees);
1612 : else
1613 11481 : cp_walk_tree (&OMP_BODY (stmt), cp_genericize_r, data, NULL);
1614 12604 : wtd->omp_ctx = omp_ctx.outer;
1615 12604 : splay_tree_delete (omp_ctx.variables);
1616 : }
1617 12604 : break;
1618 :
1619 5940 : case OMP_TARGET:
1620 5940 : cfun->has_omp_target = true;
1621 5940 : break;
1622 :
1623 97828 : case TRY_BLOCK:
1624 97828 : {
1625 97828 : *walk_subtrees = 0;
1626 97828 : tree try_block = wtd->try_block;
1627 97828 : wtd->try_block = stmt;
1628 97828 : cp_walk_tree (&TRY_STMTS (stmt), cp_genericize_r, data, NULL);
1629 97828 : wtd->try_block = try_block;
1630 97828 : cp_walk_tree (&TRY_HANDLERS (stmt), cp_genericize_r, data, NULL);
1631 : }
1632 97828 : break;
1633 :
1634 10919671 : case MUST_NOT_THROW_EXPR:
1635 : /* MUST_NOT_THROW_COND might be something else with TM. */
1636 10919671 : if (MUST_NOT_THROW_COND (stmt) == NULL_TREE)
1637 : {
1638 10919653 : *walk_subtrees = 0;
1639 10919653 : tree try_block = wtd->try_block;
1640 10919653 : wtd->try_block = stmt;
1641 10919653 : cp_walk_tree (&TREE_OPERAND (stmt, 0), cp_genericize_r, data, NULL);
1642 10919653 : wtd->try_block = try_block;
1643 : }
1644 : break;
1645 :
1646 91893 : case THROW_EXPR:
1647 91893 : {
1648 91893 : location_t loc = location_of (stmt);
1649 91893 : if (warning_suppressed_p (stmt /* What warning? */))
1650 : /* Never mind. */;
1651 24747 : else if (wtd->try_block)
1652 : {
1653 823 : if (TREE_CODE (wtd->try_block) == MUST_NOT_THROW_EXPR)
1654 : {
1655 21 : auto_diagnostic_group d;
1656 21 : if (warning_at (loc, OPT_Wterminate,
1657 : "%<throw%> will always call %<terminate%>")
1658 12 : && cxx_dialect >= cxx11
1659 43 : && DECL_DESTRUCTOR_P (current_function_decl))
1660 6 : inform (loc, "in C++11 destructors default to %<noexcept%>");
1661 21 : }
1662 : }
1663 : else
1664 : {
1665 103 : if (warn_cxx11_compat && cxx_dialect < cxx11
1666 206 : && DECL_DESTRUCTOR_P (current_function_decl)
1667 1 : && (TYPE_RAISES_EXCEPTIONS (TREE_TYPE (current_function_decl))
1668 : == NULL_TREE)
1669 23925 : && (get_defaulted_eh_spec (current_function_decl)
1670 1 : == empty_except_spec))
1671 1 : warning_at (loc, OPT_Wc__11_compat,
1672 : "in C++11 this %<throw%> will call %<terminate%> "
1673 : "because destructors default to %<noexcept%>");
1674 : }
1675 : }
1676 : break;
1677 :
1678 21476101 : case CONVERT_EXPR:
1679 21476101 : gcc_checking_assert (!AGGREGATE_TYPE_P (TREE_TYPE (stmt)));
1680 21476101 : gcc_assert (!CONVERT_EXPR_VBASE_PATH (stmt));
1681 : break;
1682 :
1683 6808 : case SPACESHIP_EXPR:
1684 6808 : *stmt_p = genericize_spaceship (*stmt_p);
1685 6808 : break;
1686 :
1687 24212 : case PTRMEM_CST:
1688 : /* By the time we get here we're handing off to the back end, so we don't
1689 : need or want to preserve PTRMEM_CST anymore. */
1690 24212 : *stmt_p = cplus_expand_constant (stmt);
1691 24212 : *walk_subtrees = 0;
1692 24212 : break;
1693 :
1694 23419 : case MEM_REF:
1695 : /* For MEM_REF, make sure not to sanitize the second operand even
1696 : if it has reference type. It is just an offset with a type
1697 : holding other information. There is no other processing we
1698 : need to do for INTEGER_CSTs, so just ignore the second argument
1699 : unconditionally. */
1700 23419 : cp_walk_tree (&TREE_OPERAND (stmt, 0), cp_genericize_r, data, NULL);
1701 23419 : *walk_subtrees = 0;
1702 23419 : break;
1703 :
1704 52186983 : case NOP_EXPR:
1705 52186983 : *stmt_p = predeclare_vla (*stmt_p);
1706 52186983 : if (!wtd->no_sanitize_p
1707 52186980 : && sanitize_flags_p (SANITIZE_NULL | SANITIZE_ALIGNMENT)
1708 52195445 : && TYPE_REF_P (TREE_TYPE (stmt)))
1709 1046 : ubsan_maybe_instrument_reference (stmt_p);
1710 : break;
1711 :
1712 37318488 : case CALL_EXPR:
1713 : /* Evaluate function concept checks instead of treating them as
1714 : normal functions. */
1715 37318488 : if (concept_check_p (stmt))
1716 : {
1717 0 : *stmt_p = evaluate_concept_check (stmt);
1718 0 : * walk_subtrees = 0;
1719 0 : break;
1720 : }
1721 :
1722 37318488 : if (!wtd->no_sanitize_p
1723 37318488 : && sanitize_flags_p ((SANITIZE_NULL
1724 : | SANITIZE_ALIGNMENT | SANITIZE_VPTR)))
1725 : {
1726 10042 : tree fn = CALL_EXPR_FN (stmt);
1727 10042 : if (fn != NULL_TREE
1728 6549 : && !error_operand_p (fn)
1729 6549 : && INDIRECT_TYPE_P (TREE_TYPE (fn))
1730 16591 : && TREE_CODE (TREE_TYPE (TREE_TYPE (fn))) == METHOD_TYPE)
1731 : {
1732 3234 : bool is_ctor
1733 3234 : = TREE_CODE (fn) == ADDR_EXPR
1734 3118 : && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL
1735 9470 : && DECL_CONSTRUCTOR_P (TREE_OPERAND (fn, 0));
1736 3234 : if (sanitize_flags_p (SANITIZE_NULL | SANITIZE_ALIGNMENT))
1737 2448 : ubsan_maybe_instrument_member_call (stmt, is_ctor);
1738 3234 : if (sanitize_flags_p (SANITIZE_VPTR) && !is_ctor)
1739 2436 : cp_ubsan_maybe_instrument_member_call (stmt);
1740 : }
1741 6808 : else if (fn == NULL_TREE
1742 3493 : && CALL_EXPR_IFN (stmt) == IFN_UBSAN_NULL
1743 2576 : && TREE_CODE (CALL_EXPR_ARG (stmt, 0)) == INTEGER_CST
1744 6816 : && TYPE_REF_P (TREE_TYPE (CALL_EXPR_ARG (stmt, 0))))
1745 8 : *walk_subtrees = 0;
1746 : }
1747 : /* Fall through. */
1748 39578127 : case AGGR_INIT_EXPR:
1749 : /* For calls to a multi-versioned function, overload resolution
1750 : returns the function with the highest target priority, that is,
1751 : the version that will checked for dispatching first. If this
1752 : version is inlinable, a direct call to this version can be made
1753 : otherwise the call should go through the dispatcher. */
1754 39578127 : {
1755 39578127 : tree fn = cp_get_callee_fndecl_nofold (stmt);
1756 38891800 : if (fn && DECL_FUNCTION_VERSIONED (fn)
1757 39578283 : && (current_function_decl == NULL
1758 156 : || !targetm.target_option.can_inline_p (current_function_decl,
1759 : fn)))
1760 136 : if (tree dis = get_function_version_dispatcher (fn))
1761 : {
1762 136 : mark_versions_used (dis);
1763 136 : dis = build_address (dis);
1764 136 : if (TREE_CODE (stmt) == CALL_EXPR)
1765 132 : CALL_EXPR_FN (stmt) = dis;
1766 : else
1767 4 : AGGR_INIT_EXPR_FN (stmt) = dis;
1768 : }
1769 : }
1770 : break;
1771 :
1772 6292956 : case TARGET_EXPR:
1773 6292956 : if (TARGET_EXPR_INITIAL (stmt)
1774 6292956 : && TREE_CODE (TARGET_EXPR_INITIAL (stmt)) == CONSTRUCTOR
1775 7356344 : && CONSTRUCTOR_PLACEHOLDER_BOUNDARY (TARGET_EXPR_INITIAL (stmt)))
1776 122 : TARGET_EXPR_NO_ELIDE (stmt) = 1;
1777 : break;
1778 :
1779 33 : case TEMPLATE_ID_EXPR:
1780 33 : gcc_assert (concept_check_p (stmt));
1781 : /* Emit the value of the concept check. */
1782 33 : *stmt_p = evaluate_concept_check (stmt);
1783 33 : walk_subtrees = 0;
1784 33 : break;
1785 :
1786 4444 : case OMP_DISTRIBUTE:
1787 : /* Need to explicitly instantiate copy ctors on class iterators of
1788 : composite distribute parallel for. */
1789 4444 : if (OMP_FOR_INIT (*stmt_p) == NULL_TREE)
1790 : {
1791 3906 : tree *data[4] = { NULL, NULL, NULL, NULL };
1792 3906 : tree inner = walk_tree (&OMP_FOR_BODY (*stmt_p),
1793 : find_combined_omp_for, data, NULL);
1794 3906 : if (inner != NULL_TREE
1795 3870 : && TREE_CODE (inner) == OMP_FOR)
1796 : {
1797 4606 : for (int i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (inner)); i++)
1798 2885 : if (OMP_FOR_ORIG_DECLS (inner)
1799 2885 : && TREE_CODE (TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (inner),
1800 : i)) == TREE_LIST
1801 2906 : && TREE_PURPOSE (TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (inner),
1802 : i)))
1803 : {
1804 9 : tree orig = TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (inner), i);
1805 : /* Class iterators aren't allowed on OMP_SIMD, so the only
1806 : case we need to solve is distribute parallel for. */
1807 9 : gcc_assert (TREE_CODE (inner) == OMP_FOR
1808 : && data[1]);
1809 9 : tree orig_decl = TREE_PURPOSE (orig);
1810 9 : tree c, cl = NULL_TREE;
1811 9 : for (c = OMP_FOR_CLAUSES (inner);
1812 10 : c; c = OMP_CLAUSE_CHAIN (c))
1813 9 : if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_PRIVATE
1814 2 : || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE)
1815 10 : && OMP_CLAUSE_DECL (c) == orig_decl)
1816 : {
1817 : cl = c;
1818 : break;
1819 : }
1820 9 : if (cl == NULL_TREE)
1821 : {
1822 1 : for (c = OMP_PARALLEL_CLAUSES (*data[1]);
1823 1 : c; c = OMP_CLAUSE_CHAIN (c))
1824 1 : if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_PRIVATE
1825 1 : && OMP_CLAUSE_DECL (c) == orig_decl)
1826 : {
1827 : cl = c;
1828 : break;
1829 : }
1830 : }
1831 1 : if (cl)
1832 : {
1833 9 : orig_decl = require_complete_type (orig_decl);
1834 9 : tree inner_type = TREE_TYPE (orig_decl);
1835 9 : if (orig_decl == error_mark_node)
1836 0 : continue;
1837 9 : if (TYPE_REF_P (TREE_TYPE (orig_decl)))
1838 0 : inner_type = TREE_TYPE (inner_type);
1839 :
1840 9 : while (TREE_CODE (inner_type) == ARRAY_TYPE)
1841 0 : inner_type = TREE_TYPE (inner_type);
1842 9 : get_copy_ctor (inner_type, tf_warning_or_error);
1843 : }
1844 : }
1845 : }
1846 : }
1847 : /* FALLTHRU */
1848 :
1849 32843719 : case FOR_STMT:
1850 32843719 : case WHILE_STMT:
1851 32843719 : case DO_STMT:
1852 32843719 : case SWITCH_STMT:
1853 32843719 : case CONTINUE_STMT:
1854 32843719 : case BREAK_STMT:
1855 32843719 : case OMP_FOR:
1856 32843719 : case OMP_SIMD:
1857 32843719 : case OMP_LOOP:
1858 32843719 : case OACC_LOOP:
1859 32843719 : case STATEMENT_LIST:
1860 : /* These cases are handled by shared code. */
1861 32843719 : c_genericize_control_stmt (stmt_p, walk_subtrees, data,
1862 : cp_genericize_r, cp_walk_subtrees);
1863 32843719 : break;
1864 :
1865 338 : case BIT_CAST_EXPR:
1866 338 : *stmt_p = build1_loc (EXPR_LOCATION (stmt), VIEW_CONVERT_EXPR,
1867 338 : TREE_TYPE (stmt), TREE_OPERAND (stmt, 0));
1868 338 : break;
1869 :
1870 439742761 : default:
1871 439742761 : if (IS_TYPE_OR_DECL_P (stmt))
1872 126593531 : *walk_subtrees = 0;
1873 : break;
1874 : }
1875 :
1876 721513245 : p_set->add (*stmt_p);
1877 :
1878 721513245 : return NULL;
1879 : }
1880 :
1881 : /* Lower C++ front end trees to GENERIC in T_P. */
1882 :
1883 : static void
1884 25643392 : cp_genericize_tree (tree* t_p, bool handle_invisiref_parm_p)
1885 : {
1886 25643392 : struct cp_genericize_data wtd;
1887 :
1888 25643392 : wtd.p_set = new hash_set<tree>;
1889 25643392 : wtd.bind_expr_stack.create (0);
1890 25643392 : wtd.omp_ctx = NULL;
1891 25643392 : wtd.try_block = NULL_TREE;
1892 25643392 : wtd.no_sanitize_p = false;
1893 25643392 : wtd.handle_invisiref_parm_p = handle_invisiref_parm_p;
1894 25643392 : cp_walk_tree (t_p, cp_genericize_r, &wtd, NULL);
1895 51286784 : delete wtd.p_set;
1896 25643392 : if (sanitize_flags_p (SANITIZE_VPTR))
1897 4296 : cp_ubsan_instrument_member_accesses (t_p);
1898 25643392 : }
1899 :
1900 : /* If a function that should end with a return in non-void
1901 : function doesn't obviously end with return, add ubsan
1902 : instrumentation code to verify it at runtime. If -fsanitize=return
1903 : is not enabled, instrument __builtin_unreachable. */
1904 :
static void
cp_maybe_instrument_return (tree fndecl)
{
  /* Nothing to do for void-returning functions, constructors and
     destructors, or when the target suppresses the missing-return
     warning for this function.  */
  if (VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fndecl)))
      || DECL_CONSTRUCTOR_P (fndecl)
      || DECL_DESTRUCTOR_P (fndecl)
      || !targetm.warn_func_return (fndecl))
    return;

  if (!sanitize_flags_p (SANITIZE_RETURN, fndecl)
      /* Don't add __builtin_unreachable () if not optimizing, it will not
	 improve any optimizations in that case, just break UB code.
	 Don't add it if -fsanitize=unreachable -fno-sanitize=return either,
	 UBSan covers this with ubsan_instrument_return above where sufficient
	 information is provided, while the __builtin_unreachable () below
	 if return sanitization is disabled will just result in hard to
	 understand runtime error without location.  */
      && ((!optimize && !flag_unreachable_traps)
	  || sanitize_flags_p (SANITIZE_UNREACHABLE, fndecl)))
    return;

  /* Walk down to the last statement of the function body, looking
     through BIND_EXPR/TRY_FINALLY/CLEANUP_POINT wrappers and skipping
     trailing DEBUG_BEGIN_STMTs.  If that statement is a RETURN_EXPR
     there is nothing to instrument.  */
  tree t = DECL_SAVED_TREE (fndecl);
  while (t)
    {
      switch (TREE_CODE (t))
	{
	case BIND_EXPR:
	  t = BIND_EXPR_BODY (t);
	  continue;
	case TRY_FINALLY_EXPR:
	case CLEANUP_POINT_EXPR:
	  t = TREE_OPERAND (t, 0);
	  continue;
	case STATEMENT_LIST:
	  {
	    /* Look backwards from the end for the last statement that
	       is not a debug marker.  */
	    tree_stmt_iterator i = tsi_last (t);
	    while (!tsi_end_p (i))
	      {
		tree p = tsi_stmt (i);
		if (TREE_CODE (p) != DEBUG_BEGIN_STMT)
		  break;
		tsi_prev (&i);
	      }
	    if (!tsi_end_p (i))
	      {
		t = tsi_stmt (i);
		continue;
	      }
	  }
	  break;
	case RETURN_EXPR:
	  /* Function already ends in an explicit return.  */
	  return;
	default:
	  break;
	}
      break;
    }
  if (t == NULL_TREE)
    return;
  /* Append the instrumentation after the body (inside the outermost
     BIND_EXPR if there is one).  */
  tree *p = &DECL_SAVED_TREE (fndecl);
  if (TREE_CODE (*p) == BIND_EXPR)
    p = &BIND_EXPR_BODY (*p);

  location_t loc = DECL_SOURCE_LOCATION (fndecl);
  if (sanitize_flags_p (SANITIZE_RETURN, fndecl))
    t = ubsan_instrument_return (loc);
  else
    t = build_builtin_unreachable (BUILTINS_LOCATION);

  append_to_statement_list (t, p);
}
/* Genericize the body of FNDECL: rewrite invisible-reference parms and
   return value, then lower C++-specific trees to GENERIC, deferring to
   the shared c_genericize machinery for the rest.  */

void
cp_genericize (tree fndecl)
{
  tree t;

  /* Fix up the types of parms passed by invisible reference.  */
  for (t = DECL_ARGUMENTS (fndecl); t; t = DECL_CHAIN (t))
    if (TREE_ADDRESSABLE (TREE_TYPE (t)))
      {
	/* If a function's arguments are copied to create a thunk,
	   then DECL_BY_REFERENCE will be set -- but the type of the
	   argument will be a pointer type, so we will never get
	   here.  */
	gcc_assert (!DECL_BY_REFERENCE (t));
	gcc_assert (DECL_ARG_TYPE (t) != TREE_TYPE (t));
	TREE_TYPE (t) = DECL_ARG_TYPE (t);
	DECL_BY_REFERENCE (t) = 1;
	TREE_ADDRESSABLE (t) = 0;
	relayout_decl (t);
      }

  /* Do the same for the return value.  */
  if (TREE_ADDRESSABLE (TREE_TYPE (DECL_RESULT (fndecl))))
    {
      t = DECL_RESULT (fndecl);
      TREE_TYPE (t) = build_reference_type (TREE_TYPE (t));
      DECL_BY_REFERENCE (t) = 1;
      TREE_ADDRESSABLE (t) = 0;
      relayout_decl (t);
      if (DECL_NAME (t))
	{
	  /* Adjust DECL_VALUE_EXPR of the original var.  Find the
	     user variable in the outermost block that aliases the
	     result decl and rewrap it as a reference use.  */
	  tree outer = outer_curly_brace_block (current_function_decl);
	  tree var;

	  if (outer)
	    for (var = BLOCK_VARS (outer); var; var = DECL_CHAIN (var))
	      if (VAR_P (var)
		  && DECL_NAME (t) == DECL_NAME (var)
		  && DECL_HAS_VALUE_EXPR_P (var)
		  && DECL_VALUE_EXPR (var) == t)
		{
		  tree val = convert_from_reference (t);
		  SET_DECL_VALUE_EXPR (var, val);
		  break;
		}
	}
    }

  /* If we're a clone, the body is already GIMPLE.  */
  if (DECL_CLONED_FUNCTION_P (fndecl))
    return;

  /* Allow cp_genericize calls to be nested.  */
  bc_state_t save_state;
  save_bc_state (&save_state);

  /* We do want to see every occurrence of the parms, so we can't just use
     walk_tree's hash functionality.  */
  cp_genericize_tree (&DECL_SAVED_TREE (fndecl), true);

  /* Add a runtime check or trap if control can fall off the end of a
     value-returning function.  */
  cp_maybe_instrument_return (fndecl);

  /* Do everything else.  */
  c_genericize (fndecl);
  restore_bc_state (&save_state);
}
2044 :
2045 : /* Build code to apply FN to each member of ARG1 and ARG2. FN may be
2046 : NULL if there is in fact nothing to do. ARG2 may be null if FN
2047 : actually only takes one argument. */
2048 :
static tree
cxx_omp_clause_apply_fn (tree fn, tree arg1, tree arg2)
{
  tree defparm, parm, t;
  int i = 0;
  int nargs;
  tree *argarray;

  if (fn == NULL)
    return NULL;

  nargs = list_length (DECL_ARGUMENTS (fn));
  argarray = XALLOCAVEC (tree, nargs);

  /* DEFPARM walks the parameter types past the object argument(s) so
     that any remaining entries are default arguments to supply.  */
  defparm = TREE_CHAIN (TYPE_ARG_TYPES (TREE_TYPE (fn)));
  if (arg2)
    defparm = TREE_CHAIN (defparm);

  bool is_method = TREE_CODE (TREE_TYPE (fn)) == METHOD_TYPE;
  if (TREE_CODE (TREE_TYPE (arg1)) == ARRAY_TYPE)
    {
      /* Array case: build an explicit loop that applies FN to each
	 base element of ARG1 (and the matching element of ARG2).  */
      tree inner_type = TREE_TYPE (arg1);
      tree start1, end1, p1;
      tree start2 = NULL, p2 = NULL;
      tree ret = NULL, lab;

      start1 = arg1;
      start2 = arg2;
      /* Descend through nested array types to the first base element.  */
      do
	{
	  inner_type = TREE_TYPE (inner_type);
	  start1 = build4 (ARRAY_REF, inner_type, start1,
			   size_zero_node, NULL, NULL);
	  if (arg2)
	    start2 = build4 (ARRAY_REF, inner_type, start2,
			     size_zero_node, NULL, NULL);
	}
      while (TREE_CODE (inner_type) == ARRAY_TYPE);
      start1 = build_fold_addr_expr_loc (input_location, start1);
      if (arg2)
	start2 = build_fold_addr_expr_loc (input_location, start2);

      /* END1 is the one-past-the-end address of ARG1.  */
      end1 = TYPE_SIZE_UNIT (TREE_TYPE (arg1));
      end1 = fold_build_pointer_plus (start1, end1);

      /* P1/P2 are the loop induction pointers.  */
      p1 = create_tmp_var (TREE_TYPE (start1));
      t = build2 (MODIFY_EXPR, TREE_TYPE (p1), p1, start1);
      append_to_statement_list (t, &ret);

      if (arg2)
	{
	  p2 = create_tmp_var (TREE_TYPE (start2));
	  t = build2 (MODIFY_EXPR, TREE_TYPE (p2), p2, start2);
	  append_to_statement_list (t, &ret);
	}

      /* Loop head label.  */
      lab = create_artificial_label (input_location);
      t = build1 (LABEL_EXPR, void_type_node, lab);
      append_to_statement_list (t, &ret);

      argarray[i++] = p1;
      if (arg2)
	argarray[i++] = p2;
      /* Handle default arguments.  */
      for (parm = defparm; parm && parm != void_list_node;
	   parm = TREE_CHAIN (parm), i++)
	argarray[i] = convert_default_arg (TREE_VALUE (parm),
					   TREE_PURPOSE (parm), fn,
					   i - is_method, tf_warning_or_error);
      t = build_call_a (fn, i, argarray);
      t = fold_convert (void_type_node, t);
      t = fold_build_cleanup_point_expr (TREE_TYPE (t), t);
      append_to_statement_list (t, &ret);

      /* Advance the induction pointer(s) by one element.  */
      t = fold_build_pointer_plus (p1, TYPE_SIZE_UNIT (inner_type));
      t = build2 (MODIFY_EXPR, TREE_TYPE (p1), p1, t);
      append_to_statement_list (t, &ret);

      if (arg2)
	{
	  t = fold_build_pointer_plus (p2, TYPE_SIZE_UNIT (inner_type));
	  t = build2 (MODIFY_EXPR, TREE_TYPE (p2), p2, t);
	  append_to_statement_list (t, &ret);
	}

      /* Jump back to the loop head until P1 reaches END1.  */
      t = build2 (NE_EXPR, boolean_type_node, p1, end1);
      t = build3 (COND_EXPR, void_type_node, t, build_and_jump (&lab), NULL);
      append_to_statement_list (t, &ret);

      return ret;
    }
  else
    {
      /* Scalar case: a single call on the address(es).  */
      argarray[i++] = build_fold_addr_expr_loc (input_location, arg1);
      if (arg2)
	argarray[i++] = build_fold_addr_expr_loc (input_location, arg2);
      /* Handle default arguments.  */
      for (parm = defparm; parm && parm != void_list_node;
	   parm = TREE_CHAIN (parm), i++)
	argarray[i] = convert_default_arg (TREE_VALUE (parm),
					   TREE_PURPOSE (parm), fn,
					   i - is_method, tf_warning_or_error);
      t = build_call_a (fn, i, argarray);
      t = fold_convert (void_type_node, t);
      return fold_build_cleanup_point_expr (TREE_TYPE (t), t);
    }
}
2156 :
2157 : /* Return code to initialize DECL with its default constructor, or
2158 : NULL if there's nothing to do. */
2159 :
2160 : tree
2161 44353 : cxx_omp_clause_default_ctor (tree clause, tree decl, tree /*outer*/)
2162 : {
2163 44353 : tree info = CP_OMP_CLAUSE_INFO (clause);
2164 44353 : tree ret = NULL;
2165 :
2166 44353 : if (info)
2167 1461 : ret = cxx_omp_clause_apply_fn (TREE_VEC_ELT (info, 0), decl, NULL);
2168 :
2169 44353 : return ret;
2170 : }
2171 :
2172 : /* Return code to initialize DST with a copy constructor from SRC. */
2173 :
2174 : tree
2175 11974 : cxx_omp_clause_copy_ctor (tree clause, tree dst, tree src)
2176 : {
2177 11974 : tree info = CP_OMP_CLAUSE_INFO (clause);
2178 11974 : tree ret = NULL;
2179 :
2180 11974 : if (info)
2181 295 : ret = cxx_omp_clause_apply_fn (TREE_VEC_ELT (info, 0), dst, src);
2182 295 : if (ret == NULL)
2183 11751 : ret = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
2184 :
2185 11974 : return ret;
2186 : }
2187 :
2188 : /* Similarly, except use an assignment operator instead. */
2189 :
2190 : tree
2191 13748 : cxx_omp_clause_assign_op (tree clause, tree dst, tree src)
2192 : {
2193 13748 : tree info = CP_OMP_CLAUSE_INFO (clause);
2194 13748 : tree ret = NULL;
2195 :
2196 13748 : if (info)
2197 812 : ret = cxx_omp_clause_apply_fn (TREE_VEC_ELT (info, 2), dst, src);
2198 812 : if (ret == NULL)
2199 12959 : ret = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
2200 :
2201 13748 : return ret;
2202 : }
2203 :
2204 : /* Return code to destroy DECL. */
2205 :
2206 : tree
2207 64663 : cxx_omp_clause_dtor (tree clause, tree decl)
2208 : {
2209 64663 : tree info = CP_OMP_CLAUSE_INFO (clause);
2210 64663 : tree ret = NULL;
2211 :
2212 64663 : if (info)
2213 1319 : ret = cxx_omp_clause_apply_fn (TREE_VEC_ELT (info, 1), decl, NULL);
2214 :
2215 64663 : return ret;
2216 : }
2217 :
2218 : /* True if OpenMP should privatize what this DECL points to rather
2219 : than the DECL itself. */
2220 :
2221 : bool
2222 814385 : cxx_omp_privatize_by_reference (const_tree decl)
2223 : {
2224 814385 : return (TYPE_REF_P (TREE_TYPE (decl))
2225 814385 : || is_invisiref_parm (decl));
2226 : }
2227 :
2228 : /* Return true if DECL is const qualified var having no mutable member. */
bool
cxx_omp_const_qual_no_mutable (tree decl)
{
  tree type = TREE_TYPE (decl);
  if (TYPE_REF_P (type))
    {
      /* Only parms passed by invisible reference are looked through;
	 other references don't qualify.  */
      if (!is_invisiref_parm (decl))
	return false;
      type = TREE_TYPE (type);

      if (TREE_CODE (decl) == RESULT_DECL && DECL_NAME (decl))
	{
	  /* NVR doesn't preserve const qualification of the
	     variable's type.  */
	  tree outer = outer_curly_brace_block (current_function_decl);
	  tree var;

	  /* Search the outermost block for the named variable that the
	     result decl stands in for, and take its (possibly const)
	     type instead.  */
	  if (outer)
	    for (var = BLOCK_VARS (outer); var; var = DECL_CHAIN (var))
	      if (VAR_P (var)
		  && DECL_NAME (decl) == DECL_NAME (var)
		  && (TYPE_MAIN_VARIANT (type)
		      == TYPE_MAIN_VARIANT (TREE_TYPE (var))))
		{
		  if (TYPE_READONLY (TREE_TYPE (var)))
		    type = TREE_TYPE (var);
		  break;
		}
	}
    }

  if (type == error_mark_node)
    return false;

  /* Variables with const-qualified type having no mutable member
     are predetermined shared.  */
  if (TYPE_READONLY (type) && !cp_has_mutable_p (type))
    return true;

  return false;
}
2270 :
2271 : /* OMP_CLAUSE_DEFAULT_UNSPECIFIED unless OpenMP sharing attribute
2272 : of DECL is predetermined. */
2273 :
2274 : enum omp_clause_default_kind
2275 54904 : cxx_omp_predetermined_sharing_1 (tree decl)
2276 : {
2277 : /* Static data members are predetermined shared. */
2278 54904 : if (TREE_STATIC (decl))
2279 : {
2280 16022 : tree ctx = CP_DECL_CONTEXT (decl);
2281 16022 : if (TYPE_P (ctx) && MAYBE_CLASS_TYPE_P (ctx))
2282 : return OMP_CLAUSE_DEFAULT_SHARED;
2283 :
2284 15881 : if (c_omp_predefined_variable (decl))
2285 : return OMP_CLAUSE_DEFAULT_SHARED;
2286 : }
2287 :
2288 : /* this may not be specified in data-sharing clauses, still we need
2289 : to predetermined it firstprivate. */
2290 54715 : if (decl == current_class_ptr)
2291 122 : return OMP_CLAUSE_DEFAULT_FIRSTPRIVATE;
2292 :
2293 : return OMP_CLAUSE_DEFAULT_UNSPECIFIED;
2294 : }
2295 :
2296 : /* Likewise, but also include the artificial vars. We don't want to
2297 : disallow the artificial vars being mentioned in explicit clauses,
2298 : as we use artificial vars e.g. for loop constructs with random
2299 : access iterators other than pointers, but during gimplification
2300 : we want to treat them as predetermined. */
2301 :
2302 : enum omp_clause_default_kind
2303 32608 : cxx_omp_predetermined_sharing (tree decl)
2304 : {
2305 32608 : enum omp_clause_default_kind ret = cxx_omp_predetermined_sharing_1 (decl);
2306 32608 : if (ret != OMP_CLAUSE_DEFAULT_UNSPECIFIED)
2307 : return ret;
2308 :
2309 : /* Predetermine artificial variables holding integral values, those
2310 : are usually result of gimplify_one_sizepos or SAVE_EXPR
2311 : gimplification. */
2312 32353 : if (VAR_P (decl)
2313 21432 : && DECL_ARTIFICIAL (decl)
2314 6076 : && INTEGRAL_TYPE_P (TREE_TYPE (decl))
2315 32776 : && !(DECL_LANG_SPECIFIC (decl)
2316 2 : && DECL_OMP_PRIVATIZED_MEMBER (decl)))
2317 : return OMP_CLAUSE_DEFAULT_SHARED;
2318 :
2319 : /* Similarly for typeinfo symbols. */
2320 31932 : if (VAR_P (decl) && DECL_ARTIFICIAL (decl) && DECL_TINFO_P (decl))
2321 : return OMP_CLAUSE_DEFAULT_SHARED;
2322 :
2323 : return OMP_CLAUSE_DEFAULT_UNSPECIFIED;
2324 : }
2325 :
2326 : enum omp_clause_defaultmap_kind
2327 6201 : cxx_omp_predetermined_mapping (tree decl)
2328 : {
2329 : /* Predetermine artificial variables holding integral values, those
2330 : are usually result of gimplify_one_sizepos or SAVE_EXPR
2331 : gimplification. */
2332 6201 : if (VAR_P (decl)
2333 1115 : && DECL_ARTIFICIAL (decl)
2334 86 : && INTEGRAL_TYPE_P (TREE_TYPE (decl))
2335 6277 : && !(DECL_LANG_SPECIFIC (decl)
2336 5 : && DECL_OMP_PRIVATIZED_MEMBER (decl)))
2337 : return OMP_CLAUSE_DEFAULTMAP_FIRSTPRIVATE;
2338 :
2339 6125 : if (c_omp_predefined_variable (decl))
2340 0 : return OMP_CLAUSE_DEFAULTMAP_TO;
2341 :
2342 : return OMP_CLAUSE_DEFAULTMAP_CATEGORY_UNSPECIFIED;
2343 : }
2344 :
2345 : /* Finalize an implicitly determined clause. */
2346 :
void
cxx_omp_finish_clause (tree c, gimple_seq *, bool /* openacc */)
{
  tree decl, inner_type;
  bool make_shared = false;

  /* Only firstprivate, private, and lastprivate-on-a-loop-IV clauses
     need C++ special-member handling here.  */
  if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_FIRSTPRIVATE
      && OMP_CLAUSE_CODE (c) != OMP_CLAUSE_PRIVATE
      && (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_LASTPRIVATE
	  || !OMP_CLAUSE_LASTPRIVATE_LOOP_IV (c)))
    return;

  decl = OMP_CLAUSE_DECL (c);
  decl = require_complete_type (decl);
  inner_type = TREE_TYPE (decl);
  if (decl == error_mark_node)
    make_shared = true;
  else if (TYPE_REF_P (TREE_TYPE (decl)))
    inner_type = TREE_TYPE (inner_type);

  /* We're interested in the base element, not arrays.  */
  while (TREE_CODE (inner_type) == ARRAY_TYPE)
    inner_type = TREE_TYPE (inner_type);

  /* Check for special function availability by building a call to one.
     Save the results, because later we won't be in the right context
     for making these queries.  */
  bool first = OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE;
  bool last = OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE;
  if (!make_shared
      && CLASS_TYPE_P (inner_type)
      && cxx_omp_create_clause_info (c, inner_type, !first, first, last,
				     true))
    make_shared = true;

  /* If the clause can't be handled (errors, or the needed special
     member is unavailable), degrade it to a plain shared clause.  */
  if (make_shared)
    {
      OMP_CLAUSE_CODE (c) = OMP_CLAUSE_SHARED;
      OMP_CLAUSE_SHARED_FIRSTPRIVATE (c) = 0;
      OMP_CLAUSE_SHARED_READONLY (c) = 0;
    }
}
2389 :
2390 : /* Return true if DECL's DECL_VALUE_EXPR (if any) should be
2391 : disregarded in OpenMP construct, because it is going to be
2392 : remapped during OpenMP lowering. SHARED is true if DECL
2393 : is going to be shared, false if it is going to be privatized. */
2394 :
2395 : bool
2396 548058 : cxx_omp_disregard_value_expr (tree decl, bool shared)
2397 : {
2398 548058 : if (shared)
2399 : return false;
2400 390027 : if (VAR_P (decl)
2401 369250 : && DECL_HAS_VALUE_EXPR_P (decl)
2402 9095 : && DECL_ARTIFICIAL (decl)
2403 8593 : && DECL_LANG_SPECIFIC (decl)
2404 398070 : && DECL_OMP_PRIVATIZED_MEMBER (decl))
2405 : return true;
2406 384496 : if (VAR_P (decl) && DECL_CONTEXT (decl) && is_capture_proxy (decl))
2407 : return true;
2408 : return false;
2409 : }
2410 :
2411 : /* Fold expression X which is used as an rvalue if RVAL is true. */
2412 :
static tree
cp_fold_maybe_rvalue (tree x, bool rval, fold_flags_t flags)
{
  /* Iterate to a fixpoint: after substituting a constant decl by its
     value, the result may fold further.  */
  while (true)
    {
      x = cp_fold (x, flags);
      if (rval)
	x = mark_rvalue_use (x);
      if (rval && DECL_P (x)
	  && !TYPE_REF_P (TREE_TYPE (x)))
	{
	  /* Replace a constant decl (e.g. a const variable with a
	     known initializer) by its value and fold again.  */
	  tree v = decl_constant_value (x);
	  if (v != x && v != error_mark_node)
	    {
	      x = v;
	      continue;
	    }
	}
      break;
    }
  return x;
}
2435 :
/* External entry point: fold X, as an rvalue if RVAL, with no
   special fold flags.  */

tree
cp_fold_maybe_rvalue (tree x, bool rval)
{
  return cp_fold_maybe_rvalue (x, rval, ff_none);
}
2441 :
2442 : /* Fold expression X which is used as an rvalue. */
2443 :
static tree
cp_fold_rvalue (tree x, fold_flags_t flags)
{
  /* Rvalue folding is maybe-rvalue folding with RVAL fixed to true.  */
  return cp_fold_maybe_rvalue (x, true, flags);
}
2449 :
/* External entry point: fold X as an rvalue with no special flags.  */

tree
cp_fold_rvalue (tree x)
{
  return cp_fold_rvalue (x, ff_none);
}
2455 :
2456 : /* Perform folding on expression X. */
2457 :
static tree
cp_fully_fold (tree x, mce_value manifestly_const_eval)
{
  /* Folding inside a template would lose template-dependent info.  */
  if (processing_template_decl)
    return x;
  /* FIXME cp_fold ought to be a superset of maybe_constant_value so we don't
     have to call both.  */
  if (cxx_dialect >= cxx11)
    {
      x = maybe_constant_value (x, /*decl=*/NULL_TREE, manifestly_const_eval);
      /* Sometimes we are given a CONSTRUCTOR but the call above wraps it into
	 a TARGET_EXPR; undo that here.  */
      if (TREE_CODE (x) == TARGET_EXPR)
	x = TARGET_EXPR_INITIAL (x);
      else if (TREE_CODE (x) == VIEW_CONVERT_EXPR
	       && TREE_CODE (TREE_OPERAND (x, 0)) == CONSTRUCTOR
	       && TREE_TYPE (TREE_OPERAND (x, 0)) == TREE_TYPE (x))
	x = TREE_OPERAND (x, 0);
    }
  /* Pass along whether we are definitely outside a manifestly
     constant-evaluated context, so cp_fold caches accordingly.  */
  fold_flags_t flags = ff_none;
  if (manifestly_const_eval == mce_false)
    flags |= ff_mce_false;
  return cp_fold_rvalue (x, flags);
}
2482 :
/* External entry point: fully fold X without assuming anything about
   whether we are in a manifestly constant-evaluated context.  */

tree
cp_fully_fold (tree x)
{
  return cp_fully_fold (x, mce_unknown);
}
2488 :
2489 : /* Likewise, but also fold recursively, which cp_fully_fold doesn't perform
2490 : in some cases. */
2491 :
2492 : tree
2493 18516058 : cp_fully_fold_init (tree x)
2494 : {
2495 18516058 : if (processing_template_decl)
2496 969426 : return x;
2497 17546632 : x = cp_fully_fold (x, mce_false);
2498 17546632 : cp_fold_data data (ff_mce_false);
2499 17546632 : cp_walk_tree (&x, cp_fold_r, &data, NULL);
2500 17546632 : return x;
2501 18516058 : }
2502 :
2503 : /* c-common interface to cp_fold. If IN_INIT, this is in a static initializer
2504 : and certain changes are made to the folding done. Or should be (FIXME). We
2505 : never touch maybe_const, as it is only used for the C front-end
2506 : C_MAYBE_CONST_EXPR. */
2507 :
tree
c_fully_fold (tree x, bool /*in_init*/, bool */*maybe_const*/, bool lval)
{
  /* Fold as an rvalue unless the caller needs an lvalue.  */
  return cp_fold_maybe_rvalue (x, !lval);
}
2513 :
2514 : static GTY((deletable)) hash_map<tree, tree> *fold_caches[2];
2515 :
2516 : /* Subroutine of cp_fold. Returns which fold cache to use according
2517 : to the given flags. We need multiple caches since the result of
2518 : folding may depend on which flags are used. */
2519 :
2520 : static hash_map<tree, tree> *&
2521 1341482748 : get_fold_cache (fold_flags_t flags)
2522 : {
2523 0 : if (flags & ff_mce_false)
2524 : return fold_caches[1];
2525 : else
2526 205488774 : return fold_caches[0];
2527 : }
2528 :
2529 : /* Dispose of the whole FOLD_CACHE. */
2530 :
2531 : void
2532 13101245 : clear_fold_cache (void)
2533 : {
2534 39303735 : for (auto& fold_cache : fold_caches)
2535 26202490 : if (fold_cache != NULL)
2536 32269958 : fold_cache->empty ();
2537 13101245 : }
2538 :
2539 : /* This function tries to fold an expression X.
2540 : To avoid combinatorial explosion, folding results are kept in fold_cache.
2541 : If X is invalid, we don't fold at all.
2542 : For performance reasons we don't cache expressions representing a
2543 : declaration or constant.
2544 : Function returns X or its folded variant. */
2545 :
2546 : static tree
2547 2175443582 : cp_fold (tree x, fold_flags_t flags)
2548 : {
2549 2175443582 : tree op0, op1, op2, op3;
2550 2175443582 : tree org_x = x, r = NULL_TREE;
2551 2175443582 : enum tree_code code;
2552 2175443582 : location_t loc;
2553 2175443582 : bool rval_ops = true;
2554 :
2555 2175443582 : if (!x || x == error_mark_node)
2556 : return x;
2557 :
2558 2172697751 : if (EXPR_P (x) && (!TREE_TYPE (x) || TREE_TYPE (x) == error_mark_node))
2559 : return x;
2560 :
2561 : /* Don't bother to cache DECLs or constants. */
2562 2172601610 : if (DECL_P (x) || CONSTANT_CLASS_P (x))
2563 : return x;
2564 :
2565 1341482748 : auto& fold_cache = get_fold_cache (flags);
2566 1341482748 : if (fold_cache == NULL)
2567 276769 : fold_cache = hash_map<tree, tree>::create_ggc (101);
2568 :
2569 1341482748 : if (tree *cached = fold_cache->get (x))
2570 268858538 : return *cached;
2571 :
2572 1072624210 : uid_sensitive_constexpr_evaluation_checker c;
2573 :
2574 1072624210 : code = TREE_CODE (x);
2575 1072624210 : switch (code)
2576 : {
2577 58142901 : case CLEANUP_POINT_EXPR:
2578 : /* Strip CLEANUP_POINT_EXPR if the expression doesn't have side
2579 : effects. */
2580 58142901 : r = cp_fold_rvalue (TREE_OPERAND (x, 0), flags);
2581 58142901 : if (!TREE_SIDE_EFFECTS (r))
2582 734839 : x = r;
2583 : break;
2584 :
2585 1013677 : case SIZEOF_EXPR:
2586 1013677 : x = fold_sizeof_expr (x);
2587 1013677 : break;
2588 :
2589 134300311 : case VIEW_CONVERT_EXPR:
2590 134300311 : rval_ops = false;
2591 : /* FALLTHRU */
2592 334561079 : case NON_LVALUE_EXPR:
2593 334561079 : CASE_CONVERT:
2594 :
2595 334561079 : if (VOID_TYPE_P (TREE_TYPE (x)))
2596 : {
2597 : /* This is just to make sure we don't end up with casts to
2598 : void from error_mark_node. If we just return x, then
2599 : cp_fold_r might fold the operand into error_mark_node and
2600 : leave the conversion in the IR. STRIP_USELESS_TYPE_CONVERSION
2601 : during gimplification doesn't like such casts.
2602 : Don't create a new tree if op0 != TREE_OPERAND (x, 0), the
2603 : folding of the operand should be in the caches and if in cp_fold_r
2604 : it will modify it in place. */
2605 23700536 : op0 = cp_fold (TREE_OPERAND (x, 0), flags);
2606 23700536 : if (op0 == error_mark_node)
2607 5 : x = error_mark_node;
2608 : break;
2609 : }
2610 :
2611 310860543 : loc = EXPR_LOCATION (x);
2612 310860543 : op0 = cp_fold_maybe_rvalue (TREE_OPERAND (x, 0), rval_ops, flags);
2613 :
2614 310860543 : if (code == CONVERT_EXPR
2615 21852704 : && SCALAR_TYPE_P (TREE_TYPE (x))
2616 332713247 : && op0 != void_node)
2617 : /* During parsing we used convert_to_*_nofold; re-convert now using the
2618 : folding variants, since fold() doesn't do those transformations. */
2619 19731875 : x = fold (convert (TREE_TYPE (x), op0));
2620 291128668 : else if (op0 != TREE_OPERAND (x, 0))
2621 : {
2622 74424413 : if (op0 == error_mark_node)
2623 0 : x = error_mark_node;
2624 : else
2625 74424413 : x = fold_build1_loc (loc, code, TREE_TYPE (x), op0);
2626 : }
2627 : else
2628 216704255 : x = fold (x);
2629 :
2630 : /* Conversion of an out-of-range value has implementation-defined
2631 : behavior; the language considers it different from arithmetic
2632 : overflow, which is undefined. */
2633 310860543 : if (TREE_CODE (op0) == INTEGER_CST
2634 310860543 : && TREE_OVERFLOW_P (x) && !TREE_OVERFLOW_P (op0))
2635 1180 : TREE_OVERFLOW (x) = false;
2636 :
2637 : break;
2638 :
2639 202 : case EXCESS_PRECISION_EXPR:
2640 202 : op0 = cp_fold_maybe_rvalue (TREE_OPERAND (x, 0), rval_ops, flags);
2641 202 : x = fold_convert_loc (EXPR_LOCATION (x), TREE_TYPE (x), op0);
2642 202 : break;
2643 :
2644 35028150 : case INDIRECT_REF:
2645 : /* We don't need the decltype(auto) obfuscation anymore. */
2646 35028150 : if (REF_PARENTHESIZED_P (x))
2647 : {
2648 196 : tree p = maybe_undo_parenthesized_ref (x);
2649 196 : if (p != x)
2650 0 : return cp_fold (p, flags);
2651 : }
2652 35028150 : goto unary;
2653 :
2654 76033002 : case ADDR_EXPR:
2655 76033002 : loc = EXPR_LOCATION (x);
2656 76033002 : op0 = cp_fold_maybe_rvalue (TREE_OPERAND (x, 0), false, flags);
2657 :
2658 : /* Cope with user tricks that amount to offsetof. */
2659 76033002 : if (op0 != error_mark_node
2660 76033002 : && !FUNC_OR_METHOD_TYPE_P (TREE_TYPE (op0)))
2661 : {
2662 27894611 : tree val = get_base_address (op0);
2663 27894611 : if (val
2664 27894611 : && INDIRECT_REF_P (val)
2665 12716137 : && COMPLETE_TYPE_P (TREE_TYPE (val))
2666 40610628 : && TREE_CONSTANT (TREE_OPERAND (val, 0)))
2667 : {
2668 219 : val = TREE_OPERAND (val, 0);
2669 219 : STRIP_NOPS (val);
2670 219 : val = maybe_constant_value (val);
2671 219 : if (TREE_CODE (val) == INTEGER_CST)
2672 148 : return fold_offsetof (op0, TREE_TYPE (x));
2673 : }
2674 : }
2675 76032854 : goto finish_unary;
2676 :
2677 : case REALPART_EXPR:
2678 : case IMAGPART_EXPR:
2679 54525883 : rval_ops = false;
2680 : /* FALLTHRU */
2681 54525883 : case CONJ_EXPR:
2682 54525883 : case FIX_TRUNC_EXPR:
2683 54525883 : case FLOAT_EXPR:
2684 54525883 : case NEGATE_EXPR:
2685 54525883 : case ABS_EXPR:
2686 54525883 : case ABSU_EXPR:
2687 54525883 : case BIT_NOT_EXPR:
2688 54525883 : case TRUTH_NOT_EXPR:
2689 54525883 : case FIXED_CONVERT_EXPR:
2690 54525883 : unary:
2691 :
2692 54525883 : loc = EXPR_LOCATION (x);
2693 54525883 : op0 = cp_fold_maybe_rvalue (TREE_OPERAND (x, 0), rval_ops, flags);
2694 :
2695 130558737 : finish_unary:
2696 130558737 : if (op0 != TREE_OPERAND (x, 0))
2697 : {
2698 19795841 : if (op0 == error_mark_node)
2699 0 : x = error_mark_node;
2700 : else
2701 : {
2702 19795841 : x = fold_build1_loc (loc, code, TREE_TYPE (x), op0);
2703 19795841 : if (code == INDIRECT_REF
2704 3772356 : && (INDIRECT_REF_P (x) || TREE_CODE (x) == MEM_REF))
2705 : {
2706 3772300 : TREE_READONLY (x) = TREE_READONLY (org_x);
2707 3772300 : TREE_SIDE_EFFECTS (x) = TREE_SIDE_EFFECTS (org_x);
2708 3772300 : TREE_THIS_VOLATILE (x) = TREE_THIS_VOLATILE (org_x);
2709 : }
2710 : }
2711 : }
2712 : else
2713 110762896 : x = fold (x);
2714 :
2715 130558737 : gcc_assert (TREE_CODE (x) != COND_EXPR
2716 : || !VOID_TYPE_P (TREE_TYPE (TREE_OPERAND (x, 0))));
2717 : break;
2718 :
2719 150746 : case UNARY_PLUS_EXPR:
2720 150746 : op0 = cp_fold_rvalue (TREE_OPERAND (x, 0), flags);
2721 150746 : if (op0 == error_mark_node)
2722 0 : x = error_mark_node;
2723 : else
2724 150746 : x = fold_convert (TREE_TYPE (x), op0);
2725 : break;
2726 :
2727 54847889 : case POSTDECREMENT_EXPR:
2728 54847889 : case POSTINCREMENT_EXPR:
2729 54847889 : case INIT_EXPR:
2730 54847889 : case PREDECREMENT_EXPR:
2731 54847889 : case PREINCREMENT_EXPR:
2732 54847889 : case COMPOUND_EXPR:
2733 54847889 : case MODIFY_EXPR:
2734 54847889 : rval_ops = false;
2735 : /* FALLTHRU */
2736 132724620 : case POINTER_PLUS_EXPR:
2737 132724620 : case PLUS_EXPR:
2738 132724620 : case POINTER_DIFF_EXPR:
2739 132724620 : case MINUS_EXPR:
2740 132724620 : case MULT_EXPR:
2741 132724620 : case TRUNC_DIV_EXPR:
2742 132724620 : case CEIL_DIV_EXPR:
2743 132724620 : case FLOOR_DIV_EXPR:
2744 132724620 : case ROUND_DIV_EXPR:
2745 132724620 : case TRUNC_MOD_EXPR:
2746 132724620 : case CEIL_MOD_EXPR:
2747 132724620 : case ROUND_MOD_EXPR:
2748 132724620 : case RDIV_EXPR:
2749 132724620 : case EXACT_DIV_EXPR:
2750 132724620 : case MIN_EXPR:
2751 132724620 : case MAX_EXPR:
2752 132724620 : case LSHIFT_EXPR:
2753 132724620 : case RSHIFT_EXPR:
2754 132724620 : case LROTATE_EXPR:
2755 132724620 : case RROTATE_EXPR:
2756 132724620 : case BIT_AND_EXPR:
2757 132724620 : case BIT_IOR_EXPR:
2758 132724620 : case BIT_XOR_EXPR:
2759 132724620 : case TRUTH_AND_EXPR:
2760 132724620 : case TRUTH_ANDIF_EXPR:
2761 132724620 : case TRUTH_OR_EXPR:
2762 132724620 : case TRUTH_ORIF_EXPR:
2763 132724620 : case TRUTH_XOR_EXPR:
2764 132724620 : case LT_EXPR: case LE_EXPR:
2765 132724620 : case GT_EXPR: case GE_EXPR:
2766 132724620 : case EQ_EXPR: case NE_EXPR:
2767 132724620 : case UNORDERED_EXPR: case ORDERED_EXPR:
2768 132724620 : case UNLT_EXPR: case UNLE_EXPR:
2769 132724620 : case UNGT_EXPR: case UNGE_EXPR:
2770 132724620 : case UNEQ_EXPR: case LTGT_EXPR:
2771 132724620 : case RANGE_EXPR: case COMPLEX_EXPR:
2772 :
2773 132724620 : loc = EXPR_LOCATION (x);
2774 132724620 : op0 = cp_fold_maybe_rvalue (TREE_OPERAND (x, 0), rval_ops, flags);
2775 132724620 : op1 = cp_fold_rvalue (TREE_OPERAND (x, 1), flags);
2776 :
2777 : /* decltype(nullptr) has only one value, so optimize away all comparisons
2778 : with that type right away, keeping them in the IL causes troubles for
2779 : various optimizations. */
2780 132724620 : if (COMPARISON_CLASS_P (org_x)
2781 19483177 : && TREE_CODE (TREE_TYPE (op0)) == NULLPTR_TYPE
2782 132724650 : && TREE_CODE (TREE_TYPE (op1)) == NULLPTR_TYPE)
2783 : {
2784 30 : switch (code)
2785 : {
2786 15 : case EQ_EXPR:
2787 15 : x = constant_boolean_node (true, TREE_TYPE (x));
2788 15 : break;
2789 15 : case NE_EXPR:
2790 15 : x = constant_boolean_node (false, TREE_TYPE (x));
2791 15 : break;
2792 0 : default:
2793 0 : gcc_unreachable ();
2794 : }
2795 30 : return omit_two_operands_loc (loc, TREE_TYPE (x), x,
2796 30 : op0, op1);
2797 : }
2798 :
2799 132724590 : if (op0 != TREE_OPERAND (x, 0) || op1 != TREE_OPERAND (x, 1))
2800 : {
2801 92977870 : if (op0 == error_mark_node || op1 == error_mark_node)
2802 2 : x = error_mark_node;
2803 : else
2804 92977868 : x = fold_build2_loc (loc, code, TREE_TYPE (x), op0, op1);
2805 : }
2806 : else
2807 39746720 : x = fold (x);
2808 :
2809 : /* This is only needed for -Wnonnull-compare and only if
2810 : TREE_NO_WARNING (org_x), but to avoid that option affecting code
2811 : generation, we do it always. */
2812 132724590 : if (COMPARISON_CLASS_P (org_x))
2813 : {
2814 19483147 : if (x == error_mark_node || TREE_CODE (x) == INTEGER_CST)
2815 : ;
2816 18654133 : else if (COMPARISON_CLASS_P (x))
2817 : {
2818 18162557 : if (warn_nonnull_compare
2819 18162557 : && warning_suppressed_p (org_x, OPT_Wnonnull_compare))
2820 131597 : suppress_warning (x, OPT_Wnonnull_compare);
2821 : }
2822 : /* Otherwise give up on optimizing these, let GIMPLE folders
2823 : optimize those later on. */
2824 491576 : else if (op0 != TREE_OPERAND (org_x, 0)
2825 491576 : || op1 != TREE_OPERAND (org_x, 1))
2826 : {
2827 491056 : x = build2_loc (loc, code, TREE_TYPE (org_x), op0, op1);
2828 491056 : if (warn_nonnull_compare
2829 491056 : && warning_suppressed_p (org_x, OPT_Wnonnull_compare))
2830 20 : suppress_warning (x, OPT_Wnonnull_compare);
2831 : }
2832 : else
2833 520 : x = org_x;
2834 : }
2835 :
2836 : break;
2837 :
2838 2868242 : case VEC_COND_EXPR:
2839 2868242 : case COND_EXPR:
2840 2868242 : loc = EXPR_LOCATION (x);
2841 2868242 : op0 = cp_fold_rvalue (TREE_OPERAND (x, 0), flags);
2842 2868242 : op1 = cp_fold (TREE_OPERAND (x, 1), flags);
2843 2868242 : op2 = cp_fold (TREE_OPERAND (x, 2), flags);
2844 :
2845 2868242 : if (TREE_CODE (TREE_TYPE (x)) == BOOLEAN_TYPE)
2846 : {
2847 14791 : warning_sentinel s (warn_int_in_bool_context);
2848 14791 : if (!VOID_TYPE_P (TREE_TYPE (op1)))
2849 14791 : op1 = cp_truthvalue_conversion (op1, tf_warning_or_error);
2850 14791 : if (!VOID_TYPE_P (TREE_TYPE (op2)))
2851 14767 : op2 = cp_truthvalue_conversion (op2, tf_warning_or_error);
2852 14791 : }
2853 2853451 : else if (VOID_TYPE_P (TREE_TYPE (x)))
2854 : {
2855 891359 : if (TREE_CODE (op0) == INTEGER_CST)
2856 : {
2857 : /* If the condition is constant, fold can fold away
2858 : the COND_EXPR. If some statement-level uses of COND_EXPR
2859 : have one of the branches NULL, avoid folding crash. */
2860 137434 : if (!op1)
2861 0 : op1 = build_empty_stmt (loc);
2862 137434 : if (!op2)
2863 0 : op2 = build_empty_stmt (loc);
2864 : }
2865 : else
2866 : {
2867 : /* Otherwise, don't bother folding a void condition, since
2868 : it can't produce a constant value. */
2869 753925 : if (op0 != TREE_OPERAND (x, 0)
2870 747054 : || op1 != TREE_OPERAND (x, 1)
2871 1347894 : || op2 != TREE_OPERAND (x, 2))
2872 160616 : x = build3_loc (loc, code, TREE_TYPE (x), op0, op1, op2);
2873 : break;
2874 : }
2875 : }
2876 :
2877 2114317 : if (op0 != TREE_OPERAND (x, 0)
2878 726604 : || op1 != TREE_OPERAND (x, 1)
2879 2654812 : || op2 != TREE_OPERAND (x, 2))
2880 : {
2881 1616201 : if (op0 == error_mark_node
2882 1616201 : || op1 == error_mark_node
2883 1616201 : || op2 == error_mark_node)
2884 0 : x = error_mark_node;
2885 : else
2886 1616201 : x = fold_build3_loc (loc, code, TREE_TYPE (x), op0, op1, op2);
2887 : }
2888 : else
2889 498116 : x = fold (x);
2890 :
2891 : /* A COND_EXPR might have incompatible types in branches if one or both
2892 : arms are bitfields. If folding exposed such a branch, fix it up. */
2893 2114317 : if (TREE_CODE (x) != code
2894 562567 : && x != error_mark_node
2895 2676884 : && !useless_type_conversion_p (TREE_TYPE (org_x), TREE_TYPE (x)))
2896 14708 : x = fold_convert (TREE_TYPE (org_x), x);
2897 :
2898 : break;
2899 :
2900 68754465 : case CALL_EXPR:
2901 68754465 : {
2902 68754465 : tree callee = get_callee_fndecl (x);
2903 :
2904 : /* "Inline" calls to std::move/forward and other cast-like functions
2905 : by simply folding them into a corresponding cast to their return
2906 : type. This is cheaper than relying on the middle end to do so, and
2907 : also means we avoid generating useless debug info for them at all.
2908 :
2909 : At this point the argument has already been converted into a
2910 : reference, so it suffices to use a NOP_EXPR to express the
2911 : cast. */
2912 68754465 : if ((OPTION_SET_P (flag_fold_simple_inlines)
2913 68754465 : ? flag_fold_simple_inlines
2914 68754390 : : !flag_no_inline)
2915 66420095 : && call_expr_nargs (x) == 1
2916 35055464 : && decl_in_std_namespace_p (callee)
2917 23739745 : && DECL_NAME (callee) != NULL_TREE
2918 92494210 : && (id_equal (DECL_NAME (callee), "move")
2919 23194436 : || id_equal (DECL_NAME (callee), "forward")
2920 22673906 : || id_equal (DECL_NAME (callee), "addressof")
2921 : /* This addressof equivalent is used heavily in libstdc++. */
2922 22543327 : || id_equal (DECL_NAME (callee), "__addressof")
2923 22272272 : || id_equal (DECL_NAME (callee), "as_const")))
2924 : {
2925 1467567 : r = CALL_EXPR_ARG (x, 0);
2926 : /* Check that the return and argument types are sane before
2927 : folding. */
2928 2533500 : if (INDIRECT_TYPE_P (TREE_TYPE (x))
2929 2533500 : && INDIRECT_TYPE_P (TREE_TYPE (r)))
2930 : {
2931 1467567 : if (!same_type_p (TREE_TYPE (x), TREE_TYPE (r)))
2932 1311932 : r = build_nop (TREE_TYPE (x), r);
2933 1467567 : x = cp_fold (r, flags);
2934 1467567 : break;
2935 : }
2936 : }
2937 :
2938 67286898 : int sv = optimize, nw = sv;
2939 :
2940 : /* Some built-in function calls will be evaluated at compile-time in
2941 : fold (). Set optimize to 1 when folding __builtin_constant_p inside
2942 : a constexpr function so that fold_builtin_1 doesn't fold it to 0. */
2943 66420330 : if (callee && fndecl_built_in_p (callee) && !optimize
2944 498719 : && DECL_IS_BUILTIN_CONSTANT_P (callee)
2945 518 : && current_function_decl
2946 67287393 : && DECL_DECLARED_CONSTEXPR_P (current_function_decl))
2947 : nw = 1;
2948 :
2949 67286898 : if (callee && fndecl_built_in_p (callee, BUILT_IN_FRONTEND))
2950 : {
2951 32159 : iloc_sentinel ils (EXPR_LOCATION (x));
2952 32159 : switch (DECL_FE_FUNCTION_CODE (callee))
2953 : {
2954 31859 : case CP_BUILT_IN_IS_CONSTANT_EVALUATED:
2955 : /* Defer folding __builtin_is_constant_evaluated unless
2956 : we know this isn't a manifestly constant-evaluated
2957 : context. */
2958 31859 : if (flags & ff_mce_false)
2959 16298 : x = boolean_false_node;
2960 : break;
2961 0 : case CP_BUILT_IN_SOURCE_LOCATION:
2962 0 : x = fold_builtin_source_location (x);
2963 0 : break;
2964 160 : case CP_BUILT_IN_IS_CORRESPONDING_MEMBER:
2965 320 : x = fold_builtin_is_corresponding_member
2966 160 : (EXPR_LOCATION (x), call_expr_nargs (x),
2967 : &CALL_EXPR_ARG (x, 0));
2968 160 : break;
2969 140 : case CP_BUILT_IN_IS_POINTER_INTERCONVERTIBLE_WITH_CLASS:
2970 280 : x = fold_builtin_is_pointer_inverconvertible_with_class
2971 140 : (EXPR_LOCATION (x), call_expr_nargs (x),
2972 : &CALL_EXPR_ARG (x, 0));
2973 140 : break;
2974 : default:
2975 : break;
2976 : }
2977 32159 : break;
2978 32159 : }
2979 :
2980 67254739 : if (callee
2981 67254739 : && fndecl_built_in_p (callee, CP_BUILT_IN_SOURCE_LOCATION,
2982 : BUILT_IN_FRONTEND))
2983 : {
2984 0 : x = fold_builtin_source_location (x);
2985 0 : break;
2986 : }
2987 :
2988 67254739 : bool changed = false;
2989 67254739 : int m = call_expr_nargs (x);
2990 166021838 : for (int i = 0; i < m; i++)
2991 : {
2992 98767099 : r = cp_fold (CALL_EXPR_ARG (x, i), flags);
2993 98767099 : if (r != CALL_EXPR_ARG (x, i))
2994 : {
2995 59881807 : if (r == error_mark_node)
2996 : {
2997 0 : x = error_mark_node;
2998 0 : break;
2999 : }
3000 59881807 : if (!changed)
3001 37225631 : x = copy_node (x);
3002 59881807 : CALL_EXPR_ARG (x, i) = r;
3003 59881807 : changed = true;
3004 : }
3005 : }
3006 67254739 : if (x == error_mark_node)
3007 : break;
3008 :
3009 67254739 : optimize = nw;
3010 67254739 : r = fold (x);
3011 67254739 : optimize = sv;
3012 :
3013 67254739 : if (TREE_CODE (r) != CALL_EXPR)
3014 : {
3015 1294218 : x = cp_fold (r, flags);
3016 1294218 : break;
3017 : }
3018 :
3019 65960521 : optimize = nw;
3020 :
3021 : /* Invoke maybe_constant_value for functions declared
3022 : constexpr and not called with AGGR_INIT_EXPRs.
3023 : TODO:
3024 : Do constexpr expansion of expressions where the call itself is not
3025 : constant, but the call followed by an INDIRECT_REF is. */
3026 65093953 : if (callee && DECL_DECLARED_CONSTEXPR_P (callee)
3027 81059098 : && !flag_no_inline)
3028 : {
3029 14605980 : mce_value manifestly_const_eval = mce_unknown;
3030 14605980 : if (flags & ff_mce_false)
3031 : /* Allow folding __builtin_is_constant_evaluated to false during
3032 : constexpr evaluation of this call. */
3033 9901400 : manifestly_const_eval = mce_false;
3034 14605980 : r = maybe_constant_value (x, /*decl=*/NULL_TREE,
3035 : manifestly_const_eval);
3036 : }
3037 65960521 : optimize = sv;
3038 :
3039 65960521 : if (TREE_CODE (r) != CALL_EXPR)
3040 : {
3041 3802450 : if (DECL_CONSTRUCTOR_P (callee))
3042 : {
3043 0 : loc = EXPR_LOCATION (x);
3044 0 : tree s = build_fold_indirect_ref_loc (loc,
3045 0 : CALL_EXPR_ARG (x, 0));
3046 0 : r = cp_build_init_expr (s, r);
3047 : }
3048 1901225 : x = r;
3049 1901225 : break;
3050 : }
3051 :
3052 : break;
3053 : }
3054 :
3055 10821230 : case CONSTRUCTOR:
3056 10821230 : {
3057 10821230 : unsigned i;
3058 10821230 : constructor_elt *p;
3059 10821230 : vec<constructor_elt, va_gc> *elts = CONSTRUCTOR_ELTS (x);
3060 10821230 : vec<constructor_elt, va_gc> *nelts = NULL;
3061 18076676 : FOR_EACH_VEC_SAFE_ELT (elts, i, p)
3062 : {
3063 7255446 : tree op = cp_fold (p->value, flags);
3064 7255446 : if (op != p->value)
3065 : {
3066 523862 : if (op == error_mark_node)
3067 : {
3068 0 : x = error_mark_node;
3069 0 : vec_free (nelts);
3070 : break;
3071 : }
3072 523862 : if (nelts == NULL)
3073 309530 : nelts = elts->copy ();
3074 523862 : (*nelts)[i].value = op;
3075 : }
3076 : }
3077 10821230 : if (nelts)
3078 : {
3079 309530 : x = build_constructor (TREE_TYPE (x), nelts);
3080 309530 : CONSTRUCTOR_PLACEHOLDER_BOUNDARY (x)
3081 309530 : = CONSTRUCTOR_PLACEHOLDER_BOUNDARY (org_x);
3082 309530 : CONSTRUCTOR_MUTABLE_POISON (x)
3083 619060 : = CONSTRUCTOR_MUTABLE_POISON (org_x);
3084 : }
3085 10821230 : if (VECTOR_TYPE_P (TREE_TYPE (x)))
3086 38988 : x = fold (x);
3087 : break;
3088 : }
3089 53225 : case TREE_VEC:
3090 53225 : {
3091 53225 : bool changed = false;
3092 53225 : int n = TREE_VEC_LENGTH (x);
3093 :
3094 131169 : for (int i = 0; i < n; i++)
3095 : {
3096 77944 : tree op = cp_fold (TREE_VEC_ELT (x, i), flags);
3097 77944 : if (op != TREE_VEC_ELT (x, i))
3098 : {
3099 856 : if (!changed)
3100 817 : x = copy_node (x);
3101 856 : TREE_VEC_ELT (x, i) = op;
3102 856 : changed = true;
3103 : }
3104 : }
3105 : }
3106 :
3107 : break;
3108 :
3109 1366300 : case ARRAY_REF:
3110 1366300 : case ARRAY_RANGE_REF:
3111 :
3112 1366300 : loc = EXPR_LOCATION (x);
3113 1366300 : op0 = cp_fold (TREE_OPERAND (x, 0), flags);
3114 1366300 : op1 = cp_fold (TREE_OPERAND (x, 1), flags);
3115 1366300 : op2 = cp_fold (TREE_OPERAND (x, 2), flags);
3116 1366300 : op3 = cp_fold (TREE_OPERAND (x, 3), flags);
3117 :
3118 1366300 : if (op0 != TREE_OPERAND (x, 0)
3119 593673 : || op1 != TREE_OPERAND (x, 1)
3120 305643 : || op2 != TREE_OPERAND (x, 2)
3121 1671943 : || op3 != TREE_OPERAND (x, 3))
3122 : {
3123 1060657 : if (op0 == error_mark_node
3124 1060657 : || op1 == error_mark_node
3125 1060657 : || op2 == error_mark_node
3126 1060657 : || op3 == error_mark_node)
3127 0 : x = error_mark_node;
3128 : else
3129 : {
3130 1060657 : x = build4_loc (loc, code, TREE_TYPE (x), op0, op1, op2, op3);
3131 1060657 : TREE_READONLY (x) = TREE_READONLY (org_x);
3132 1060657 : TREE_SIDE_EFFECTS (x) = TREE_SIDE_EFFECTS (org_x);
3133 1060657 : TREE_THIS_VOLATILE (x) = TREE_THIS_VOLATILE (org_x);
3134 : }
3135 : }
3136 :
3137 1366300 : x = fold (x);
3138 1366300 : break;
3139 :
3140 547456 : case SAVE_EXPR:
3141 : /* A SAVE_EXPR might contain e.g. (0 * i) + (0 * j), which, after
3142 : folding, evaluates to an invariant. In that case no need to wrap
3143 : this folded tree with a SAVE_EXPR. */
3144 547456 : r = cp_fold (TREE_OPERAND (x, 0), flags);
3145 547456 : if (tree_invariant_p (r))
3146 50 : x = r;
3147 : break;
3148 :
3149 4 : case REQUIRES_EXPR:
3150 4 : x = evaluate_requires_expr (x);
3151 4 : break;
3152 :
3153 : default:
3154 : return org_x;
3155 : }
3156 :
3157 741562854 : if (EXPR_P (x) && TREE_CODE (x) == code)
3158 : {
3159 509080907 : TREE_THIS_VOLATILE (x) = TREE_THIS_VOLATILE (org_x);
3160 509080907 : copy_warning (x, org_x);
3161 : }
3162 :
3163 741562854 : if (!c.evaluation_restricted_p ())
3164 : {
3165 741377528 : fold_cache->put (org_x, x);
3166 : /* Prevent that we try to fold an already folded result again. */
3167 741377528 : if (x != org_x)
3168 412959269 : fold_cache->put (x, x);
3169 : }
3170 :
3171 : return x;
3172 : }
3173 :
3174 : /* Look up "hot", "cold", "likely" or "unlikely" in attribute list LIST. */
3175 :
3176 : tree
3177 154079277 : lookup_hotness_attribute (tree list)
3178 : {
3179 154080346 : for (; list; list = TREE_CHAIN (list))
3180 : {
3181 91887 : tree name = get_attribute_name (list);
3182 91887 : if ((is_attribute_p ("hot", name)
3183 91887 : || is_attribute_p ("cold", name)
3184 91886 : || is_attribute_p ("likely", name)
3185 67170 : || is_attribute_p ("unlikely", name))
3186 182708 : && is_attribute_namespace_p ("", list))
3187 : break;
3188 : }
3189 154079277 : return list;
3190 : }
3191 :
3192 : /* Remove "hot", "cold", "likely" and "unlikely" attributes from LIST. */
3193 :
3194 : static tree
3195 90811 : remove_hotness_attribute (tree list)
3196 : {
3197 181632 : for (tree *p = &list; *p; )
3198 : {
3199 90821 : tree l = *p;
3200 90821 : tree name = get_attribute_name (l);
3201 90821 : if ((is_attribute_p ("hot", name)
3202 90821 : || is_attribute_p ("cold", name)
3203 90820 : || is_attribute_p ("likely", name)
3204 66104 : || is_attribute_p ("unlikely", name))
3205 181642 : && is_attribute_namespace_p ("", l))
3206 : {
3207 90818 : *p = TREE_CHAIN (l);
3208 90818 : continue;
3209 : }
3210 3 : p = &TREE_CHAIN (l);
3211 : }
3212 90811 : return list;
3213 : }
3214 :
3215 : /* If [[likely]] or [[unlikely]] appear on this statement, turn it into a
3216 : PREDICT_EXPR. */
3217 :
3218 : tree
3219 153988484 : process_stmt_hotness_attribute (tree std_attrs, location_t attrs_loc)
3220 : {
3221 153988484 : if (std_attrs == error_mark_node)
3222 : return std_attrs;
3223 153988466 : if (tree attr = lookup_hotness_attribute (std_attrs))
3224 : {
3225 90811 : tree name = get_attribute_name (attr);
3226 90811 : bool hot = (is_attribute_p ("hot", name)
3227 90811 : || is_attribute_p ("likely", name));
3228 90811 : tree pred = build_predict_expr (hot ? PRED_HOT_LABEL : PRED_COLD_LABEL,
3229 : hot ? TAKEN : NOT_TAKEN);
3230 90811 : SET_EXPR_LOCATION (pred, attrs_loc);
3231 90811 : add_stmt (pred);
3232 90811 : if (tree other = lookup_hotness_attribute (TREE_CHAIN (attr)))
3233 7 : warning (OPT_Wattributes, "ignoring attribute %qE after earlier %qE",
3234 : get_attribute_name (other), name);
3235 90811 : std_attrs = remove_hotness_attribute (std_attrs);
3236 : }
3237 : return std_attrs;
3238 : }
3239 :
3240 : /* Build IFN_ASSUME internal call for assume condition ARG. */
3241 :
3242 : tree
3243 454 : build_assume_call (location_t loc, tree arg)
3244 : {
3245 454 : if (!processing_template_decl)
3246 409 : arg = fold_build_cleanup_point_expr (TREE_TYPE (arg), arg);
3247 454 : return build_call_expr_internal_loc (loc, IFN_ASSUME, void_type_node,
3248 454 : 1, arg);
3249 : }
3250 :
3251 : /* If [[assume (cond)]] appears on this statement, handle it. */
3252 :
3253 : tree
3254 129257067 : process_stmt_assume_attribute (tree std_attrs, tree statement,
3255 : location_t attrs_loc)
3256 : {
3257 129257067 : if (std_attrs == error_mark_node)
3258 : return std_attrs;
3259 129257049 : tree attr = lookup_attribute ("gnu", "assume", std_attrs);
3260 129257049 : if (!attr)
3261 : return std_attrs;
3262 : /* The next token after the assume attribute is not ';'. */
3263 366 : if (statement)
3264 : {
3265 12 : warning_at (attrs_loc, OPT_Wattributes,
3266 : "%<assume%> attribute not followed by %<;%>");
3267 12 : attr = NULL_TREE;
3268 : }
3269 756 : for (; attr; attr = lookup_attribute ("gnu", "assume", TREE_CHAIN (attr)))
3270 : {
3271 390 : tree args = TREE_VALUE (attr);
3272 390 : if (args && PACK_EXPANSION_P (args))
3273 : {
3274 6 : auto_diagnostic_group d;
3275 6 : error_at (attrs_loc, "pack expansion of %qE attribute",
3276 : get_attribute_name (attr));
3277 6 : if (cxx_dialect >= cxx17)
3278 4 : inform (attrs_loc, "use fold expression in the attribute "
3279 : "argument instead");
3280 6 : continue;
3281 6 : }
3282 384 : int nargs = list_length (args);
3283 384 : if (nargs != 1)
3284 : {
3285 36 : auto_diagnostic_group d;
3286 36 : error_at (attrs_loc, "wrong number of arguments specified for "
3287 : "%qE attribute", get_attribute_name (attr));
3288 36 : inform (attrs_loc, "expected %i, found %i", 1, nargs);
3289 36 : }
3290 : else
3291 : {
3292 348 : tree arg = TREE_VALUE (args);
3293 348 : if (!type_dependent_expression_p (arg))
3294 303 : arg = contextual_conv_bool (arg, tf_warning_or_error);
3295 348 : if (error_operand_p (arg))
3296 18 : continue;
3297 330 : finish_expr_stmt (build_assume_call (attrs_loc, arg));
3298 : }
3299 : }
3300 366 : return remove_attribute ("gnu", "assume", std_attrs);
3301 : }
3302 :
3303 : /* Return the type std::source_location::__impl after performing
3304 : verification on it. */
3305 :
tree
get_source_location_impl_type ()
{
  /* Look up ::std::source_location; it must name a type.  */
  tree name = get_identifier ("source_location");
  tree decl = lookup_qualified_name (std_node, name);
  if (TREE_CODE (decl) != TYPE_DECL)
    {
      auto_diagnostic_group d;
      if (decl == error_mark_node || TREE_CODE (decl) == TREE_LIST)
	qualified_name_lookup_error (std_node, name, decl, input_location);
      else
	error ("%qD is not a type", decl);
      return error_mark_node;
    }
  /* Then look up its nested __impl type.  */
  name = get_identifier ("__impl");
  tree type = TREE_TYPE (decl);
  decl = lookup_qualified_name (type, name);
  if (TREE_CODE (decl) != TYPE_DECL)
    {
      auto_diagnostic_group d;
      if (decl == error_mark_node || TREE_CODE (decl) == TREE_LIST)
	qualified_name_lookup_error (type, name, decl, input_location);
      else
	error ("%qD is not a type", decl);
      return error_mark_node;
    }
  type = TREE_TYPE (decl);
  if (TREE_CODE (type) != RECORD_TYPE)
    {
      error ("%qD is not a class type", decl);
      return error_mark_node;
    }

  /* Verify the expected layout: exactly four non-static data members,
     _M_file_name and _M_function_name of type const char * and _M_line
     and _M_column of integral type.  CNT counts recognized members; a
     field that is unnamed or has an unexpected name clears CNT and stops
     the scan, so the cnt != 4 check below fails.  */
  int cnt = 0;
  for (tree field = TYPE_FIELDS (type);
       (field = next_aggregate_field (field)) != NULL_TREE;
       field = DECL_CHAIN (field))
    {
      if (DECL_NAME (field) != NULL_TREE)
	{
	  const char *n = IDENTIFIER_POINTER (DECL_NAME (field));
	  if (strcmp (n, "_M_file_name") == 0
	      || strcmp (n, "_M_function_name") == 0)
	    {
	      if (TREE_TYPE (field) != const_string_type_node)
		{
		  error ("%qD does not have %<const char *%> type", field);
		  return error_mark_node;
		}
	      cnt++;
	      continue;
	    }
	  else if (strcmp (n, "_M_line") == 0 || strcmp (n, "_M_column") == 0)
	    {
	      if (TREE_CODE (TREE_TYPE (field)) != INTEGER_TYPE)
		{
		  error ("%qD does not have integral type", field);
		  return error_mark_node;
		}
	      cnt++;
	      continue;
	    }
	}
      cnt = 0;
      break;
    }
  if (cnt != 4)
    {
      error ("%<std::source_location::__impl%> does not contain only "
	     "non-static data members %<_M_file_name%>, "
	     "%<_M_function_name%>, %<_M_line%> and %<_M_column%>");
      return error_mark_node;
    }
  /* The builtin produces a pointer to a const object of this type.  */
  return build_qualified_type (type, TYPE_QUAL_CONST);
}
3381 :
3382 : /* Type for source_location_table hash_set. */
3383 : struct GTY((for_user)) source_location_table_entry {
3384 : location_t loc;
3385 : unsigned uid;
3386 : tree var;
3387 : };
3388 :
3389 : /* Traits class for function start hash maps below. */
3390 :
3391 : struct source_location_table_entry_hash
3392 : : ggc_remove <source_location_table_entry>
3393 : {
3394 : typedef source_location_table_entry value_type;
3395 : typedef source_location_table_entry compare_type;
3396 :
3397 : static hashval_t
3398 156 : hash (const source_location_table_entry &ref)
3399 : {
3400 156 : inchash::hash hstate (0);
3401 156 : hstate.add_int (ref.loc);
3402 156 : hstate.add_int (ref.uid);
3403 156 : return hstate.end ();
3404 : }
3405 :
3406 : static bool
3407 70 : equal (const source_location_table_entry &ref1,
3408 : const source_location_table_entry &ref2)
3409 : {
3410 70 : return ref1.loc == ref2.loc && ref1.uid == ref2.uid;
3411 : }
3412 :
3413 : static void
3414 : mark_deleted (source_location_table_entry &ref)
3415 : {
3416 : ref.loc = UNKNOWN_LOCATION;
3417 : ref.uid = -1U;
3418 : ref.var = NULL_TREE;
3419 : }
3420 :
3421 : static const bool empty_zero_p = true;
3422 :
3423 : static void
3424 0 : mark_empty (source_location_table_entry &ref)
3425 : {
3426 0 : ref.loc = UNKNOWN_LOCATION;
3427 0 : ref.uid = 0;
3428 0 : ref.var = NULL_TREE;
3429 : }
3430 :
3431 : static bool
3432 77 : is_deleted (const source_location_table_entry &ref)
3433 : {
3434 77 : return (ref.loc == UNKNOWN_LOCATION
3435 : && ref.uid == -1U
3436 77 : && ref.var == NULL_TREE);
3437 : }
3438 :
3439 : static bool
3440 2085 : is_empty (const source_location_table_entry &ref)
3441 : {
3442 2085 : return (ref.loc == UNKNOWN_LOCATION
3443 2085 : && ref.uid == 0
3444 2085 : && ref.var == NULL_TREE);
3445 : }
3446 :
3447 : static void
3448 3 : pch_nx (source_location_table_entry &p)
3449 : {
3450 3 : extern void gt_pch_nx (source_location_table_entry &);
3451 384 : gt_pch_nx (p);
3452 : }
3453 :
3454 : static void
3455 3 : pch_nx (source_location_table_entry &p, gt_pointer_operator op, void *cookie)
3456 : {
3457 3 : extern void gt_pch_nx (source_location_table_entry *, gt_pointer_operator,
3458 : void *);
3459 3 : gt_pch_nx (&p, op, cookie);
3460 3 : }
3461 : };
3462 :
3463 : static GTY(()) hash_table <source_location_table_entry_hash>
3464 : *source_location_table;
3465 : static GTY(()) unsigned int source_location_id;
3466 :
3467 : /* Fold the __builtin_source_location () call T. */
3468 :
3469 : tree
3470 99 : fold_builtin_source_location (const_tree t)
3471 : {
3472 99 : gcc_assert (TREE_CODE (t) == CALL_EXPR);
3473 : /* TREE_TYPE (t) is const std::source_location::__impl* */
3474 99 : tree source_location_impl = TREE_TYPE (TREE_TYPE (t));
3475 99 : if (source_location_impl == error_mark_node)
3476 0 : return build_zero_cst (const_ptr_type_node);
3477 99 : gcc_assert (CLASS_TYPE_P (source_location_impl)
3478 : && id_equal (TYPE_IDENTIFIER (source_location_impl), "__impl"));
3479 :
3480 99 : location_t loc = EXPR_LOCATION (t);
3481 99 : if (source_location_table == NULL)
3482 19 : source_location_table
3483 19 : = hash_table <source_location_table_entry_hash>::create_ggc (64);
3484 99 : const line_map_ordinary *map;
3485 99 : source_location_table_entry entry;
3486 99 : entry.loc
3487 99 : = linemap_resolve_location (line_table, loc, LRK_MACRO_EXPANSION_POINT,
3488 : &map);
3489 99 : entry.uid = current_function_decl ? DECL_UID (current_function_decl) : -1;
3490 99 : entry.var = error_mark_node;
3491 99 : source_location_table_entry *entryp
3492 99 : = source_location_table->find_slot (entry, INSERT);
3493 99 : tree var;
3494 99 : if (entryp->var)
3495 : var = entryp->var;
3496 : else
3497 : {
3498 87 : char tmp_name[32];
3499 87 : ASM_GENERATE_INTERNAL_LABEL (tmp_name, "Lsrc_loc", source_location_id++);
3500 87 : var = build_decl (loc, VAR_DECL, get_identifier (tmp_name),
3501 : source_location_impl);
3502 87 : TREE_STATIC (var) = 1;
3503 87 : TREE_PUBLIC (var) = 0;
3504 87 : DECL_ARTIFICIAL (var) = 1;
3505 87 : DECL_IGNORED_P (var) = 1;
3506 87 : DECL_EXTERNAL (var) = 0;
3507 87 : DECL_DECLARED_CONSTEXPR_P (var) = 1;
3508 87 : DECL_INITIALIZED_BY_CONSTANT_EXPRESSION_P (var) = 1;
3509 87 : layout_decl (var, 0);
3510 :
3511 87 : vec<constructor_elt, va_gc> *v = NULL;
3512 87 : vec_alloc (v, 4);
3513 87 : for (tree field = TYPE_FIELDS (source_location_impl);
3514 435 : (field = next_aggregate_field (field)) != NULL_TREE;
3515 348 : field = DECL_CHAIN (field))
3516 : {
3517 348 : const char *n = IDENTIFIER_POINTER (DECL_NAME (field));
3518 348 : tree val = NULL_TREE;
3519 348 : if (strcmp (n, "_M_file_name") == 0)
3520 : {
3521 87 : if (const char *fname = LOCATION_FILE (loc))
3522 : {
3523 87 : fname = remap_macro_filename (fname);
3524 87 : val = build_string_literal (fname);
3525 : }
3526 : else
3527 0 : val = build_string_literal ("");
3528 : }
3529 261 : else if (strcmp (n, "_M_function_name") == 0)
3530 : {
3531 87 : const char *name = "";
3532 :
3533 87 : if (current_function_decl)
3534 60 : name = cxx_printable_name (current_function_decl, 2);
3535 :
3536 87 : val = build_string_literal (name);
3537 : }
3538 174 : else if (strcmp (n, "_M_line") == 0)
3539 87 : val = build_int_cst (TREE_TYPE (field), LOCATION_LINE (loc));
3540 87 : else if (strcmp (n, "_M_column") == 0)
3541 87 : val = build_int_cst (TREE_TYPE (field), LOCATION_COLUMN (loc));
3542 : else
3543 0 : gcc_unreachable ();
3544 348 : CONSTRUCTOR_APPEND_ELT (v, field, val);
3545 : }
3546 :
3547 87 : tree ctor = build_constructor (source_location_impl, v);
3548 87 : TREE_CONSTANT (ctor) = 1;
3549 87 : TREE_STATIC (ctor) = 1;
3550 87 : DECL_INITIAL (var) = ctor;
3551 87 : varpool_node::finalize_decl (var);
3552 87 : *entryp = entry;
3553 87 : entryp->var = var;
3554 : }
3555 :
3556 99 : return build_fold_addr_expr_with_type_loc (loc, var, TREE_TYPE (t));
3557 : }
3558 :
3559 : #include "gt-cp-cp-gimplify.h"
|