]> gcc.gnu.org Git - gcc.git/blob - gcc/gimplify.c
Formatting nit.
[gcc.git] / gcc / gimplify.c
1 /* Tree lowering pass. This pass converts the GENERIC functions-as-trees
2 tree representation into the GIMPLE form.
3 Copyright (C) 2002, 2003, 2004, 2005, 2006, 2007, 2008
4 Free Software Foundation, Inc.
5 Major work done by Sebastian Pop <s.pop@laposte.net>,
6 Diego Novillo <dnovillo@redhat.com> and Jason Merrill <jason@redhat.com>.
7
8 This file is part of GCC.
9
10 GCC is free software; you can redistribute it and/or modify it under
11 the terms of the GNU General Public License as published by the Free
12 Software Foundation; either version 3, or (at your option) any later
13 version.
14
15 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
16 WARRANTY; without even the implied warranty of MERCHANTABILITY or
17 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
18 for more details.
19
20 You should have received a copy of the GNU General Public License
21 along with GCC; see the file COPYING3. If not see
22 <http://www.gnu.org/licenses/>. */
23
24 #include "config.h"
25 #include "system.h"
26 #include "coretypes.h"
27 #include "tm.h"
28 #include "tree.h"
29 #include "rtl.h"
30 #include "varray.h"
31 #include "tree-gimple.h"
32 #include "tree-inline.h"
33 #include "diagnostic.h"
34 #include "langhooks.h"
35 #include "langhooks-def.h"
36 #include "tree-flow.h"
37 #include "cgraph.h"
38 #include "timevar.h"
39 #include "except.h"
40 #include "hashtab.h"
41 #include "flags.h"
42 #include "real.h"
43 #include "function.h"
44 #include "output.h"
45 #include "expr.h"
46 #include "ggc.h"
47 #include "toplev.h"
48 #include "target.h"
49 #include "optabs.h"
50 #include "pointer-set.h"
51 #include "splay-tree.h"
52
53
54 enum gimplify_omp_var_data
55 {
56 GOVD_SEEN = 1,
57 GOVD_EXPLICIT = 2,
58 GOVD_SHARED = 4,
59 GOVD_PRIVATE = 8,
60 GOVD_FIRSTPRIVATE = 16,
61 GOVD_LASTPRIVATE = 32,
62 GOVD_REDUCTION = 64,
63 GOVD_LOCAL = 128,
64 GOVD_DEBUG_PRIVATE = 256,
65 GOVD_DATA_SHARE_CLASS = (GOVD_SHARED | GOVD_PRIVATE | GOVD_FIRSTPRIVATE
66 | GOVD_LASTPRIVATE | GOVD_REDUCTION | GOVD_LOCAL)
67 };
68
69 struct gimplify_omp_ctx
70 {
71 struct gimplify_omp_ctx *outer_context;
72 splay_tree variables;
73 struct pointer_set_t *privatized_types;
74 location_t location;
75 enum omp_clause_default_kind default_kind;
76 bool is_parallel;
77 bool is_combined_parallel;
78 };
79
80 struct gimplify_ctx
81 {
82 struct gimplify_ctx *prev_context;
83
84 tree current_bind_expr;
85 tree temps;
86 tree conditional_cleanups;
87 tree exit_label;
88 tree return_temp;
89
90 VEC(tree,heap) *case_labels;
91 /* The formal temporary table. Should this be persistent? */
92 htab_t temp_htab;
93
94 int conditions;
95 bool save_stack;
96 bool into_ssa;
97 bool allow_rhs_cond_expr;
98 };
99
100 static struct gimplify_ctx *gimplify_ctxp;
101 static struct gimplify_omp_ctx *gimplify_omp_ctxp;
102
103
104
105 /* Formal (expression) temporary table handling: Multiple occurrences of
106 the same scalar expression are evaluated into the same temporary. */
107
108 typedef struct gimple_temp_hash_elt
109 {
110 tree val; /* Key */
111 tree temp; /* Value */
112 } elt_t;
113
114 /* Forward declarations. */
115 static enum gimplify_status gimplify_compound_expr (tree *, tree *, bool);
116
117 /* Mark X addressable. Unlike the langhook we expect X to be in gimple
118 form and we don't do any syntax checking. */
119 static void
120 mark_addressable (tree x)
121 {
122 while (handled_component_p (x))
123 x = TREE_OPERAND (x, 0);
124 if (TREE_CODE (x) != VAR_DECL && TREE_CODE (x) != PARM_DECL)
125 return ;
126 TREE_ADDRESSABLE (x) = 1;
127 }
128
129 /* Return a hash value for a formal temporary table entry. */
130
131 static hashval_t
132 gimple_tree_hash (const void *p)
133 {
134 tree t = ((const elt_t *) p)->val;
135 return iterative_hash_expr (t, 0);
136 }
137
138 /* Compare two formal temporary table entries. */
139
140 static int
141 gimple_tree_eq (const void *p1, const void *p2)
142 {
143 tree t1 = ((const elt_t *) p1)->val;
144 tree t2 = ((const elt_t *) p2)->val;
145 enum tree_code code = TREE_CODE (t1);
146
147 if (TREE_CODE (t2) != code
148 || TREE_TYPE (t1) != TREE_TYPE (t2))
149 return 0;
150
151 if (!operand_equal_p (t1, t2, 0))
152 return 0;
153
154 /* Only allow them to compare equal if they also hash equal; otherwise
155 results are nondeterminate, and we fail bootstrap comparison. */
156 gcc_assert (gimple_tree_hash (p1) == gimple_tree_hash (p2));
157
158 return 1;
159 }
160
161 /* Set up a context for the gimplifier. */
162
163 void
164 push_gimplify_context (void)
165 {
166 struct gimplify_ctx *c;
167
168 c = (struct gimplify_ctx *) xcalloc (1, sizeof (struct gimplify_ctx));
169 c->prev_context = gimplify_ctxp;
170 if (optimize)
171 c->temp_htab = htab_create (1000, gimple_tree_hash, gimple_tree_eq, free);
172
173 gimplify_ctxp = c;
174 }
175
176 /* Tear down a context for the gimplifier. If BODY is non-null, then
177 put the temporaries into the outer BIND_EXPR. Otherwise, put them
178 in the unexpanded_var_list. */
179
180 void
181 pop_gimplify_context (tree body)
182 {
183 struct gimplify_ctx *c = gimplify_ctxp;
184 tree t;
185
186 gcc_assert (c && !c->current_bind_expr);
187 gimplify_ctxp = c->prev_context;
188
189 for (t = c->temps; t ; t = TREE_CHAIN (t))
190 DECL_GIMPLE_FORMAL_TEMP_P (t) = 0;
191
192 if (body)
193 declare_vars (c->temps, body, false);
194 else
195 record_vars (c->temps);
196
197 if (optimize)
198 htab_delete (c->temp_htab);
199 free (c);
200 }
201
202 static void
203 gimple_push_bind_expr (tree bind)
204 {
205 TREE_CHAIN (bind) = gimplify_ctxp->current_bind_expr;
206 gimplify_ctxp->current_bind_expr = bind;
207 }
208
209 static void
210 gimple_pop_bind_expr (void)
211 {
212 gimplify_ctxp->current_bind_expr
213 = TREE_CHAIN (gimplify_ctxp->current_bind_expr);
214 }
215
216 tree
217 gimple_current_bind_expr (void)
218 {
219 return gimplify_ctxp->current_bind_expr;
220 }
221
222 /* Returns true iff there is a COND_EXPR between us and the innermost
223 CLEANUP_POINT_EXPR. This info is used by gimple_push_cleanup. */
224
225 static bool
226 gimple_conditional_context (void)
227 {
228 return gimplify_ctxp->conditions > 0;
229 }
230
231 /* Note that we've entered a COND_EXPR. */
232
233 static void
234 gimple_push_condition (void)
235 {
236 #ifdef ENABLE_CHECKING
237 if (gimplify_ctxp->conditions == 0)
238 gcc_assert (!gimplify_ctxp->conditional_cleanups);
239 #endif
240 ++(gimplify_ctxp->conditions);
241 }
242
243 /* Note that we've left a COND_EXPR. If we're back at unconditional scope
244 now, add any conditional cleanups we've seen to the prequeue. */
245
246 static void
247 gimple_pop_condition (tree *pre_p)
248 {
249 int conds = --(gimplify_ctxp->conditions);
250
251 gcc_assert (conds >= 0);
252 if (conds == 0)
253 {
254 append_to_statement_list (gimplify_ctxp->conditional_cleanups, pre_p);
255 gimplify_ctxp->conditional_cleanups = NULL_TREE;
256 }
257 }
258
259 /* A stable comparison routine for use with splay trees and DECLs. */
260
261 static int
262 splay_tree_compare_decl_uid (splay_tree_key xa, splay_tree_key xb)
263 {
264 tree a = (tree) xa;
265 tree b = (tree) xb;
266
267 return DECL_UID (a) - DECL_UID (b);
268 }
269
270 /* Create a new omp construct that deals with variable remapping. */
271
272 static struct gimplify_omp_ctx *
273 new_omp_context (bool is_parallel, bool is_combined_parallel)
274 {
275 struct gimplify_omp_ctx *c;
276
277 c = XCNEW (struct gimplify_omp_ctx);
278 c->outer_context = gimplify_omp_ctxp;
279 c->variables = splay_tree_new (splay_tree_compare_decl_uid, 0, 0);
280 c->privatized_types = pointer_set_create ();
281 c->location = input_location;
282 c->is_parallel = is_parallel;
283 c->is_combined_parallel = is_combined_parallel;
284 c->default_kind = OMP_CLAUSE_DEFAULT_SHARED;
285
286 return c;
287 }
288
289 /* Destroy an omp construct that deals with variable remapping. */
290
291 static void
292 delete_omp_context (struct gimplify_omp_ctx *c)
293 {
294 splay_tree_delete (c->variables);
295 pointer_set_destroy (c->privatized_types);
296 XDELETE (c);
297 }
298
299 static void omp_add_variable (struct gimplify_omp_ctx *, tree, unsigned int);
300 static bool omp_notice_variable (struct gimplify_omp_ctx *, tree, bool);
301
302 /* A subroutine of append_to_statement_list{,_force}. T is not NULL. */
303
304 static void
305 append_to_statement_list_1 (tree t, tree *list_p)
306 {
307 tree list = *list_p;
308 tree_stmt_iterator i;
309
310 if (!list)
311 {
312 if (t && TREE_CODE (t) == STATEMENT_LIST)
313 {
314 *list_p = t;
315 return;
316 }
317 *list_p = list = alloc_stmt_list ();
318 }
319
320 i = tsi_last (list);
321 tsi_link_after (&i, t, TSI_CONTINUE_LINKING);
322 }
323
324 /* Add T to the end of the list container pointed to by LIST_P.
325 If T is an expression with no effects, it is ignored. */
326
327 void
328 append_to_statement_list (tree t, tree *list_p)
329 {
330 if (t && TREE_SIDE_EFFECTS (t))
331 append_to_statement_list_1 (t, list_p);
332 }
333
334 /* Similar, but the statement is always added, regardless of side effects. */
335
336 void
337 append_to_statement_list_force (tree t, tree *list_p)
338 {
339 if (t != NULL_TREE)
340 append_to_statement_list_1 (t, list_p);
341 }
342
343 /* Both gimplify the statement T and append it to LIST_P. */
344
345 void
346 gimplify_and_add (tree t, tree *list_p)
347 {
348 gimplify_stmt (&t);
349 append_to_statement_list (t, list_p);
350 }
351
352 /* Strip off a legitimate source ending from the input string NAME of
353 length LEN. Rather than having to know the names used by all of
354 our front ends, we strip off an ending of a period followed by
355 up to five characters. (Java uses ".class".) */
356
357 static inline void
358 remove_suffix (char *name, int len)
359 {
360 int i;
361
362 for (i = 2; i < 8 && len > i; i++)
363 {
364 if (name[len - i] == '.')
365 {
366 name[len - i] = '\0';
367 break;
368 }
369 }
370 }
371
372 /* Create a nameless artificial label and put it in the current function
373 context. Returns the newly created label. */
374
375 tree
376 create_artificial_label (void)
377 {
378 tree lab = build_decl (LABEL_DECL, NULL_TREE, void_type_node);
379
380 DECL_ARTIFICIAL (lab) = 1;
381 DECL_IGNORED_P (lab) = 1;
382 DECL_CONTEXT (lab) = current_function_decl;
383 return lab;
384 }
385
386 /* Subroutine for find_single_pointer_decl. */
387
388 static tree
389 find_single_pointer_decl_1 (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED,
390 void *data)
391 {
392 tree *pdecl = (tree *) data;
393
394 if (DECL_P (*tp) && POINTER_TYPE_P (TREE_TYPE (*tp)))
395 {
396 if (*pdecl)
397 {
398 /* We already found a pointer decl; return anything other
399 than NULL_TREE to unwind from walk_tree signalling that
400 we have a duplicate. */
401 return *tp;
402 }
403 *pdecl = *tp;
404 }
405
406 return NULL_TREE;
407 }
408
409 /* Find the single DECL of pointer type in the tree T and return it.
410 If there are zero or more than one such DECLs, return NULL. */
411
412 static tree
413 find_single_pointer_decl (tree t)
414 {
415 tree decl = NULL_TREE;
416
417 if (walk_tree (&t, find_single_pointer_decl_1, &decl, NULL))
418 {
419 /* find_single_pointer_decl_1 returns a nonzero value, causing
420 walk_tree to return a nonzero value, to indicate that it
421 found more than one pointer DECL. */
422 return NULL_TREE;
423 }
424
425 return decl;
426 }
427
428 /* Create a new temporary name with PREFIX. Returns an identifier. */
429
430 static GTY(()) unsigned int tmp_var_id_num;
431
432 tree
433 create_tmp_var_name (const char *prefix)
434 {
435 char *tmp_name;
436
437 if (prefix)
438 {
439 char *preftmp = ASTRDUP (prefix);
440
441 remove_suffix (preftmp, strlen (preftmp));
442 prefix = preftmp;
443 }
444
445 ASM_FORMAT_PRIVATE_NAME (tmp_name, prefix ? prefix : "T", tmp_var_id_num++);
446 return get_identifier (tmp_name);
447 }
448
449
450 /* Create a new temporary variable declaration of type TYPE.
451 Does NOT push it into the current binding. */
452
453 tree
454 create_tmp_var_raw (tree type, const char *prefix)
455 {
456 tree tmp_var;
457 tree new_type;
458
459 /* Make the type of the variable writable. */
460 new_type = build_type_variant (type, 0, 0);
461 TYPE_ATTRIBUTES (new_type) = TYPE_ATTRIBUTES (type);
462
463 tmp_var = build_decl (VAR_DECL, prefix ? create_tmp_var_name (prefix) : NULL,
464 type);
465
466 /* The variable was declared by the compiler. */
467 DECL_ARTIFICIAL (tmp_var) = 1;
468 /* And we don't want debug info for it. */
469 DECL_IGNORED_P (tmp_var) = 1;
470
471 /* Make the variable writable. */
472 TREE_READONLY (tmp_var) = 0;
473
474 DECL_EXTERNAL (tmp_var) = 0;
475 TREE_STATIC (tmp_var) = 0;
476 TREE_USED (tmp_var) = 1;
477
478 return tmp_var;
479 }
480
481 /* Create a new temporary variable declaration of type TYPE. DOES push the
482 variable into the current binding. Further, assume that this is called
483 only from gimplification or optimization, at which point the creation of
484 certain types are bugs. */
485
486 tree
487 create_tmp_var (tree type, const char *prefix)
488 {
489 tree tmp_var;
490
491 /* We don't allow types that are addressable (meaning we can't make copies),
492 or incomplete. We also used to reject every variable size objects here,
493 but now support those for which a constant upper bound can be obtained.
494 The processing for variable sizes is performed in gimple_add_tmp_var,
495 point at which it really matters and possibly reached via paths not going
496 through this function, e.g. after direct calls to create_tmp_var_raw. */
497 gcc_assert (!TREE_ADDRESSABLE (type) && COMPLETE_TYPE_P (type));
498
499 tmp_var = create_tmp_var_raw (type, prefix);
500 gimple_add_tmp_var (tmp_var);
501 return tmp_var;
502 }
503
504 /* Given a tree, try to return a useful variable name that we can use
505 to prefix a temporary that is being assigned the value of the tree.
506 I.E. given <temp> = &A, return A. */
507
508 const char *
509 get_name (const_tree t)
510 {
511 const_tree stripped_decl;
512
513 stripped_decl = t;
514 STRIP_NOPS (stripped_decl);
515 if (DECL_P (stripped_decl) && DECL_NAME (stripped_decl))
516 return IDENTIFIER_POINTER (DECL_NAME (stripped_decl));
517 else
518 {
519 switch (TREE_CODE (stripped_decl))
520 {
521 case ADDR_EXPR:
522 return get_name (TREE_OPERAND (stripped_decl, 0));
523 default:
524 return NULL;
525 }
526 }
527 }
528
529 /* Create a temporary with a name derived from VAL. Subroutine of
530 lookup_tmp_var; nobody else should call this function. */
531
532 static inline tree
533 create_tmp_from_val (tree val)
534 {
535 return create_tmp_var (TYPE_MAIN_VARIANT (TREE_TYPE (val)), get_name (val));
536 }
537
538 /* Create a temporary to hold the value of VAL. If IS_FORMAL, try to reuse
539 an existing expression temporary. */
540
541 static tree
542 lookup_tmp_var (tree val, bool is_formal)
543 {
544 tree ret;
545
546 /* If not optimizing, never really reuse a temporary. local-alloc
547 won't allocate any variable that is used in more than one basic
548 block, which means it will go into memory, causing much extra
549 work in reload and final and poorer code generation, outweighing
550 the extra memory allocation here. */
551 if (!optimize || !is_formal || TREE_SIDE_EFFECTS (val))
552 ret = create_tmp_from_val (val);
553 else
554 {
555 elt_t elt, *elt_p;
556 void **slot;
557
558 elt.val = val;
559 slot = htab_find_slot (gimplify_ctxp->temp_htab, (void *)&elt, INSERT);
560 if (*slot == NULL)
561 {
562 elt_p = XNEW (elt_t);
563 elt_p->val = val;
564 elt_p->temp = ret = create_tmp_from_val (val);
565 *slot = (void *) elt_p;
566 }
567 else
568 {
569 elt_p = (elt_t *) *slot;
570 ret = elt_p->temp;
571 }
572 }
573
574 if (is_formal)
575 DECL_GIMPLE_FORMAL_TEMP_P (ret) = 1;
576
577 return ret;
578 }
579
580 /* Returns a formal temporary variable initialized with VAL. PRE_P is as
581 in gimplify_expr. Only use this function if:
582
583 1) The value of the unfactored expression represented by VAL will not
584 change between the initialization and use of the temporary, and
585 2) The temporary will not be otherwise modified.
586
587 For instance, #1 means that this is inappropriate for SAVE_EXPR temps,
588 and #2 means it is inappropriate for && temps.
589
590 For other cases, use get_initialized_tmp_var instead. */
591
592 static tree
593 internal_get_tmp_var (tree val, tree *pre_p, tree *post_p, bool is_formal)
594 {
595 tree t, mod;
596
597 gimplify_expr (&val, pre_p, post_p, is_gimple_formal_tmp_rhs, fb_rvalue);
598
599 t = lookup_tmp_var (val, is_formal);
600
601 if (is_formal)
602 {
603 tree u = find_single_pointer_decl (val);
604
605 if (u && TREE_CODE (u) == VAR_DECL && DECL_BASED_ON_RESTRICT_P (u))
606 u = DECL_GET_RESTRICT_BASE (u);
607 if (u && TYPE_RESTRICT (TREE_TYPE (u)))
608 {
609 if (DECL_BASED_ON_RESTRICT_P (t))
610 gcc_assert (u == DECL_GET_RESTRICT_BASE (t));
611 else
612 {
613 DECL_BASED_ON_RESTRICT_P (t) = 1;
614 SET_DECL_RESTRICT_BASE (t, u);
615 }
616 }
617 }
618
619 if (TREE_CODE (TREE_TYPE (t)) == COMPLEX_TYPE
620 || TREE_CODE (TREE_TYPE (t)) == VECTOR_TYPE)
621 DECL_GIMPLE_REG_P (t) = 1;
622
623 mod = build2 (INIT_EXPR, TREE_TYPE (t), t, unshare_expr (val));
624
625 if (EXPR_HAS_LOCATION (val))
626 SET_EXPR_LOCUS (mod, EXPR_LOCUS (val));
627 else
628 SET_EXPR_LOCATION (mod, input_location);
629
630 /* gimplify_modify_expr might want to reduce this further. */
631 gimplify_and_add (mod, pre_p);
632
633 /* If we're gimplifying into ssa, gimplify_modify_expr will have
634 given our temporary an ssa name. Find and return it. */
635 if (gimplify_ctxp->into_ssa)
636 t = TREE_OPERAND (mod, 0);
637
638 return t;
639 }
640
641 /* Returns a formal temporary variable initialized with VAL. PRE_P
642 points to a statement list where side-effects needed to compute VAL
643 should be stored. */
644
645 tree
646 get_formal_tmp_var (tree val, tree *pre_p)
647 {
648 return internal_get_tmp_var (val, pre_p, NULL, true);
649 }
650
651 /* Returns a temporary variable initialized with VAL. PRE_P and POST_P
652 are as in gimplify_expr. */
653
654 tree
655 get_initialized_tmp_var (tree val, tree *pre_p, tree *post_p)
656 {
657 return internal_get_tmp_var (val, pre_p, post_p, false);
658 }
659
660 /* Declares all the variables in VARS in SCOPE. If DEBUG_INFO is
661 true, generate debug info for them; otherwise don't. */
662
663 void
664 declare_vars (tree vars, tree scope, bool debug_info)
665 {
666 tree last = vars;
667 if (last)
668 {
669 tree temps, block;
670
671 /* C99 mode puts the default 'return 0;' for main outside the outer
672 braces. So drill down until we find an actual scope. */
673 while (TREE_CODE (scope) == COMPOUND_EXPR)
674 scope = TREE_OPERAND (scope, 0);
675
676 gcc_assert (TREE_CODE (scope) == BIND_EXPR);
677
678 temps = nreverse (last);
679
680 block = BIND_EXPR_BLOCK (scope);
681 if (!block || !debug_info)
682 {
683 TREE_CHAIN (last) = BIND_EXPR_VARS (scope);
684 BIND_EXPR_VARS (scope) = temps;
685 }
686 else
687 {
688 /* We need to attach the nodes both to the BIND_EXPR and to its
689 associated BLOCK for debugging purposes. The key point here
690 is that the BLOCK_VARS of the BIND_EXPR_BLOCK of a BIND_EXPR
691 is a subchain of the BIND_EXPR_VARS of the BIND_EXPR. */
692 if (BLOCK_VARS (block))
693 BLOCK_VARS (block) = chainon (BLOCK_VARS (block), temps);
694 else
695 {
696 BIND_EXPR_VARS (scope) = chainon (BIND_EXPR_VARS (scope), temps);
697 BLOCK_VARS (block) = temps;
698 }
699 }
700 }
701 }
702
703 /* For VAR a VAR_DECL of variable size, try to find a constant upper bound
704 for the size and adjust DECL_SIZE/DECL_SIZE_UNIT accordingly. Abort if
705 no such upper bound can be obtained. */
706
707 static void
708 force_constant_size (tree var)
709 {
710 /* The only attempt we make is by querying the maximum size of objects
711 of the variable's type. */
712
713 HOST_WIDE_INT max_size;
714
715 gcc_assert (TREE_CODE (var) == VAR_DECL);
716
717 max_size = max_int_size_in_bytes (TREE_TYPE (var));
718
719 gcc_assert (max_size >= 0);
720
721 DECL_SIZE_UNIT (var)
722 = build_int_cst (TREE_TYPE (DECL_SIZE_UNIT (var)), max_size);
723 DECL_SIZE (var)
724 = build_int_cst (TREE_TYPE (DECL_SIZE (var)), max_size * BITS_PER_UNIT);
725 }
726
727 void
728 gimple_add_tmp_var (tree tmp)
729 {
730 gcc_assert (!TREE_CHAIN (tmp) && !DECL_SEEN_IN_BIND_EXPR_P (tmp));
731
732 /* Later processing assumes that the object size is constant, which might
733 not be true at this point. Force the use of a constant upper bound in
734 this case. */
735 if (!host_integerp (DECL_SIZE_UNIT (tmp), 1))
736 force_constant_size (tmp);
737
738 DECL_CONTEXT (tmp) = current_function_decl;
739 DECL_SEEN_IN_BIND_EXPR_P (tmp) = 1;
740
741 if (gimplify_ctxp)
742 {
743 TREE_CHAIN (tmp) = gimplify_ctxp->temps;
744 gimplify_ctxp->temps = tmp;
745
746 /* Mark temporaries local within the nearest enclosing parallel. */
747 if (gimplify_omp_ctxp)
748 {
749 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
750 while (ctx && !ctx->is_parallel)
751 ctx = ctx->outer_context;
752 if (ctx)
753 omp_add_variable (ctx, tmp, GOVD_LOCAL | GOVD_SEEN);
754 }
755 }
756 else if (cfun)
757 record_vars (tmp);
758 else
759 declare_vars (tmp, DECL_SAVED_TREE (current_function_decl), false);
760 }
761
762 /* Determines whether to assign a locus to the statement STMT. */
763
764 static bool
765 should_carry_locus_p (const_tree stmt)
766 {
767 /* Don't emit a line note for a label. We particularly don't want to
768 emit one for the break label, since it doesn't actually correspond
769 to the beginning of the loop/switch. */
770 if (TREE_CODE (stmt) == LABEL_EXPR)
771 return false;
772
773 /* Do not annotate empty statements, since it confuses gcov. */
774 if (!TREE_SIDE_EFFECTS (stmt))
775 return false;
776
777 return true;
778 }
779
780 static void
781 annotate_one_with_locus (tree t, location_t locus)
782 {
783 if (CAN_HAVE_LOCATION_P (t)
784 && ! EXPR_HAS_LOCATION (t) && should_carry_locus_p (t))
785 SET_EXPR_LOCATION (t, locus);
786 }
787
788 void
789 annotate_all_with_locus (tree *stmt_p, location_t locus)
790 {
791 tree_stmt_iterator i;
792
793 if (!*stmt_p)
794 return;
795
796 for (i = tsi_start (*stmt_p); !tsi_end_p (i); tsi_next (&i))
797 {
798 tree t = tsi_stmt (i);
799
800 /* Assuming we've already been gimplified, we shouldn't
801 see nested chaining constructs anymore. */
802 gcc_assert (TREE_CODE (t) != STATEMENT_LIST
803 && TREE_CODE (t) != COMPOUND_EXPR);
804
805 annotate_one_with_locus (t, locus);
806 }
807 }
808
809 /* Similar to copy_tree_r() but do not copy SAVE_EXPR or TARGET_EXPR nodes.
810 These nodes model computations that should only be done once. If we
811 were to unshare something like SAVE_EXPR(i++), the gimplification
812 process would create wrong code. */
813
814 static tree
815 mostly_copy_tree_r (tree *tp, int *walk_subtrees, void *data)
816 {
817 enum tree_code code = TREE_CODE (*tp);
818 /* Don't unshare types, decls, constants and SAVE_EXPR nodes. */
819 if (TREE_CODE_CLASS (code) == tcc_type
820 || TREE_CODE_CLASS (code) == tcc_declaration
821 || TREE_CODE_CLASS (code) == tcc_constant
822 || code == SAVE_EXPR || code == TARGET_EXPR
823 /* We can't do anything sensible with a BLOCK used as an expression,
824 but we also can't just die when we see it because of non-expression
825 uses. So just avert our eyes and cross our fingers. Silly Java. */
826 || code == BLOCK)
827 *walk_subtrees = 0;
828 else
829 {
830 gcc_assert (code != BIND_EXPR);
831 copy_tree_r (tp, walk_subtrees, data);
832 }
833
834 return NULL_TREE;
835 }
836
837 /* Callback for walk_tree to unshare most of the shared trees rooted at
838 *TP. If *TP has been visited already (i.e., TREE_VISITED (*TP) == 1),
839 then *TP is deep copied by calling copy_tree_r.
840
841 This unshares the same trees as copy_tree_r with the exception of
842 SAVE_EXPR nodes. These nodes model computations that should only be
843 done once. If we were to unshare something like SAVE_EXPR(i++), the
844 gimplification process would create wrong code. */
845
846 static tree
847 copy_if_shared_r (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED,
848 void *data ATTRIBUTE_UNUSED)
849 {
850 tree t = *tp;
851 enum tree_code code = TREE_CODE (t);
852
853 /* Skip types, decls, and constants. But we do want to look at their
854 types and the bounds of types. Mark them as visited so we properly
855 unmark their subtrees on the unmark pass. If we've already seen them,
856 don't look down further. */
857 if (TREE_CODE_CLASS (code) == tcc_type
858 || TREE_CODE_CLASS (code) == tcc_declaration
859 || TREE_CODE_CLASS (code) == tcc_constant)
860 {
861 if (TREE_VISITED (t))
862 *walk_subtrees = 0;
863 else
864 TREE_VISITED (t) = 1;
865 }
866
867 /* If this node has been visited already, unshare it and don't look
868 any deeper. */
869 else if (TREE_VISITED (t))
870 {
871 walk_tree (tp, mostly_copy_tree_r, NULL, NULL);
872 *walk_subtrees = 0;
873 }
874
875 /* Otherwise, mark the tree as visited and keep looking. */
876 else
877 TREE_VISITED (t) = 1;
878
879 return NULL_TREE;
880 }
881
882 static tree
883 unmark_visited_r (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED,
884 void *data ATTRIBUTE_UNUSED)
885 {
886 if (TREE_VISITED (*tp))
887 TREE_VISITED (*tp) = 0;
888 else
889 *walk_subtrees = 0;
890
891 return NULL_TREE;
892 }
893
894 /* Unshare all the trees in BODY_P, a pointer into the body of FNDECL, and the
895 bodies of any nested functions if we are unsharing the entire body of
896 FNDECL. */
897
898 static void
899 unshare_body (tree *body_p, tree fndecl)
900 {
901 struct cgraph_node *cgn = cgraph_node (fndecl);
902
903 walk_tree (body_p, copy_if_shared_r, NULL, NULL);
904 if (body_p == &DECL_SAVED_TREE (fndecl))
905 for (cgn = cgn->nested; cgn; cgn = cgn->next_nested)
906 unshare_body (&DECL_SAVED_TREE (cgn->decl), cgn->decl);
907 }
908
909 /* Likewise, but mark all trees as not visited. */
910
911 static void
912 unvisit_body (tree *body_p, tree fndecl)
913 {
914 struct cgraph_node *cgn = cgraph_node (fndecl);
915
916 walk_tree (body_p, unmark_visited_r, NULL, NULL);
917 if (body_p == &DECL_SAVED_TREE (fndecl))
918 for (cgn = cgn->nested; cgn; cgn = cgn->next_nested)
919 unvisit_body (&DECL_SAVED_TREE (cgn->decl), cgn->decl);
920 }
921
922 /* Unshare T and all the trees reached from T via TREE_CHAIN. */
923
924 static void
925 unshare_all_trees (tree t)
926 {
927 walk_tree (&t, copy_if_shared_r, NULL, NULL);
928 walk_tree (&t, unmark_visited_r, NULL, NULL);
929 }
930
931 /* Unconditionally make an unshared copy of EXPR. This is used when using
932 stored expressions which span multiple functions, such as BINFO_VTABLE,
933 as the normal unsharing process can't tell that they're shared. */
934
935 tree
936 unshare_expr (tree expr)
937 {
938 walk_tree (&expr, mostly_copy_tree_r, NULL, NULL);
939 return expr;
940 }
941
942 /* A terser interface for building a representation of an exception
943 specification. */
944
945 tree
946 gimple_build_eh_filter (tree body, tree allowed, tree failure)
947 {
948 tree t;
949
950 /* FIXME should the allowed types go in TREE_TYPE? */
951 t = build2 (EH_FILTER_EXPR, void_type_node, allowed, NULL_TREE);
952 append_to_statement_list (failure, &EH_FILTER_FAILURE (t));
953
954 t = build2 (TRY_CATCH_EXPR, void_type_node, NULL_TREE, t);
955 append_to_statement_list (body, &TREE_OPERAND (t, 0));
956
957 return t;
958 }
959
960 \f
961 /* WRAPPER is a code such as BIND_EXPR or CLEANUP_POINT_EXPR which can both
962 contain statements and have a value. Assign its value to a temporary
963 and give it void_type_node. Returns the temporary, or NULL_TREE if
964 WRAPPER was already void. */
965
966 tree
967 voidify_wrapper_expr (tree wrapper, tree temp)
968 {
969 tree type = TREE_TYPE (wrapper);
970 if (type && !VOID_TYPE_P (type))
971 {
972 tree *p;
973
974 /* Set p to point to the body of the wrapper. Loop until we find
975 something that isn't a wrapper. */
976 for (p = &wrapper; p && *p; )
977 {
978 switch (TREE_CODE (*p))
979 {
980 case BIND_EXPR:
981 TREE_SIDE_EFFECTS (*p) = 1;
982 TREE_TYPE (*p) = void_type_node;
983 /* For a BIND_EXPR, the body is operand 1. */
984 p = &BIND_EXPR_BODY (*p);
985 break;
986
987 case CLEANUP_POINT_EXPR:
988 case TRY_FINALLY_EXPR:
989 case TRY_CATCH_EXPR:
990 TREE_SIDE_EFFECTS (*p) = 1;
991 TREE_TYPE (*p) = void_type_node;
992 p = &TREE_OPERAND (*p, 0);
993 break;
994
995 case STATEMENT_LIST:
996 {
997 tree_stmt_iterator i = tsi_last (*p);
998 TREE_SIDE_EFFECTS (*p) = 1;
999 TREE_TYPE (*p) = void_type_node;
1000 p = tsi_end_p (i) ? NULL : tsi_stmt_ptr (i);
1001 }
1002 break;
1003
1004 case COMPOUND_EXPR:
1005 /* Advance to the last statement. Set all container types to void. */
1006 for (; TREE_CODE (*p) == COMPOUND_EXPR; p = &TREE_OPERAND (*p, 1))
1007 {
1008 TREE_SIDE_EFFECTS (*p) = 1;
1009 TREE_TYPE (*p) = void_type_node;
1010 }
1011 break;
1012
1013 default:
1014 goto out;
1015 }
1016 }
1017
1018 out:
1019 if (p == NULL || IS_EMPTY_STMT (*p))
1020 temp = NULL_TREE;
1021 else if (temp)
1022 {
1023 /* The wrapper is on the RHS of an assignment that we're pushing
1024 down. */
1025 gcc_assert (TREE_CODE (temp) == INIT_EXPR
1026 || TREE_CODE (temp) == GIMPLE_MODIFY_STMT
1027 || TREE_CODE (temp) == MODIFY_EXPR);
1028 GENERIC_TREE_OPERAND (temp, 1) = *p;
1029 *p = temp;
1030 }
1031 else
1032 {
1033 temp = create_tmp_var (type, "retval");
1034 *p = build2 (INIT_EXPR, type, temp, *p);
1035 }
1036
1037 return temp;
1038 }
1039
1040 return NULL_TREE;
1041 }
1042
1043 /* Prepare calls to builtins to SAVE and RESTORE the stack as well as
1044 a temporary through which they communicate. */
1045
1046 static void
1047 build_stack_save_restore (tree *save, tree *restore)
1048 {
1049 tree save_call, tmp_var;
1050
1051 save_call =
1052 build_call_expr (implicit_built_in_decls[BUILT_IN_STACK_SAVE], 0);
1053 tmp_var = create_tmp_var (ptr_type_node, "saved_stack");
1054
1055 *save = build_gimple_modify_stmt (tmp_var, save_call);
1056 *restore =
1057 build_call_expr (implicit_built_in_decls[BUILT_IN_STACK_RESTORE],
1058 1, tmp_var);
1059 }
1060
1061 /* Gimplify a BIND_EXPR. Just voidify and recurse. */
1062
1063 static enum gimplify_status
1064 gimplify_bind_expr (tree *expr_p, tree *pre_p)
1065 {
1066 tree bind_expr = *expr_p;
1067 bool old_save_stack = gimplify_ctxp->save_stack;
1068 tree t;
1069
1070 tree temp = voidify_wrapper_expr (bind_expr, NULL);
1071
1072 /* Mark variables seen in this bind expr. */
1073 for (t = BIND_EXPR_VARS (bind_expr); t ; t = TREE_CHAIN (t))
1074 {
1075 if (TREE_CODE (t) == VAR_DECL)
1076 {
1077 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
1078
1079 /* Mark variable as local. */
1080 if (ctx && !is_global_var (t)
1081 && (! DECL_SEEN_IN_BIND_EXPR_P (t)
1082 || splay_tree_lookup (ctx->variables,
1083 (splay_tree_key) t) == NULL))
1084 omp_add_variable (gimplify_omp_ctxp, t, GOVD_LOCAL | GOVD_SEEN);
1085
1086 DECL_SEEN_IN_BIND_EXPR_P (t) = 1;
1087 }
1088
1089 /* Preliminarily mark non-addressed complex variables as eligible
1090 for promotion to gimple registers. We'll transform their uses
1091 as we find them. */
1092 if ((TREE_CODE (TREE_TYPE (t)) == COMPLEX_TYPE
1093 || TREE_CODE (TREE_TYPE (t)) == VECTOR_TYPE)
1094 && !TREE_THIS_VOLATILE (t)
1095 && (TREE_CODE (t) == VAR_DECL && !DECL_HARD_REGISTER (t))
1096 && !needs_to_live_in_memory (t))
1097 DECL_GIMPLE_REG_P (t) = 1;
1098 }
1099
1100 gimple_push_bind_expr (bind_expr);
1101 gimplify_ctxp->save_stack = false;
1102
1103 gimplify_to_stmt_list (&BIND_EXPR_BODY (bind_expr));
1104
1105 if (gimplify_ctxp->save_stack)
1106 {
1107 tree stack_save, stack_restore;
1108
1109 /* Save stack on entry and restore it on exit. Add a try_finally
1110 block to achieve this. Note that mudflap depends on the
1111 format of the emitted code: see mx_register_decls(). */
1112 build_stack_save_restore (&stack_save, &stack_restore);
1113
1114 t = build2 (TRY_FINALLY_EXPR, void_type_node,
1115 BIND_EXPR_BODY (bind_expr), NULL_TREE);
1116 append_to_statement_list (stack_restore, &TREE_OPERAND (t, 1));
1117
1118 BIND_EXPR_BODY (bind_expr) = NULL_TREE;
1119 append_to_statement_list (stack_save, &BIND_EXPR_BODY (bind_expr));
1120 append_to_statement_list (t, &BIND_EXPR_BODY (bind_expr));
1121 }
1122
1123 gimplify_ctxp->save_stack = old_save_stack;
1124 gimple_pop_bind_expr ();
1125
1126 if (temp)
1127 {
1128 *expr_p = temp;
1129 append_to_statement_list (bind_expr, pre_p);
1130 return GS_OK;
1131 }
1132 else
1133 return GS_ALL_DONE;
1134 }
1135
1136 /* Gimplify a RETURN_EXPR. If the expression to be returned is not a
1137 GIMPLE value, it is assigned to a new temporary and the statement is
1138 re-written to return the temporary.
1139
1140 PRE_P points to the list where side effects that must happen before
1141 STMT should be stored. */
1142
1143 static enum gimplify_status
1144 gimplify_return_expr (tree stmt, tree *pre_p)
1145 {
1146 tree ret_expr = TREE_OPERAND (stmt, 0);
1147 tree result_decl, result;
1148
1149 if (!ret_expr || TREE_CODE (ret_expr) == RESULT_DECL
1150 || ret_expr == error_mark_node)
1151 return GS_ALL_DONE;
1152
1153 if (VOID_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl))))
1154 result_decl = NULL_TREE;
1155 else
1156 {
1157 result_decl = GENERIC_TREE_OPERAND (ret_expr, 0);
1158 if (TREE_CODE (result_decl) == INDIRECT_REF)
1159 /* See through a return by reference. */
1160 result_decl = TREE_OPERAND (result_decl, 0);
1161
1162 gcc_assert ((TREE_CODE (ret_expr) == MODIFY_EXPR
1163 || TREE_CODE (ret_expr) == GIMPLE_MODIFY_STMT
1164 || TREE_CODE (ret_expr) == INIT_EXPR)
1165 && TREE_CODE (result_decl) == RESULT_DECL);
1166 }
1167
1168 /* If aggregate_value_p is true, then we can return the bare RESULT_DECL.
1169 Recall that aggregate_value_p is FALSE for any aggregate type that is
1170 returned in registers. If we're returning values in registers, then
1171 we don't want to extend the lifetime of the RESULT_DECL, particularly
1172 across another call. In addition, for those aggregates for which
1173 hard_function_value generates a PARALLEL, we'll die during normal
1174 expansion of structure assignments; there's special code in expand_return
1175 to handle this case that does not exist in expand_expr. */
1176 if (!result_decl
1177 || aggregate_value_p (result_decl, TREE_TYPE (current_function_decl)))
1178 result = result_decl;
1179 else if (gimplify_ctxp->return_temp)
1180 result = gimplify_ctxp->return_temp;
1181 else
1182 {
1183 result = create_tmp_var (TREE_TYPE (result_decl), NULL);
1184 if (TREE_CODE (TREE_TYPE (result)) == COMPLEX_TYPE
1185 || TREE_CODE (TREE_TYPE (result)) == VECTOR_TYPE)
1186 DECL_GIMPLE_REG_P (result) = 1;
1187
1188 /* ??? With complex control flow (usually involving abnormal edges),
1189 we can wind up warning about an uninitialized value for this. Due
1190 to how this variable is constructed and initialized, this is never
1191 true. Give up and never warn. */
1192 TREE_NO_WARNING (result) = 1;
1193
1194 gimplify_ctxp->return_temp = result;
1195 }
1196
1197 /* Smash the lhs of the GIMPLE_MODIFY_STMT to the temporary we plan to use.
1198 Then gimplify the whole thing. */
1199 if (result != result_decl)
1200 GENERIC_TREE_OPERAND (ret_expr, 0) = result;
1201
1202 gimplify_and_add (TREE_OPERAND (stmt, 0), pre_p);
1203
1204 /* If we didn't use a temporary, then the result is just the result_decl.
1205 Otherwise we need a simple copy. This should already be gimple. */
1206 if (result == result_decl)
1207 ret_expr = result;
1208 else
1209 ret_expr = build_gimple_modify_stmt (result_decl, result);
1210 TREE_OPERAND (stmt, 0) = ret_expr;
1211
1212 return GS_ALL_DONE;
1213 }
1214
1215 static void
1216 gimplify_vla_decl (tree decl, tree *stmt_p)
1217 {
1218 /* This is a variable-sized decl. Simplify its size and mark it
1219 for deferred expansion. Note that mudflap depends on the format
1220 of the emitted code: see mx_register_decls(). */
1221 tree t, addr, ptr_type;
1222
1223 gimplify_one_sizepos (&DECL_SIZE (decl), stmt_p);
1224 gimplify_one_sizepos (&DECL_SIZE_UNIT (decl), stmt_p);
1225
1226 /* All occurrences of this decl in final gimplified code will be
1227 replaced by indirection. Setting DECL_VALUE_EXPR does two
1228 things: First, it lets the rest of the gimplifier know what
1229 replacement to use. Second, it lets the debug info know
1230 where to find the value. */
1231 ptr_type = build_pointer_type (TREE_TYPE (decl));
1232 addr = create_tmp_var (ptr_type, get_name (decl));
1233 DECL_IGNORED_P (addr) = 0;
1234 t = build_fold_indirect_ref (addr);
1235 SET_DECL_VALUE_EXPR (decl, t);
1236 DECL_HAS_VALUE_EXPR_P (decl) = 1;
1237
1238 t = built_in_decls[BUILT_IN_ALLOCA];
1239 t = build_call_expr (t, 1, DECL_SIZE_UNIT (decl));
1240 t = fold_convert (ptr_type, t);
1241 t = build_gimple_modify_stmt (addr, t);
1242
1243 gimplify_and_add (t, stmt_p);
1244
1245 /* Indicate that we need to restore the stack level when the
1246 enclosing BIND_EXPR is exited. */
1247 gimplify_ctxp->save_stack = true;
1248 }
1249
1250 /* Gimplifies a DECL_EXPR node *STMT_P by making any necessary allocation
1251 and initialization explicit. */
1252
1253 static enum gimplify_status
1254 gimplify_decl_expr (tree *stmt_p)
1255 {
1256 tree stmt = *stmt_p;
1257 tree decl = DECL_EXPR_DECL (stmt);
1258
1259 *stmt_p = NULL_TREE;
1260
1261 if (TREE_TYPE (decl) == error_mark_node)
1262 return GS_ERROR;
1263
1264 if ((TREE_CODE (decl) == TYPE_DECL
1265 || TREE_CODE (decl) == VAR_DECL)
1266 && !TYPE_SIZES_GIMPLIFIED (TREE_TYPE (decl)))
1267 gimplify_type_sizes (TREE_TYPE (decl), stmt_p);
1268
1269 if (TREE_CODE (decl) == VAR_DECL && !DECL_EXTERNAL (decl))
1270 {
1271 tree init = DECL_INITIAL (decl);
1272
1273 if (TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
1274 gimplify_vla_decl (decl, stmt_p);
1275
1276 if (init && init != error_mark_node)
1277 {
1278 if (!TREE_STATIC (decl))
1279 {
1280 DECL_INITIAL (decl) = NULL_TREE;
1281 init = build2 (INIT_EXPR, void_type_node, decl, init);
1282 gimplify_and_add (init, stmt_p);
1283 }
1284 else
1285 /* We must still examine initializers for static variables
1286 as they may contain a label address. */
1287 walk_tree (&init, force_labels_r, NULL, NULL);
1288 }
1289
1290 /* Some front ends do not explicitly declare all anonymous
1291 artificial variables. We compensate here by declaring the
1292 variables, though it would be better if the front ends would
1293 explicitly declare them. */
1294 if (!DECL_SEEN_IN_BIND_EXPR_P (decl)
1295 && DECL_ARTIFICIAL (decl) && DECL_NAME (decl) == NULL_TREE)
1296 gimple_add_tmp_var (decl);
1297 }
1298
1299 return GS_ALL_DONE;
1300 }
1301
1302 /* Gimplify a LOOP_EXPR. Normally this just involves gimplifying the body
1303 and replacing the LOOP_EXPR with goto, but if the loop contains an
1304 EXIT_EXPR, we need to append a label for it to jump to. */
1305
1306 static enum gimplify_status
1307 gimplify_loop_expr (tree *expr_p, tree *pre_p)
1308 {
1309 tree saved_label = gimplify_ctxp->exit_label;
1310 tree start_label = build1 (LABEL_EXPR, void_type_node, NULL_TREE);
1311 tree jump_stmt = build_and_jump (&LABEL_EXPR_LABEL (start_label));
1312
1313 append_to_statement_list (start_label, pre_p);
1314
1315 gimplify_ctxp->exit_label = NULL_TREE;
1316
1317 gimplify_and_add (LOOP_EXPR_BODY (*expr_p), pre_p);
1318
1319 if (gimplify_ctxp->exit_label)
1320 {
1321 append_to_statement_list (jump_stmt, pre_p);
1322 *expr_p = build1 (LABEL_EXPR, void_type_node, gimplify_ctxp->exit_label);
1323 }
1324 else
1325 *expr_p = jump_stmt;
1326
1327 gimplify_ctxp->exit_label = saved_label;
1328
1329 return GS_ALL_DONE;
1330 }
1331
1332 /* Compare two case labels. Because the front end should already have
1333 made sure that case ranges do not overlap, it is enough to only compare
1334 the CASE_LOW values of each case label. */
1335
1336 static int
1337 compare_case_labels (const void *p1, const void *p2)
1338 {
1339 const_tree const case1 = *(const_tree const*)p1;
1340 const_tree const case2 = *(const_tree const*)p2;
1341
1342 return tree_int_cst_compare (CASE_LOW (case1), CASE_LOW (case2));
1343 }
1344
1345 /* Sort the case labels in LABEL_VEC in place in ascending order. */
1346
1347 void
1348 sort_case_labels (tree label_vec)
1349 {
1350 size_t len = TREE_VEC_LENGTH (label_vec);
1351 tree default_case = TREE_VEC_ELT (label_vec, len - 1);
1352
1353 if (CASE_LOW (default_case))
1354 {
1355 size_t i;
1356
1357 /* The last label in the vector should be the default case
1358 but it is not. */
1359 for (i = 0; i < len; ++i)
1360 {
1361 tree t = TREE_VEC_ELT (label_vec, i);
1362 if (!CASE_LOW (t))
1363 {
1364 default_case = t;
1365 TREE_VEC_ELT (label_vec, i) = TREE_VEC_ELT (label_vec, len - 1);
1366 TREE_VEC_ELT (label_vec, len - 1) = default_case;
1367 break;
1368 }
1369 }
1370 }
1371
1372 qsort (&TREE_VEC_ELT (label_vec, 0), len - 1, sizeof (tree),
1373 compare_case_labels);
1374 }
1375
1376 /* Gimplify a SWITCH_EXPR, and collect a TREE_VEC of the labels it can
1377 branch to. */
1378
1379 static enum gimplify_status
1380 gimplify_switch_expr (tree *expr_p, tree *pre_p)
1381 {
1382 tree switch_expr = *expr_p;
1383 enum gimplify_status ret;
1384
1385 ret = gimplify_expr (&SWITCH_COND (switch_expr), pre_p, NULL,
1386 is_gimple_val, fb_rvalue);
1387
1388 if (SWITCH_BODY (switch_expr))
1389 {
1390 VEC(tree,heap) *labels, *saved_labels;
1391 tree label_vec, default_case = NULL_TREE;
1392 size_t i, len;
1393
1394 /* If someone can be bothered to fill in the labels, they can
1395 be bothered to null out the body too. */
1396 gcc_assert (!SWITCH_LABELS (switch_expr));
1397
1398 saved_labels = gimplify_ctxp->case_labels;
1399 gimplify_ctxp->case_labels = VEC_alloc (tree, heap, 8);
1400
1401 gimplify_to_stmt_list (&SWITCH_BODY (switch_expr));
1402
1403 labels = gimplify_ctxp->case_labels;
1404 gimplify_ctxp->case_labels = saved_labels;
1405
1406 i = 0;
1407 while (i < VEC_length (tree, labels))
1408 {
1409 tree elt = VEC_index (tree, labels, i);
1410 tree low = CASE_LOW (elt);
1411 bool remove_element = FALSE;
1412
1413 if (low)
1414 {
1415 /* Discard empty ranges. */
1416 tree high = CASE_HIGH (elt);
1417 if (high && tree_int_cst_lt (high, low))
1418 remove_element = TRUE;
1419 }
1420 else
1421 {
1422 /* The default case must be the last label in the list. */
1423 gcc_assert (!default_case);
1424 default_case = elt;
1425 remove_element = TRUE;
1426 }
1427
1428 if (remove_element)
1429 VEC_ordered_remove (tree, labels, i);
1430 else
1431 i++;
1432 }
1433 len = i;
1434
1435 label_vec = make_tree_vec (len + 1);
1436 SWITCH_LABELS (*expr_p) = label_vec;
1437 append_to_statement_list (switch_expr, pre_p);
1438
1439 if (! default_case)
1440 {
1441 /* If the switch has no default label, add one, so that we jump
1442 around the switch body. */
1443 default_case = build3 (CASE_LABEL_EXPR, void_type_node, NULL_TREE,
1444 NULL_TREE, create_artificial_label ());
1445 append_to_statement_list (SWITCH_BODY (switch_expr), pre_p);
1446 *expr_p = build1 (LABEL_EXPR, void_type_node,
1447 CASE_LABEL (default_case));
1448 }
1449 else
1450 *expr_p = SWITCH_BODY (switch_expr);
1451
1452 for (i = 0; i < len; ++i)
1453 TREE_VEC_ELT (label_vec, i) = VEC_index (tree, labels, i);
1454 TREE_VEC_ELT (label_vec, len) = default_case;
1455
1456 VEC_free (tree, heap, labels);
1457
1458 sort_case_labels (label_vec);
1459
1460 SWITCH_BODY (switch_expr) = NULL;
1461 }
1462 else
1463 gcc_assert (SWITCH_LABELS (switch_expr));
1464
1465 return ret;
1466 }
1467
1468 static enum gimplify_status
1469 gimplify_case_label_expr (tree *expr_p)
1470 {
1471 tree expr = *expr_p;
1472 struct gimplify_ctx *ctxp;
1473
1474 /* Invalid OpenMP programs can play Duff's Device type games with
1475 #pragma omp parallel. At least in the C front end, we don't
1476 detect such invalid branches until after gimplification. */
1477 for (ctxp = gimplify_ctxp; ; ctxp = ctxp->prev_context)
1478 if (ctxp->case_labels)
1479 break;
1480
1481 VEC_safe_push (tree, heap, ctxp->case_labels, expr);
1482 *expr_p = build1 (LABEL_EXPR, void_type_node, CASE_LABEL (expr));
1483 return GS_ALL_DONE;
1484 }
1485
1486 /* Build a GOTO to the LABEL_DECL pointed to by LABEL_P, building it first
1487 if necessary. */
1488
1489 tree
1490 build_and_jump (tree *label_p)
1491 {
1492 if (label_p == NULL)
1493 /* If there's nowhere to jump, just fall through. */
1494 return NULL_TREE;
1495
1496 if (*label_p == NULL_TREE)
1497 {
1498 tree label = create_artificial_label ();
1499 *label_p = label;
1500 }
1501
1502 return build1 (GOTO_EXPR, void_type_node, *label_p);
1503 }
1504
1505 /* Gimplify an EXIT_EXPR by converting to a GOTO_EXPR inside a COND_EXPR.
1506 This also involves building a label to jump to and communicating it to
1507 gimplify_loop_expr through gimplify_ctxp->exit_label. */
1508
1509 static enum gimplify_status
1510 gimplify_exit_expr (tree *expr_p)
1511 {
1512 tree cond = TREE_OPERAND (*expr_p, 0);
1513 tree expr;
1514
1515 expr = build_and_jump (&gimplify_ctxp->exit_label);
1516 expr = build3 (COND_EXPR, void_type_node, cond, expr, NULL_TREE);
1517 *expr_p = expr;
1518
1519 return GS_OK;
1520 }
1521
1522 /* A helper function to be called via walk_tree. Mark all labels under *TP
1523 as being forced. To be called for DECL_INITIAL of static variables. */
1524
1525 tree
1526 force_labels_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
1527 {
1528 if (TYPE_P (*tp))
1529 *walk_subtrees = 0;
1530 if (TREE_CODE (*tp) == LABEL_DECL)
1531 FORCED_LABEL (*tp) = 1;
1532
1533 return NULL_TREE;
1534 }
1535
1536 /* *EXPR_P is a COMPONENT_REF being used as an rvalue. If its type is
1537 different from its canonical type, wrap the whole thing inside a
1538 NOP_EXPR and force the type of the COMPONENT_REF to be the canonical
1539 type.
1540
1541 The canonical type of a COMPONENT_REF is the type of the field being
1542 referenced--unless the field is a bit-field which can be read directly
1543 in a smaller mode, in which case the canonical type is the
1544 sign-appropriate type corresponding to that mode. */
1545
1546 static void
1547 canonicalize_component_ref (tree *expr_p)
1548 {
1549 tree expr = *expr_p;
1550 tree type;
1551
1552 gcc_assert (TREE_CODE (expr) == COMPONENT_REF);
1553
1554 if (INTEGRAL_TYPE_P (TREE_TYPE (expr)))
1555 type = TREE_TYPE (get_unwidened (expr, NULL_TREE));
1556 else
1557 type = TREE_TYPE (TREE_OPERAND (expr, 1));
1558
1559 /* One could argue that all the stuff below is not necessary for
1560 the non-bitfield case and declare it a FE error if type
1561 adjustment would be needed. */
1562 if (TREE_TYPE (expr) != type)
1563 {
1564 #ifdef ENABLE_TYPES_CHECKING
1565 tree old_type = TREE_TYPE (expr);
1566 #endif
1567 int type_quals;
1568
1569 /* We need to preserve qualifiers and propagate them from
1570 operand 0. */
1571 type_quals = TYPE_QUALS (type)
1572 | TYPE_QUALS (TREE_TYPE (TREE_OPERAND (expr, 0)));
1573 if (TYPE_QUALS (type) != type_quals)
1574 type = build_qualified_type (TYPE_MAIN_VARIANT (type), type_quals);
1575
1576 /* Set the type of the COMPONENT_REF to the underlying type. */
1577 TREE_TYPE (expr) = type;
1578
1579 #ifdef ENABLE_TYPES_CHECKING
1580 /* It is now a FE error, if the conversion from the canonical
1581 type to the original expression type is not useless. */
1582 gcc_assert (useless_type_conversion_p (old_type, type));
1583 #endif
1584 }
1585 }
1586
1587 /* If a NOP conversion is changing a pointer to array of foo to a pointer
1588 to foo, embed that change in the ADDR_EXPR by converting
1589 T array[U];
1590 (T *)&array
1591 ==>
1592 &array[L]
1593 where L is the lower bound. For simplicity, only do this for constant
1594 lower bound.
1595 The constraint is that the type of &array[L] is trivially convertible
1596 to T *. */
1597
1598 static void
1599 canonicalize_addr_expr (tree *expr_p)
1600 {
1601 tree expr = *expr_p;
1602 tree addr_expr = TREE_OPERAND (expr, 0);
1603 tree datype, ddatype, pddatype;
1604
1605 /* We simplify only conversions from an ADDR_EXPR to a pointer type. */
1606 if (!POINTER_TYPE_P (TREE_TYPE (expr))
1607 || TREE_CODE (addr_expr) != ADDR_EXPR)
1608 return;
1609
1610 /* The addr_expr type should be a pointer to an array. */
1611 datype = TREE_TYPE (TREE_TYPE (addr_expr));
1612 if (TREE_CODE (datype) != ARRAY_TYPE)
1613 return;
1614
1615 /* The pointer to element type shall be trivially convertible to
1616 the expression pointer type. */
1617 ddatype = TREE_TYPE (datype);
1618 pddatype = build_pointer_type (ddatype);
1619 if (!useless_type_conversion_p (pddatype, ddatype))
1620 return;
1621
1622 /* The lower bound and element sizes must be constant. */
1623 if (!TYPE_SIZE_UNIT (ddatype)
1624 || TREE_CODE (TYPE_SIZE_UNIT (ddatype)) != INTEGER_CST
1625 || !TYPE_DOMAIN (datype) || !TYPE_MIN_VALUE (TYPE_DOMAIN (datype))
1626 || TREE_CODE (TYPE_MIN_VALUE (TYPE_DOMAIN (datype))) != INTEGER_CST)
1627 return;
1628
1629 /* All checks succeeded. Build a new node to merge the cast. */
1630 *expr_p = build4 (ARRAY_REF, ddatype, TREE_OPERAND (addr_expr, 0),
1631 TYPE_MIN_VALUE (TYPE_DOMAIN (datype)),
1632 NULL_TREE, NULL_TREE);
1633 *expr_p = build1 (ADDR_EXPR, pddatype, *expr_p);
1634 }
1635
1636 /* *EXPR_P is a NOP_EXPR or CONVERT_EXPR. Remove it and/or other conversions
1637 underneath as appropriate. */
1638
1639 static enum gimplify_status
1640 gimplify_conversion (tree *expr_p)
1641 {
1642 tree tem;
1643 gcc_assert (TREE_CODE (*expr_p) == NOP_EXPR
1644 || TREE_CODE (*expr_p) == CONVERT_EXPR);
1645
1646 /* Then strip away all but the outermost conversion. */
1647 STRIP_SIGN_NOPS (TREE_OPERAND (*expr_p, 0));
1648
1649 /* And remove the outermost conversion if it's useless. */
1650 if (tree_ssa_useless_type_conversion (*expr_p))
1651 *expr_p = TREE_OPERAND (*expr_p, 0);
1652
1653 /* Attempt to avoid NOP_EXPR by producing reference to a subtype.
1654 For example this fold (subclass *)&A into &A->subclass avoiding
1655 a need for statement. */
1656 if (TREE_CODE (*expr_p) == NOP_EXPR
1657 && POINTER_TYPE_P (TREE_TYPE (*expr_p))
1658 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (*expr_p, 0)))
1659 && (tem = maybe_fold_offset_to_reference
1660 (TREE_OPERAND (*expr_p, 0),
1661 integer_zero_node, TREE_TYPE (TREE_TYPE (*expr_p)))))
1662 {
1663 tree ptr_type = build_pointer_type (TREE_TYPE (tem));
1664 if (useless_type_conversion_p (TREE_TYPE (*expr_p), ptr_type))
1665 *expr_p = build_fold_addr_expr_with_type (tem, ptr_type);
1666 }
1667
1668 /* If we still have a conversion at the toplevel,
1669 then canonicalize some constructs. */
1670 if (TREE_CODE (*expr_p) == NOP_EXPR || TREE_CODE (*expr_p) == CONVERT_EXPR)
1671 {
1672 tree sub = TREE_OPERAND (*expr_p, 0);
1673
1674 /* If a NOP conversion is changing the type of a COMPONENT_REF
1675 expression, then canonicalize its type now in order to expose more
1676 redundant conversions. */
1677 if (TREE_CODE (sub) == COMPONENT_REF)
1678 canonicalize_component_ref (&TREE_OPERAND (*expr_p, 0));
1679
1680 /* If a NOP conversion is changing a pointer to array of foo
1681 to a pointer to foo, embed that change in the ADDR_EXPR. */
1682 else if (TREE_CODE (sub) == ADDR_EXPR)
1683 canonicalize_addr_expr (expr_p);
1684 }
1685
1686 return GS_OK;
1687 }
1688
1689 /* Gimplify a VAR_DECL or PARM_DECL. Returns GS_OK if we expanded a
1690 DECL_VALUE_EXPR, and it's worth re-examining things. */
1691
1692 static enum gimplify_status
1693 gimplify_var_or_parm_decl (tree *expr_p)
1694 {
1695 tree decl = *expr_p;
1696
1697 /* ??? If this is a local variable, and it has not been seen in any
1698 outer BIND_EXPR, then it's probably the result of a duplicate
1699 declaration, for which we've already issued an error. It would
1700 be really nice if the front end wouldn't leak these at all.
1701 Currently the only known culprit is C++ destructors, as seen
1702 in g++.old-deja/g++.jason/binding.C. */
1703 if (TREE_CODE (decl) == VAR_DECL
1704 && !DECL_SEEN_IN_BIND_EXPR_P (decl)
1705 && !TREE_STATIC (decl) && !DECL_EXTERNAL (decl)
1706 && decl_function_context (decl) == current_function_decl)
1707 {
1708 gcc_assert (errorcount || sorrycount);
1709 return GS_ERROR;
1710 }
1711
1712 /* When within an OpenMP context, notice uses of variables. */
1713 if (gimplify_omp_ctxp && omp_notice_variable (gimplify_omp_ctxp, decl, true))
1714 return GS_ALL_DONE;
1715
1716 /* If the decl is an alias for another expression, substitute it now. */
1717 if (DECL_HAS_VALUE_EXPR_P (decl))
1718 {
1719 *expr_p = unshare_expr (DECL_VALUE_EXPR (decl));
1720 return GS_OK;
1721 }
1722
1723 return GS_ALL_DONE;
1724 }
1725
1726
1727 /* Gimplify the COMPONENT_REF, ARRAY_REF, REALPART_EXPR or IMAGPART_EXPR
1728 node pointed to by EXPR_P.
1729
1730 compound_lval
1731 : min_lval '[' val ']'
1732 | min_lval '.' ID
1733 | compound_lval '[' val ']'
1734 | compound_lval '.' ID
1735
1736 This is not part of the original SIMPLE definition, which separates
1737 array and member references, but it seems reasonable to handle them
1738 together. Also, this way we don't run into problems with union
1739 aliasing; gcc requires that for accesses through a union to alias, the
1740 union reference must be explicit, which was not always the case when we
1741 were splitting up array and member refs.
1742
1743 PRE_P points to the list where side effects that must happen before
1744 *EXPR_P should be stored.
1745
1746 POST_P points to the list where side effects that must happen after
1747 *EXPR_P should be stored. */
1748
1749 static enum gimplify_status
1750 gimplify_compound_lval (tree *expr_p, tree *pre_p,
1751 tree *post_p, fallback_t fallback)
1752 {
1753 tree *p;
1754 VEC(tree,heap) *stack;
1755 enum gimplify_status ret = GS_OK, tret;
1756 int i;
1757
1758 /* Create a stack of the subexpressions so later we can walk them in
1759 order from inner to outer. */
1760 stack = VEC_alloc (tree, heap, 10);
1761
1762 /* We can handle anything that get_inner_reference can deal with. */
1763 for (p = expr_p; ; p = &TREE_OPERAND (*p, 0))
1764 {
1765 restart:
1766 /* Fold INDIRECT_REFs now to turn them into ARRAY_REFs. */
1767 if (TREE_CODE (*p) == INDIRECT_REF)
1768 *p = fold_indirect_ref (*p);
1769
1770 if (handled_component_p (*p))
1771 ;
1772 /* Expand DECL_VALUE_EXPR now. In some cases that may expose
1773 additional COMPONENT_REFs. */
1774 else if ((TREE_CODE (*p) == VAR_DECL || TREE_CODE (*p) == PARM_DECL)
1775 && gimplify_var_or_parm_decl (p) == GS_OK)
1776 goto restart;
1777 else
1778 break;
1779
1780 VEC_safe_push (tree, heap, stack, *p);
1781 }
1782
1783 gcc_assert (VEC_length (tree, stack));
1784
1785 /* Now STACK is a stack of pointers to all the refs we've walked through
1786 and P points to the innermost expression.
1787
1788 Java requires that we elaborated nodes in source order. That
1789 means we must gimplify the inner expression followed by each of
1790 the indices, in order. But we can't gimplify the inner
1791 expression until we deal with any variable bounds, sizes, or
1792 positions in order to deal with PLACEHOLDER_EXPRs.
1793
1794 So we do this in three steps. First we deal with the annotations
1795 for any variables in the components, then we gimplify the base,
1796 then we gimplify any indices, from left to right. */
1797 for (i = VEC_length (tree, stack) - 1; i >= 0; i--)
1798 {
1799 tree t = VEC_index (tree, stack, i);
1800
1801 if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
1802 {
1803 /* Gimplify the low bound and element type size and put them into
1804 the ARRAY_REF. If these values are set, they have already been
1805 gimplified. */
1806 if (!TREE_OPERAND (t, 2))
1807 {
1808 tree low = unshare_expr (array_ref_low_bound (t));
1809 if (!is_gimple_min_invariant (low))
1810 {
1811 TREE_OPERAND (t, 2) = low;
1812 tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p, post_p,
1813 is_gimple_formal_tmp_reg, fb_rvalue);
1814 ret = MIN (ret, tret);
1815 }
1816 }
1817
1818 if (!TREE_OPERAND (t, 3))
1819 {
1820 tree elmt_type = TREE_TYPE (TREE_TYPE (TREE_OPERAND (t, 0)));
1821 tree elmt_size = unshare_expr (array_ref_element_size (t));
1822 tree factor = size_int (TYPE_ALIGN_UNIT (elmt_type));
1823
1824 /* Divide the element size by the alignment of the element
1825 type (above). */
1826 elmt_size = size_binop (EXACT_DIV_EXPR, elmt_size, factor);
1827
1828 if (!is_gimple_min_invariant (elmt_size))
1829 {
1830 TREE_OPERAND (t, 3) = elmt_size;
1831 tret = gimplify_expr (&TREE_OPERAND (t, 3), pre_p, post_p,
1832 is_gimple_formal_tmp_reg, fb_rvalue);
1833 ret = MIN (ret, tret);
1834 }
1835 }
1836 }
1837 else if (TREE_CODE (t) == COMPONENT_REF)
1838 {
1839 /* Set the field offset into T and gimplify it. */
1840 if (!TREE_OPERAND (t, 2))
1841 {
1842 tree offset = unshare_expr (component_ref_field_offset (t));
1843 tree field = TREE_OPERAND (t, 1);
1844 tree factor
1845 = size_int (DECL_OFFSET_ALIGN (field) / BITS_PER_UNIT);
1846
1847 /* Divide the offset by its alignment. */
1848 offset = size_binop (EXACT_DIV_EXPR, offset, factor);
1849
1850 if (!is_gimple_min_invariant (offset))
1851 {
1852 TREE_OPERAND (t, 2) = offset;
1853 tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p, post_p,
1854 is_gimple_formal_tmp_reg, fb_rvalue);
1855 ret = MIN (ret, tret);
1856 }
1857 }
1858 }
1859 }
1860
1861 /* Step 2 is to gimplify the base expression. Make sure lvalue is set
1862 so as to match the min_lval predicate. Failure to do so may result
1863 in the creation of large aggregate temporaries. */
1864 tret = gimplify_expr (p, pre_p, post_p, is_gimple_min_lval,
1865 fallback | fb_lvalue);
1866 ret = MIN (ret, tret);
1867
1868 /* And finally, the indices and operands to BIT_FIELD_REF. During this
1869 loop we also remove any useless conversions. */
1870 for (; VEC_length (tree, stack) > 0; )
1871 {
1872 tree t = VEC_pop (tree, stack);
1873
1874 if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
1875 {
1876 /* Gimplify the dimension.
1877 Temporary fix for gcc.c-torture/execute/20040313-1.c.
1878 Gimplify non-constant array indices into a temporary
1879 variable.
1880 FIXME - The real fix is to gimplify post-modify
1881 expressions into a minimal gimple lvalue. However, that
1882 exposes bugs in alias analysis. The alias analyzer does
1883 not handle &PTR->FIELD very well. Will fix after the
1884 branch is merged into mainline (dnovillo 2004-05-03). */
1885 if (!is_gimple_min_invariant (TREE_OPERAND (t, 1)))
1886 {
1887 tret = gimplify_expr (&TREE_OPERAND (t, 1), pre_p, post_p,
1888 is_gimple_formal_tmp_reg, fb_rvalue);
1889 ret = MIN (ret, tret);
1890 }
1891 }
1892 else if (TREE_CODE (t) == BIT_FIELD_REF)
1893 {
1894 tret = gimplify_expr (&TREE_OPERAND (t, 1), pre_p, post_p,
1895 is_gimple_val, fb_rvalue);
1896 ret = MIN (ret, tret);
1897 tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p, post_p,
1898 is_gimple_val, fb_rvalue);
1899 ret = MIN (ret, tret);
1900 }
1901
1902 STRIP_USELESS_TYPE_CONVERSION (TREE_OPERAND (t, 0));
1903
1904 /* The innermost expression P may have originally had TREE_SIDE_EFFECTS
1905 set which would have caused all the outer expressions in EXPR_P
1906 leading to P to also have had TREE_SIDE_EFFECTS set. */
1907 recalculate_side_effects (t);
1908 }
1909
1910 tret = gimplify_expr (p, pre_p, post_p, is_gimple_min_lval, fallback);
1911 ret = MIN (ret, tret);
1912
1913 /* If the outermost expression is a COMPONENT_REF, canonicalize its type. */
1914 if ((fallback & fb_rvalue) && TREE_CODE (*expr_p) == COMPONENT_REF)
1915 {
1916 canonicalize_component_ref (expr_p);
1917 ret = MIN (ret, GS_OK);
1918 }
1919
1920 VEC_free (tree, heap, stack);
1921
1922 return ret;
1923 }
1924
1925 /* Gimplify the self modifying expression pointed to by EXPR_P
1926 (++, --, +=, -=).
1927
1928 PRE_P points to the list where side effects that must happen before
1929 *EXPR_P should be stored.
1930
1931 POST_P points to the list where side effects that must happen after
1932 *EXPR_P should be stored.
1933
1934 WANT_VALUE is nonzero iff we want to use the value of this expression
1935 in another expression. */
1936
1937 static enum gimplify_status
1938 gimplify_self_mod_expr (tree *expr_p, tree *pre_p, tree *post_p,
1939 bool want_value)
1940 {
1941 enum tree_code code;
1942 tree lhs, lvalue, rhs, t1, post = NULL, *orig_post_p = post_p;
1943 bool postfix;
1944 enum tree_code arith_code;
1945 enum gimplify_status ret;
1946
1947 code = TREE_CODE (*expr_p);
1948
1949 gcc_assert (code == POSTINCREMENT_EXPR || code == POSTDECREMENT_EXPR
1950 || code == PREINCREMENT_EXPR || code == PREDECREMENT_EXPR);
1951
1952 /* Prefix or postfix? */
1953 if (code == POSTINCREMENT_EXPR || code == POSTDECREMENT_EXPR)
1954 /* Faster to treat as prefix if result is not used. */
1955 postfix = want_value;
1956 else
1957 postfix = false;
1958
1959 /* For postfix, make sure the inner expression's post side effects
1960 are executed after side effects from this expression. */
1961 if (postfix)
1962 post_p = &post;
1963
1964 /* Add or subtract? */
1965 if (code == PREINCREMENT_EXPR || code == POSTINCREMENT_EXPR)
1966 arith_code = PLUS_EXPR;
1967 else
1968 arith_code = MINUS_EXPR;
1969
1970 /* Gimplify the LHS into a GIMPLE lvalue. */
1971 lvalue = TREE_OPERAND (*expr_p, 0);
1972 ret = gimplify_expr (&lvalue, pre_p, post_p, is_gimple_lvalue, fb_lvalue);
1973 if (ret == GS_ERROR)
1974 return ret;
1975
1976 /* Extract the operands to the arithmetic operation. */
1977 lhs = lvalue;
1978 rhs = TREE_OPERAND (*expr_p, 1);
1979
1980 /* For postfix operator, we evaluate the LHS to an rvalue and then use
1981 that as the result value and in the postqueue operation. */
1982 if (postfix)
1983 {
1984 ret = gimplify_expr (&lhs, pre_p, post_p, is_gimple_val, fb_rvalue);
1985 if (ret == GS_ERROR)
1986 return ret;
1987 }
1988
1989 /* For POINTERs increment, use POINTER_PLUS_EXPR. */
1990 if (POINTER_TYPE_P (TREE_TYPE (lhs)))
1991 {
1992 rhs = fold_convert (sizetype, rhs);
1993 if (arith_code == MINUS_EXPR)
1994 rhs = fold_build1 (NEGATE_EXPR, TREE_TYPE (rhs), rhs);
1995 arith_code = POINTER_PLUS_EXPR;
1996 }
1997
1998 t1 = build2 (arith_code, TREE_TYPE (*expr_p), lhs, rhs);
1999 t1 = build_gimple_modify_stmt (lvalue, t1);
2000
2001 if (postfix)
2002 {
2003 gimplify_and_add (t1, orig_post_p);
2004 append_to_statement_list (post, orig_post_p);
2005 *expr_p = lhs;
2006 return GS_ALL_DONE;
2007 }
2008 else
2009 {
2010 *expr_p = t1;
2011 return GS_OK;
2012 }
2013 }
2014
2015 /* If *EXPR_P has a variable sized type, wrap it in a WITH_SIZE_EXPR. */
2016
2017 static void
2018 maybe_with_size_expr (tree *expr_p)
2019 {
2020 tree expr = *expr_p;
2021 tree type = TREE_TYPE (expr);
2022 tree size;
2023
2024 /* If we've already wrapped this or the type is error_mark_node, we can't do
2025 anything. */
2026 if (TREE_CODE (expr) == WITH_SIZE_EXPR
2027 || type == error_mark_node)
2028 return;
2029
2030 /* If the size isn't known or is a constant, we have nothing to do. */
2031 size = TYPE_SIZE_UNIT (type);
2032 if (!size || TREE_CODE (size) == INTEGER_CST)
2033 return;
2034
2035 /* Otherwise, make a WITH_SIZE_EXPR. */
2036 size = unshare_expr (size);
2037 size = SUBSTITUTE_PLACEHOLDER_IN_EXPR (size, expr);
2038 *expr_p = build2 (WITH_SIZE_EXPR, type, expr, size);
2039 }
2040
2041 /* Subroutine of gimplify_call_expr: Gimplify a single argument. */
2042
2043 static enum gimplify_status
2044 gimplify_arg (tree *expr_p, tree *pre_p)
2045 {
2046 bool (*test) (tree);
2047 fallback_t fb;
2048
2049 /* In general, we allow lvalues for function arguments to avoid
2050 extra overhead of copying large aggregates out of even larger
2051 aggregates into temporaries only to copy the temporaries to
2052 the argument list. Make optimizers happy by pulling out to
2053 temporaries those types that fit in registers. */
2054 if (is_gimple_reg_type (TREE_TYPE (*expr_p)))
2055 test = is_gimple_val, fb = fb_rvalue;
2056 else
2057 test = is_gimple_lvalue, fb = fb_either;
2058
2059 /* If this is a variable sized type, we must remember the size. */
2060 maybe_with_size_expr (expr_p);
2061
2062 /* There is a sequence point before a function call. Side effects in
2063 the argument list must occur before the actual call. So, when
2064 gimplifying arguments, force gimplify_expr to use an internal
2065 post queue which is then appended to the end of PRE_P. */
2066 return gimplify_expr (expr_p, pre_p, NULL, test, fb);
2067 }
2068
2069 /* Gimplify the CALL_EXPR node pointed to by EXPR_P. PRE_P points to the
2070 list where side effects that must happen before *EXPR_P should be stored.
2071 WANT_VALUE is true if the result of the call is desired. */
2072
2073 static enum gimplify_status
2074 gimplify_call_expr (tree *expr_p, tree *pre_p, bool want_value)
2075 {
2076 tree decl, parms, p;
2077 enum gimplify_status ret;
2078 int i, nargs;
2079
2080 gcc_assert (TREE_CODE (*expr_p) == CALL_EXPR);
2081
2082 /* For reliable diagnostics during inlining, it is necessary that
2083 every call_expr be annotated with file and line. */
2084 if (! EXPR_HAS_LOCATION (*expr_p))
2085 SET_EXPR_LOCATION (*expr_p, input_location);
2086
2087 /* This may be a call to a builtin function.
2088
2089 Builtin function calls may be transformed into different
2090 (and more efficient) builtin function calls under certain
2091 circumstances. Unfortunately, gimplification can muck things
2092 up enough that the builtin expanders are not aware that certain
2093 transformations are still valid.
2094
2095 So we attempt transformation/gimplification of the call before
2096 we gimplify the CALL_EXPR. At this time we do not manage to
2097 transform all calls in the same manner as the expanders do, but
2098 we do transform most of them. */
2099 decl = get_callee_fndecl (*expr_p);
2100 if (decl && DECL_BUILT_IN (decl))
2101 {
2102 tree new = fold_call_expr (*expr_p, !want_value);
2103
2104 if (new && new != *expr_p)
2105 {
2106 /* There was a transformation of this call which computes the
2107 same value, but in a more efficient way. Return and try
2108 again. */
2109 *expr_p = new;
2110 return GS_OK;
2111 }
2112
2113 if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL
2114 && DECL_FUNCTION_CODE (decl) == BUILT_IN_VA_START)
2115 {
2116 if (call_expr_nargs (*expr_p) < 2)
2117 {
2118 error ("too few arguments to function %<va_start%>");
2119 *expr_p = build_empty_stmt ();
2120 return GS_OK;
2121 }
2122
2123 if (fold_builtin_next_arg (*expr_p, true))
2124 {
2125 *expr_p = build_empty_stmt ();
2126 return GS_OK;
2127 }
2128 /* Avoid gimplifying the second argument to va_start, which needs
2129 to be the plain PARM_DECL. */
2130 return gimplify_arg (&CALL_EXPR_ARG (*expr_p, 0), pre_p);
2131 }
2132 }
2133
2134 /* There is a sequence point before the call, so any side effects in
2135 the calling expression must occur before the actual call. Force
2136 gimplify_expr to use an internal post queue. */
2137 ret = gimplify_expr (&CALL_EXPR_FN (*expr_p), pre_p, NULL,
2138 is_gimple_call_addr, fb_rvalue);
2139
2140 nargs = call_expr_nargs (*expr_p);
2141
2142 /* Get argument types for verification. */
2143 decl = get_callee_fndecl (*expr_p);
2144 parms = NULL_TREE;
2145 if (decl)
2146 parms = TYPE_ARG_TYPES (TREE_TYPE (decl));
2147 else if (POINTER_TYPE_P (TREE_TYPE (CALL_EXPR_FN (*expr_p))))
2148 parms = TYPE_ARG_TYPES (TREE_TYPE (TREE_TYPE (CALL_EXPR_FN (*expr_p))));
2149
2150 /* Verify if the type of the argument matches that of the function
2151 declaration. If we cannot verify this or there is a mismatch,
2152 mark the call expression so it doesn't get inlined later. */
2153 if (decl && DECL_ARGUMENTS (decl))
2154 {
2155 for (i = 0, p = DECL_ARGUMENTS (decl); i < nargs;
2156 i++, p = TREE_CHAIN (p))
2157 {
2158 /* We cannot distinguish a varargs function from the case
2159 of excess parameters, still deferring the inlining decision
2160 to the callee is possible. */
2161 if (!p)
2162 break;
2163 if (p == error_mark_node
2164 || CALL_EXPR_ARG (*expr_p, i) == error_mark_node
2165 || !fold_convertible_p (DECL_ARG_TYPE (p),
2166 CALL_EXPR_ARG (*expr_p, i)))
2167 {
2168 CALL_CANNOT_INLINE_P (*expr_p) = 1;
2169 break;
2170 }
2171 }
2172 }
2173 else if (parms)
2174 {
2175 for (i = 0, p = parms; i < nargs; i++, p = TREE_CHAIN (p))
2176 {
2177 /* If this is a varargs function defer inlining decision
2178 to callee. */
2179 if (!p)
2180 break;
2181 if (TREE_VALUE (p) == error_mark_node
2182 || CALL_EXPR_ARG (*expr_p, i) == error_mark_node
2183 || TREE_CODE (TREE_VALUE (p)) == VOID_TYPE
2184 || !fold_convertible_p (TREE_VALUE (p),
2185 CALL_EXPR_ARG (*expr_p, i)))
2186 {
2187 CALL_CANNOT_INLINE_P (*expr_p) = 1;
2188 break;
2189 }
2190 }
2191 }
2192 else
2193 {
2194 if (nargs != 0)
2195 CALL_CANNOT_INLINE_P (*expr_p) = 1;
2196 i = 0;
2197 p = NULL_TREE;
2198 }
2199
2200 /* If the last argument is __builtin_va_arg_pack () and it is not
2201 passed as a named argument, decrease the number of CALL_EXPR
2202 arguments and set instead the CALL_EXPR_VA_ARG_PACK flag. */
2203 if (!p
2204 && i < nargs
2205 && TREE_CODE (CALL_EXPR_ARG (*expr_p, nargs - 1)) == CALL_EXPR)
2206 {
2207 tree last_arg = CALL_EXPR_ARG (*expr_p, nargs - 1);
2208 tree last_arg_fndecl = get_callee_fndecl (last_arg);
2209
2210 if (last_arg_fndecl
2211 && TREE_CODE (last_arg_fndecl) == FUNCTION_DECL
2212 && DECL_BUILT_IN_CLASS (last_arg_fndecl) == BUILT_IN_NORMAL
2213 && DECL_FUNCTION_CODE (last_arg_fndecl) == BUILT_IN_VA_ARG_PACK)
2214 {
2215 tree call = *expr_p;
2216
2217 --nargs;
2218 *expr_p = build_call_array (TREE_TYPE (call), CALL_EXPR_FN (call),
2219 nargs, CALL_EXPR_ARGP (call));
2220 /* Copy all CALL_EXPR flags, locus and block, except
2221 CALL_EXPR_VA_ARG_PACK flag. */
2222 CALL_EXPR_STATIC_CHAIN (*expr_p) = CALL_EXPR_STATIC_CHAIN (call);
2223 CALL_EXPR_TAILCALL (*expr_p) = CALL_EXPR_TAILCALL (call);
2224 CALL_EXPR_RETURN_SLOT_OPT (*expr_p)
2225 = CALL_EXPR_RETURN_SLOT_OPT (call);
2226 CALL_FROM_THUNK_P (*expr_p) = CALL_FROM_THUNK_P (call);
2227 CALL_CANNOT_INLINE_P (*expr_p)
2228 = CALL_CANNOT_INLINE_P (call);
2229 TREE_NOTHROW (*expr_p) = TREE_NOTHROW (call);
2230 SET_EXPR_LOCUS (*expr_p, EXPR_LOCUS (call));
2231 TREE_BLOCK (*expr_p) = TREE_BLOCK (call);
2232 /* Set CALL_EXPR_VA_ARG_PACK. */
2233 CALL_EXPR_VA_ARG_PACK (*expr_p) = 1;
2234 }
2235 }
2236
2237 /* Finally, gimplify the function arguments. */
2238 for (i = (PUSH_ARGS_REVERSED ? nargs - 1 : 0);
2239 PUSH_ARGS_REVERSED ? i >= 0 : i < nargs;
2240 PUSH_ARGS_REVERSED ? i-- : i++)
2241 {
2242 enum gimplify_status t;
2243
2244 t = gimplify_arg (&CALL_EXPR_ARG (*expr_p, i), pre_p);
2245
2246 if (t == GS_ERROR)
2247 ret = GS_ERROR;
2248 }
2249
2250 /* Try this again in case gimplification exposed something. */
2251 if (ret != GS_ERROR)
2252 {
2253 tree new = fold_call_expr (*expr_p, !want_value);
2254
2255 if (new && new != *expr_p)
2256 {
2257 /* There was a transformation of this call which computes the
2258 same value, but in a more efficient way. Return and try
2259 again. */
2260 *expr_p = new;
2261 return GS_OK;
2262 }
2263 }
2264
2265 /* If the function is "const" or "pure", then clear TREE_SIDE_EFFECTS on its
2266 decl. This allows us to eliminate redundant or useless
2267 calls to "const" functions. */
2268 if (TREE_CODE (*expr_p) == CALL_EXPR
2269 && (call_expr_flags (*expr_p) & (ECF_CONST | ECF_PURE)))
2270 TREE_SIDE_EFFECTS (*expr_p) = 0;
2271
2272 return ret;
2273 }
2274
2275 /* Handle shortcut semantics in the predicate operand of a COND_EXPR by
2276 rewriting it into multiple COND_EXPRs, and possibly GOTO_EXPRs.
2277
2278 TRUE_LABEL_P and FALSE_LABEL_P point to the labels to jump to if the
2279 condition is true or false, respectively. If null, we should generate
2280 our own to skip over the evaluation of this specific expression.
2281
2282 This function is the tree equivalent of do_jump.
2283
2284 shortcut_cond_r should only be called by shortcut_cond_expr. */
2285
2286 static tree
2287 shortcut_cond_r (tree pred, tree *true_label_p, tree *false_label_p)
2288 {
2289 tree local_label = NULL_TREE;
2290 tree t, expr = NULL;
2291
2292 /* OK, it's not a simple case; we need to pull apart the COND_EXPR to
2293 retain the shortcut semantics. Just insert the gotos here;
2294 shortcut_cond_expr will append the real blocks later. */
2295 if (TREE_CODE (pred) == TRUTH_ANDIF_EXPR)
2296 {
2297 /* Turn if (a && b) into
2298
2299 if (a); else goto no;
2300 if (b) goto yes; else goto no;
2301 (no:) */
2302
2303 if (false_label_p == NULL)
2304 false_label_p = &local_label;
2305
2306 t = shortcut_cond_r (TREE_OPERAND (pred, 0), NULL, false_label_p);
2307 append_to_statement_list (t, &expr);
2308
2309 t = shortcut_cond_r (TREE_OPERAND (pred, 1), true_label_p,
2310 false_label_p);
2311 append_to_statement_list (t, &expr);
2312 }
2313 else if (TREE_CODE (pred) == TRUTH_ORIF_EXPR)
2314 {
2315 /* Turn if (a || b) into
2316
2317 if (a) goto yes;
2318 if (b) goto yes; else goto no;
2319 (yes:) */
2320
2321 if (true_label_p == NULL)
2322 true_label_p = &local_label;
2323
2324 t = shortcut_cond_r (TREE_OPERAND (pred, 0), true_label_p, NULL);
2325 append_to_statement_list (t, &expr);
2326
2327 t = shortcut_cond_r (TREE_OPERAND (pred, 1), true_label_p,
2328 false_label_p);
2329 append_to_statement_list (t, &expr);
2330 }
2331 else if (TREE_CODE (pred) == COND_EXPR)
2332 {
2333 /* As long as we're messing with gotos, turn if (a ? b : c) into
2334 if (a)
2335 if (b) goto yes; else goto no;
2336 else
2337 if (c) goto yes; else goto no; */
2338 expr = build3 (COND_EXPR, void_type_node, TREE_OPERAND (pred, 0),
2339 shortcut_cond_r (TREE_OPERAND (pred, 1), true_label_p,
2340 false_label_p),
2341 shortcut_cond_r (TREE_OPERAND (pred, 2), true_label_p,
2342 false_label_p));
2343 }
2344 else
2345 {
2346 expr = build3 (COND_EXPR, void_type_node, pred,
2347 build_and_jump (true_label_p),
2348 build_and_jump (false_label_p));
2349 }
2350
2351 if (local_label)
2352 {
2353 t = build1 (LABEL_EXPR, void_type_node, local_label);
2354 append_to_statement_list (t, &expr);
2355 }
2356
2357 return expr;
2358 }
2359
2360 static tree
2361 shortcut_cond_expr (tree expr)
2362 {
2363 tree pred = TREE_OPERAND (expr, 0);
2364 tree then_ = TREE_OPERAND (expr, 1);
2365 tree else_ = TREE_OPERAND (expr, 2);
2366 tree true_label, false_label, end_label, t;
2367 tree *true_label_p;
2368 tree *false_label_p;
2369 bool emit_end, emit_false, jump_over_else;
2370 bool then_se = then_ && TREE_SIDE_EFFECTS (then_);
2371 bool else_se = else_ && TREE_SIDE_EFFECTS (else_);
2372
2373 /* First do simple transformations. */
2374 if (!else_se)
2375 {
2376 /* If there is no 'else', turn (a && b) into if (a) if (b). */
2377 while (TREE_CODE (pred) == TRUTH_ANDIF_EXPR)
2378 {
2379 TREE_OPERAND (expr, 0) = TREE_OPERAND (pred, 1);
2380 then_ = shortcut_cond_expr (expr);
2381 then_se = then_ && TREE_SIDE_EFFECTS (then_);
2382 pred = TREE_OPERAND (pred, 0);
2383 expr = build3 (COND_EXPR, void_type_node, pred, then_, NULL_TREE);
2384 }
2385 }
2386 if (!then_se)
2387 {
2388 /* If there is no 'then', turn
2389 if (a || b); else d
2390 into
2391 if (a); else if (b); else d. */
2392 while (TREE_CODE (pred) == TRUTH_ORIF_EXPR)
2393 {
2394 TREE_OPERAND (expr, 0) = TREE_OPERAND (pred, 1);
2395 else_ = shortcut_cond_expr (expr);
2396 else_se = else_ && TREE_SIDE_EFFECTS (else_);
2397 pred = TREE_OPERAND (pred, 0);
2398 expr = build3 (COND_EXPR, void_type_node, pred, NULL_TREE, else_);
2399 }
2400 }
2401
2402 /* If we're done, great. */
2403 if (TREE_CODE (pred) != TRUTH_ANDIF_EXPR
2404 && TREE_CODE (pred) != TRUTH_ORIF_EXPR)
2405 return expr;
2406
2407 /* Otherwise we need to mess with gotos. Change
2408 if (a) c; else d;
2409 to
2410 if (a); else goto no;
2411 c; goto end;
2412 no: d; end:
2413 and recursively gimplify the condition. */
2414
2415 true_label = false_label = end_label = NULL_TREE;
2416
2417 /* If our arms just jump somewhere, hijack those labels so we don't
2418 generate jumps to jumps. */
2419
2420 if (then_
2421 && TREE_CODE (then_) == GOTO_EXPR
2422 && TREE_CODE (GOTO_DESTINATION (then_)) == LABEL_DECL)
2423 {
2424 true_label = GOTO_DESTINATION (then_);
2425 then_ = NULL;
2426 then_se = false;
2427 }
2428
2429 if (else_
2430 && TREE_CODE (else_) == GOTO_EXPR
2431 && TREE_CODE (GOTO_DESTINATION (else_)) == LABEL_DECL)
2432 {
2433 false_label = GOTO_DESTINATION (else_);
2434 else_ = NULL;
2435 else_se = false;
2436 }
2437
2438 /* If we aren't hijacking a label for the 'then' branch, it falls through. */
2439 if (true_label)
2440 true_label_p = &true_label;
2441 else
2442 true_label_p = NULL;
2443
2444 /* The 'else' branch also needs a label if it contains interesting code. */
2445 if (false_label || else_se)
2446 false_label_p = &false_label;
2447 else
2448 false_label_p = NULL;
2449
2450 /* If there was nothing else in our arms, just forward the label(s). */
2451 if (!then_se && !else_se)
2452 return shortcut_cond_r (pred, true_label_p, false_label_p);
2453
2454 /* If our last subexpression already has a terminal label, reuse it. */
2455 if (else_se)
2456 expr = expr_last (else_);
2457 else if (then_se)
2458 expr = expr_last (then_);
2459 else
2460 expr = NULL;
2461 if (expr && TREE_CODE (expr) == LABEL_EXPR)
2462 end_label = LABEL_EXPR_LABEL (expr);
2463
2464 /* If we don't care about jumping to the 'else' branch, jump to the end
2465 if the condition is false. */
2466 if (!false_label_p)
2467 false_label_p = &end_label;
2468
2469 /* We only want to emit these labels if we aren't hijacking them. */
2470 emit_end = (end_label == NULL_TREE);
2471 emit_false = (false_label == NULL_TREE);
2472
2473 /* We only emit the jump over the else clause if we have to--if the
2474 then clause may fall through. Otherwise we can wind up with a
2475 useless jump and a useless label at the end of gimplified code,
2476 which will cause us to think that this conditional as a whole
2477 falls through even if it doesn't. If we then inline a function
2478 which ends with such a condition, that can cause us to issue an
2479 inappropriate warning about control reaching the end of a
2480 non-void function. */
2481 jump_over_else = block_may_fallthru (then_);
2482
2483 pred = shortcut_cond_r (pred, true_label_p, false_label_p);
2484
2485 expr = NULL;
2486 append_to_statement_list (pred, &expr);
2487
2488 append_to_statement_list (then_, &expr);
2489 if (else_se)
2490 {
2491 if (jump_over_else)
2492 {
2493 t = build_and_jump (&end_label);
2494 append_to_statement_list (t, &expr);
2495 }
2496 if (emit_false)
2497 {
2498 t = build1 (LABEL_EXPR, void_type_node, false_label);
2499 append_to_statement_list (t, &expr);
2500 }
2501 append_to_statement_list (else_, &expr);
2502 }
2503 if (emit_end && end_label)
2504 {
2505 t = build1 (LABEL_EXPR, void_type_node, end_label);
2506 append_to_statement_list (t, &expr);
2507 }
2508
2509 return expr;
2510 }
2511
2512 /* EXPR is used in a boolean context; make sure it has BOOLEAN_TYPE. */
2513
2514 tree
2515 gimple_boolify (tree expr)
2516 {
2517 tree type = TREE_TYPE (expr);
2518
2519 if (TREE_CODE (type) == BOOLEAN_TYPE)
2520 return expr;
2521
2522 switch (TREE_CODE (expr))
2523 {
2524 case TRUTH_AND_EXPR:
2525 case TRUTH_OR_EXPR:
2526 case TRUTH_XOR_EXPR:
2527 case TRUTH_ANDIF_EXPR:
2528 case TRUTH_ORIF_EXPR:
2529 /* Also boolify the arguments of truth exprs. */
2530 TREE_OPERAND (expr, 1) = gimple_boolify (TREE_OPERAND (expr, 1));
2531 /* FALLTHRU */
2532
2533 case TRUTH_NOT_EXPR:
2534 TREE_OPERAND (expr, 0) = gimple_boolify (TREE_OPERAND (expr, 0));
2535 /* FALLTHRU */
2536
2537 case EQ_EXPR: case NE_EXPR:
2538 case LE_EXPR: case GE_EXPR: case LT_EXPR: case GT_EXPR:
2539 /* These expressions always produce boolean results. */
2540 TREE_TYPE (expr) = boolean_type_node;
2541 return expr;
2542
2543 default:
2544 /* Other expressions that get here must have boolean values, but
2545 might need to be converted to the appropriate mode. */
2546 return fold_convert (boolean_type_node, expr);
2547 }
2548 }
2549
2550 /* Given a conditional expression *EXPR_P without side effects, gimplify
2551 its operands. New statements are inserted to PRE_P. */
2552
2553 static enum gimplify_status
2554 gimplify_pure_cond_expr (tree *expr_p, tree *pre_p)
2555 {
2556 tree expr = *expr_p, cond;
2557 enum gimplify_status ret, tret;
2558 enum tree_code code;
2559
2560 cond = gimple_boolify (COND_EXPR_COND (expr));
2561
2562 /* We need to handle && and || specially, as their gimplification
2563 creates pure cond_expr, thus leading to an infinite cycle otherwise. */
2564 code = TREE_CODE (cond);
2565 if (code == TRUTH_ANDIF_EXPR)
2566 TREE_SET_CODE (cond, TRUTH_AND_EXPR);
2567 else if (code == TRUTH_ORIF_EXPR)
2568 TREE_SET_CODE (cond, TRUTH_OR_EXPR);
2569 ret = gimplify_expr (&cond, pre_p, NULL,
2570 is_gimple_condexpr, fb_rvalue);
2571 COND_EXPR_COND (*expr_p) = cond;
2572
2573 tret = gimplify_expr (&COND_EXPR_THEN (expr), pre_p, NULL,
2574 is_gimple_val, fb_rvalue);
2575 ret = MIN (ret, tret);
2576 tret = gimplify_expr (&COND_EXPR_ELSE (expr), pre_p, NULL,
2577 is_gimple_val, fb_rvalue);
2578
2579 return MIN (ret, tret);
2580 }
2581
2582 /* Returns true if evaluating EXPR could trap.
2583 EXPR is GENERIC, while tree_could_trap_p can be called
2584 only on GIMPLE. */
2585
2586 static bool
2587 generic_expr_could_trap_p (tree expr)
2588 {
2589 unsigned i, n;
2590
2591 if (!expr || is_gimple_val (expr))
2592 return false;
2593
2594 if (!EXPR_P (expr) || tree_could_trap_p (expr))
2595 return true;
2596
2597 n = TREE_OPERAND_LENGTH (expr);
2598 for (i = 0; i < n; i++)
2599 if (generic_expr_could_trap_p (TREE_OPERAND (expr, i)))
2600 return true;
2601
2602 return false;
2603 }
2604
2605 /* Convert the conditional expression pointed to by EXPR_P '(p) ? a : b;'
2606 into
2607
2608 if (p) if (p)
2609 t1 = a; a;
2610 else or else
2611 t1 = b; b;
2612 t1;
2613
2614 The second form is used when *EXPR_P is of type void.
2615
2616 TARGET is the tree for T1 above.
2617
2618 PRE_P points to the list where side effects that must happen before
2619 *EXPR_P should be stored. */
2620
2621 static enum gimplify_status
2622 gimplify_cond_expr (tree *expr_p, tree *pre_p, fallback_t fallback)
2623 {
2624 tree expr = *expr_p;
2625 tree tmp, tmp2, type;
2626 enum gimplify_status ret;
2627
2628 type = TREE_TYPE (expr);
2629
2630 /* If this COND_EXPR has a value, copy the values into a temporary within
2631 the arms. */
2632 if (! VOID_TYPE_P (type))
2633 {
2634 tree result;
2635
2636 /* If an rvalue is ok or we do not require an lvalue, avoid creating
2637 an addressable temporary. */
2638 if (((fallback & fb_rvalue)
2639 || !(fallback & fb_lvalue))
2640 && !TREE_ADDRESSABLE (type))
2641 {
2642 if (gimplify_ctxp->allow_rhs_cond_expr
2643 /* If either branch has side effects or could trap, it can't be
2644 evaluated unconditionally. */
2645 && !TREE_SIDE_EFFECTS (TREE_OPERAND (*expr_p, 1))
2646 && !generic_expr_could_trap_p (TREE_OPERAND (*expr_p, 1))
2647 && !TREE_SIDE_EFFECTS (TREE_OPERAND (*expr_p, 2))
2648 && !generic_expr_could_trap_p (TREE_OPERAND (*expr_p, 2)))
2649 return gimplify_pure_cond_expr (expr_p, pre_p);
2650
2651 result = tmp2 = tmp = create_tmp_var (TREE_TYPE (expr), "iftmp");
2652 ret = GS_ALL_DONE;
2653 }
2654 else
2655 {
2656 tree type = build_pointer_type (TREE_TYPE (expr));
2657
2658 if (TREE_TYPE (TREE_OPERAND (expr, 1)) != void_type_node)
2659 TREE_OPERAND (expr, 1) =
2660 build_fold_addr_expr (TREE_OPERAND (expr, 1));
2661
2662 if (TREE_TYPE (TREE_OPERAND (expr, 2)) != void_type_node)
2663 TREE_OPERAND (expr, 2) =
2664 build_fold_addr_expr (TREE_OPERAND (expr, 2));
2665
2666 tmp2 = tmp = create_tmp_var (type, "iftmp");
2667
2668 expr = build3 (COND_EXPR, void_type_node, TREE_OPERAND (expr, 0),
2669 TREE_OPERAND (expr, 1), TREE_OPERAND (expr, 2));
2670
2671 result = build_fold_indirect_ref (tmp);
2672 ret = GS_ALL_DONE;
2673 }
2674
2675 /* Build the then clause, 't1 = a;'. But don't build an assignment
2676 if this branch is void; in C++ it can be, if it's a throw. */
2677 if (TREE_TYPE (TREE_OPERAND (expr, 1)) != void_type_node)
2678 TREE_OPERAND (expr, 1)
2679 = build_gimple_modify_stmt (tmp, TREE_OPERAND (expr, 1));
2680
2681 /* Build the else clause, 't1 = b;'. */
2682 if (TREE_TYPE (TREE_OPERAND (expr, 2)) != void_type_node)
2683 TREE_OPERAND (expr, 2)
2684 = build_gimple_modify_stmt (tmp2, TREE_OPERAND (expr, 2));
2685
2686 TREE_TYPE (expr) = void_type_node;
2687 recalculate_side_effects (expr);
2688
2689 /* Move the COND_EXPR to the prequeue. */
2690 gimplify_and_add (expr, pre_p);
2691
2692 *expr_p = result;
2693 return ret;
2694 }
2695
2696 /* Make sure the condition has BOOLEAN_TYPE. */
2697 TREE_OPERAND (expr, 0) = gimple_boolify (TREE_OPERAND (expr, 0));
2698
2699 /* Break apart && and || conditions. */
2700 if (TREE_CODE (TREE_OPERAND (expr, 0)) == TRUTH_ANDIF_EXPR
2701 || TREE_CODE (TREE_OPERAND (expr, 0)) == TRUTH_ORIF_EXPR)
2702 {
2703 expr = shortcut_cond_expr (expr);
2704
2705 if (expr != *expr_p)
2706 {
2707 *expr_p = expr;
2708
2709 /* We can't rely on gimplify_expr to re-gimplify the expanded
2710 form properly, as cleanups might cause the target labels to be
2711 wrapped in a TRY_FINALLY_EXPR. To prevent that, we need to
2712 set up a conditional context. */
2713 gimple_push_condition ();
2714 gimplify_stmt (expr_p);
2715 gimple_pop_condition (pre_p);
2716
2717 return GS_ALL_DONE;
2718 }
2719 }
2720
2721 /* Now do the normal gimplification. */
2722 ret = gimplify_expr (&TREE_OPERAND (expr, 0), pre_p, NULL,
2723 is_gimple_condexpr, fb_rvalue);
2724
2725 gimple_push_condition ();
2726
2727 gimplify_to_stmt_list (&TREE_OPERAND (expr, 1));
2728 gimplify_to_stmt_list (&TREE_OPERAND (expr, 2));
2729 recalculate_side_effects (expr);
2730
2731 gimple_pop_condition (pre_p);
2732
2733 if (ret == GS_ERROR)
2734 ;
2735 else if (TREE_SIDE_EFFECTS (TREE_OPERAND (expr, 1)))
2736 ret = GS_ALL_DONE;
2737 else if (TREE_SIDE_EFFECTS (TREE_OPERAND (expr, 2)))
2738 /* Rewrite "if (a); else b" to "if (!a) b" */
2739 {
2740 TREE_OPERAND (expr, 0) = invert_truthvalue (TREE_OPERAND (expr, 0));
2741 ret = gimplify_expr (&TREE_OPERAND (expr, 0), pre_p, NULL,
2742 is_gimple_condexpr, fb_rvalue);
2743
2744 tmp = TREE_OPERAND (expr, 1);
2745 TREE_OPERAND (expr, 1) = TREE_OPERAND (expr, 2);
2746 TREE_OPERAND (expr, 2) = tmp;
2747 }
2748 else
2749 /* Both arms are empty; replace the COND_EXPR with its predicate. */
2750 expr = TREE_OPERAND (expr, 0);
2751
2752 *expr_p = expr;
2753 return ret;
2754 }
2755
2756 /* A subroutine of gimplify_modify_expr. Replace a MODIFY_EXPR with
2757 a call to __builtin_memcpy. */
2758
2759 static enum gimplify_status
2760 gimplify_modify_expr_to_memcpy (tree *expr_p, tree size, bool want_value)
2761 {
2762 tree t, to, to_ptr, from, from_ptr;
2763
2764 to = GENERIC_TREE_OPERAND (*expr_p, 0);
2765 from = GENERIC_TREE_OPERAND (*expr_p, 1);
2766
2767 from_ptr = build_fold_addr_expr (from);
2768
2769 to_ptr = build_fold_addr_expr (to);
2770 t = implicit_built_in_decls[BUILT_IN_MEMCPY];
2771 t = build_call_expr (t, 3, to_ptr, from_ptr, size);
2772
2773 if (want_value)
2774 {
2775 t = build1 (NOP_EXPR, TREE_TYPE (to_ptr), t);
2776 t = build1 (INDIRECT_REF, TREE_TYPE (to), t);
2777 }
2778
2779 *expr_p = t;
2780 return GS_OK;
2781 }
2782
2783 /* A subroutine of gimplify_modify_expr. Replace a MODIFY_EXPR with
2784 a call to __builtin_memset. In this case we know that the RHS is
2785 a CONSTRUCTOR with an empty element list. */
2786
2787 static enum gimplify_status
2788 gimplify_modify_expr_to_memset (tree *expr_p, tree size, bool want_value)
2789 {
2790 tree t, to, to_ptr;
2791
2792 to = GENERIC_TREE_OPERAND (*expr_p, 0);
2793
2794 to_ptr = build_fold_addr_expr (to);
2795 t = implicit_built_in_decls[BUILT_IN_MEMSET];
2796 t = build_call_expr (t, 3, to_ptr, integer_zero_node, size);
2797
2798 if (want_value)
2799 {
2800 t = build1 (NOP_EXPR, TREE_TYPE (to_ptr), t);
2801 t = build1 (INDIRECT_REF, TREE_TYPE (to), t);
2802 }
2803
2804 *expr_p = t;
2805 return GS_OK;
2806 }
2807
2808 /* A subroutine of gimplify_init_ctor_preeval. Called via walk_tree,
2809 determine, cautiously, if a CONSTRUCTOR overlaps the lhs of an
2810 assignment. Returns non-null if we detect a potential overlap. */
2811
2812 struct gimplify_init_ctor_preeval_data
2813 {
2814 /* The base decl of the lhs object. May be NULL, in which case we
2815 have to assume the lhs is indirect. */
2816 tree lhs_base_decl;
2817
2818 /* The alias set of the lhs object. */
2819 alias_set_type lhs_alias_set;
2820 };
2821
2822 static tree
2823 gimplify_init_ctor_preeval_1 (tree *tp, int *walk_subtrees, void *xdata)
2824 {
2825 struct gimplify_init_ctor_preeval_data *data
2826 = (struct gimplify_init_ctor_preeval_data *) xdata;
2827 tree t = *tp;
2828
2829 /* If we find the base object, obviously we have overlap. */
2830 if (data->lhs_base_decl == t)
2831 return t;
2832
2833 /* If the constructor component is indirect, determine if we have a
2834 potential overlap with the lhs. The only bits of information we
2835 have to go on at this point are addressability and alias sets. */
2836 if (TREE_CODE (t) == INDIRECT_REF
2837 && (!data->lhs_base_decl || TREE_ADDRESSABLE (data->lhs_base_decl))
2838 && alias_sets_conflict_p (data->lhs_alias_set, get_alias_set (t)))
2839 return t;
2840
2841 /* If the constructor component is a call, determine if it can hide a
2842 potential overlap with the lhs through an INDIRECT_REF like above. */
2843 if (TREE_CODE (t) == CALL_EXPR)
2844 {
2845 tree type, fntype = TREE_TYPE (TREE_TYPE (CALL_EXPR_FN (t)));
2846
2847 for (type = TYPE_ARG_TYPES (fntype); type; type = TREE_CHAIN (type))
2848 if (POINTER_TYPE_P (TREE_VALUE (type))
2849 && (!data->lhs_base_decl || TREE_ADDRESSABLE (data->lhs_base_decl))
2850 && alias_sets_conflict_p (data->lhs_alias_set,
2851 get_alias_set
2852 (TREE_TYPE (TREE_VALUE (type)))))
2853 return t;
2854 }
2855
2856 if (IS_TYPE_OR_DECL_P (t))
2857 *walk_subtrees = 0;
2858 return NULL;
2859 }
2860
2861 /* A subroutine of gimplify_init_constructor. Pre-evaluate *EXPR_P,
2862 force values that overlap with the lhs (as described by *DATA)
2863 into temporaries. */
2864
2865 static void
2866 gimplify_init_ctor_preeval (tree *expr_p, tree *pre_p, tree *post_p,
2867 struct gimplify_init_ctor_preeval_data *data)
2868 {
2869 enum gimplify_status one;
2870
2871 /* If the value is invariant, then there's nothing to pre-evaluate.
2872 But ensure it doesn't have any side-effects since a SAVE_EXPR is
2873 invariant but has side effects and might contain a reference to
2874 the object we're initializing. */
2875 if (TREE_INVARIANT (*expr_p) && !TREE_SIDE_EFFECTS (*expr_p))
2876 return;
2877
2878 /* If the type has non-trivial constructors, we can't pre-evaluate. */
2879 if (TREE_ADDRESSABLE (TREE_TYPE (*expr_p)))
2880 return;
2881
2882 /* Recurse for nested constructors. */
2883 if (TREE_CODE (*expr_p) == CONSTRUCTOR)
2884 {
2885 unsigned HOST_WIDE_INT ix;
2886 constructor_elt *ce;
2887 VEC(constructor_elt,gc) *v = CONSTRUCTOR_ELTS (*expr_p);
2888
2889 for (ix = 0; VEC_iterate (constructor_elt, v, ix, ce); ix++)
2890 gimplify_init_ctor_preeval (&ce->value, pre_p, post_p, data);
2891 return;
2892 }
2893
2894 /* If this is a variable sized type, we must remember the size. */
2895 maybe_with_size_expr (expr_p);
2896
2897 /* Gimplify the constructor element to something appropriate for the rhs
2898 of a MODIFY_EXPR. Given that we know the lhs is an aggregate, we know
2899 the gimplifier will consider this a store to memory. Doing this
2900 gimplification now means that we won't have to deal with complicated
2901 language-specific trees, nor trees like SAVE_EXPR that can induce
2902 exponential search behavior. */
2903 one = gimplify_expr (expr_p, pre_p, post_p, is_gimple_mem_rhs, fb_rvalue);
2904 if (one == GS_ERROR)
2905 {
2906 *expr_p = NULL;
2907 return;
2908 }
2909
2910 /* If we gimplified to a bare decl, we can be sure that it doesn't overlap
2911 with the lhs, since "a = { .x=a }" doesn't make sense. This will
2912 always be true for all scalars, since is_gimple_mem_rhs insists on a
2913 temporary variable for them. */
2914 if (DECL_P (*expr_p))
2915 return;
2916
2917 /* If this is of variable size, we have no choice but to assume it doesn't
2918 overlap since we can't make a temporary for it. */
2919 if (TREE_CODE (TYPE_SIZE (TREE_TYPE (*expr_p))) != INTEGER_CST)
2920 return;
2921
2922 /* Otherwise, we must search for overlap ... */
2923 if (!walk_tree (expr_p, gimplify_init_ctor_preeval_1, data, NULL))
2924 return;
2925
2926 /* ... and if found, force the value into a temporary. */
2927 *expr_p = get_formal_tmp_var (*expr_p, pre_p);
2928 }
2929
2930 /* A subroutine of gimplify_init_ctor_eval. Create a loop for
2931 a RANGE_EXPR in a CONSTRUCTOR for an array.
2932
2933 var = lower;
2934 loop_entry:
2935 object[var] = value;
2936 if (var == upper)
2937 goto loop_exit;
2938 var = var + 1;
2939 goto loop_entry;
2940 loop_exit:
2941
2942 We increment var _after_ the loop exit check because we might otherwise
2943 fail if upper == TYPE_MAX_VALUE (type for upper).
2944
2945 Note that we never have to deal with SAVE_EXPRs here, because this has
2946 already been taken care of for us, in gimplify_init_ctor_preeval(). */
2947
2948 static void gimplify_init_ctor_eval (tree, VEC(constructor_elt,gc) *,
2949 tree *, bool);
2950
2951 static void
2952 gimplify_init_ctor_eval_range (tree object, tree lower, tree upper,
2953 tree value, tree array_elt_type,
2954 tree *pre_p, bool cleared)
2955 {
2956 tree loop_entry_label, loop_exit_label;
2957 tree var, var_type, cref, tmp;
2958
2959 loop_entry_label = create_artificial_label ();
2960 loop_exit_label = create_artificial_label ();
2961
2962 /* Create and initialize the index variable. */
2963 var_type = TREE_TYPE (upper);
2964 var = create_tmp_var (var_type, NULL);
2965 append_to_statement_list (build_gimple_modify_stmt (var, lower), pre_p);
2966
2967 /* Add the loop entry label. */
2968 append_to_statement_list (build1 (LABEL_EXPR,
2969 void_type_node,
2970 loop_entry_label),
2971 pre_p);
2972
2973 /* Build the reference. */
2974 cref = build4 (ARRAY_REF, array_elt_type, unshare_expr (object),
2975 var, NULL_TREE, NULL_TREE);
2976
2977 /* If we are a constructor, just call gimplify_init_ctor_eval to do
2978 the store. Otherwise just assign value to the reference. */
2979
2980 if (TREE_CODE (value) == CONSTRUCTOR)
2981 /* NB we might have to call ourself recursively through
2982 gimplify_init_ctor_eval if the value is a constructor. */
2983 gimplify_init_ctor_eval (cref, CONSTRUCTOR_ELTS (value),
2984 pre_p, cleared);
2985 else
2986 append_to_statement_list (build_gimple_modify_stmt (cref, value), pre_p);
2987
2988 /* We exit the loop when the index var is equal to the upper bound. */
2989 gimplify_and_add (build3 (COND_EXPR, void_type_node,
2990 build2 (EQ_EXPR, boolean_type_node,
2991 var, upper),
2992 build1 (GOTO_EXPR,
2993 void_type_node,
2994 loop_exit_label),
2995 NULL_TREE),
2996 pre_p);
2997
2998 /* Otherwise, increment the index var... */
2999 tmp = build2 (PLUS_EXPR, var_type, var,
3000 fold_convert (var_type, integer_one_node));
3001 append_to_statement_list (build_gimple_modify_stmt (var, tmp), pre_p);
3002
3003 /* ...and jump back to the loop entry. */
3004 append_to_statement_list (build1 (GOTO_EXPR,
3005 void_type_node,
3006 loop_entry_label),
3007 pre_p);
3008
3009 /* Add the loop exit label. */
3010 append_to_statement_list (build1 (LABEL_EXPR,
3011 void_type_node,
3012 loop_exit_label),
3013 pre_p);
3014 }
3015
3016 /* Return true if FDECL is accessing a field that is zero sized. */
3017
3018 static bool
3019 zero_sized_field_decl (const_tree fdecl)
3020 {
3021 if (TREE_CODE (fdecl) == FIELD_DECL && DECL_SIZE (fdecl)
3022 && integer_zerop (DECL_SIZE (fdecl)))
3023 return true;
3024 return false;
3025 }
3026
3027 /* Return true if TYPE is zero sized. */
3028
3029 static bool
3030 zero_sized_type (const_tree type)
3031 {
3032 if (AGGREGATE_TYPE_P (type) && TYPE_SIZE (type)
3033 && integer_zerop (TYPE_SIZE (type)))
3034 return true;
3035 return false;
3036 }
3037
3038 /* A subroutine of gimplify_init_constructor. Generate individual
3039 MODIFY_EXPRs for a CONSTRUCTOR. OBJECT is the LHS against which the
3040 assignments should happen. ELTS is the CONSTRUCTOR_ELTS of the
3041 CONSTRUCTOR. CLEARED is true if the entire LHS object has been
3042 zeroed first. */
3043
3044 static void
3045 gimplify_init_ctor_eval (tree object, VEC(constructor_elt,gc) *elts,
3046 tree *pre_p, bool cleared)
3047 {
3048 tree array_elt_type = NULL;
3049 unsigned HOST_WIDE_INT ix;
3050 tree purpose, value;
3051
3052 if (TREE_CODE (TREE_TYPE (object)) == ARRAY_TYPE)
3053 array_elt_type = TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (object)));
3054
3055 FOR_EACH_CONSTRUCTOR_ELT (elts, ix, purpose, value)
3056 {
3057 tree cref, init;
3058
3059 /* NULL values are created above for gimplification errors. */
3060 if (value == NULL)
3061 continue;
3062
3063 if (cleared && initializer_zerop (value))
3064 continue;
3065
3066 /* ??? Here's to hoping the front end fills in all of the indices,
3067 so we don't have to figure out what's missing ourselves. */
3068 gcc_assert (purpose);
3069
3070 /* Skip zero-sized fields, unless value has side-effects. This can
3071 happen with calls to functions returning a zero-sized type, which
3072 we shouldn't discard. As a number of downstream passes don't
3073 expect sets of zero-sized fields, we rely on the gimplification of
3074 the MODIFY_EXPR we make below to drop the assignment statement. */
3075 if (! TREE_SIDE_EFFECTS (value) && zero_sized_field_decl (purpose))
3076 continue;
3077
3078 /* If we have a RANGE_EXPR, we have to build a loop to assign the
3079 whole range. */
3080 if (TREE_CODE (purpose) == RANGE_EXPR)
3081 {
3082 tree lower = TREE_OPERAND (purpose, 0);
3083 tree upper = TREE_OPERAND (purpose, 1);
3084
3085 /* If the lower bound is equal to upper, just treat it as if
3086 upper was the index. */
3087 if (simple_cst_equal (lower, upper))
3088 purpose = upper;
3089 else
3090 {
3091 gimplify_init_ctor_eval_range (object, lower, upper, value,
3092 array_elt_type, pre_p, cleared);
3093 continue;
3094 }
3095 }
3096
3097 if (array_elt_type)
3098 {
3099 /* Do not use bitsizetype for ARRAY_REF indices. */
3100 if (TYPE_DOMAIN (TREE_TYPE (object)))
3101 purpose = fold_convert (TREE_TYPE (TYPE_DOMAIN (TREE_TYPE (object))),
3102 purpose);
3103 cref = build4 (ARRAY_REF, array_elt_type, unshare_expr (object),
3104 purpose, NULL_TREE, NULL_TREE);
3105 }
3106 else
3107 {
3108 gcc_assert (TREE_CODE (purpose) == FIELD_DECL);
3109 cref = build3 (COMPONENT_REF, TREE_TYPE (purpose),
3110 unshare_expr (object), purpose, NULL_TREE);
3111 }
3112
3113 if (TREE_CODE (value) == CONSTRUCTOR
3114 && TREE_CODE (TREE_TYPE (value)) != VECTOR_TYPE)
3115 gimplify_init_ctor_eval (cref, CONSTRUCTOR_ELTS (value),
3116 pre_p, cleared);
3117 else
3118 {
3119 init = build2 (INIT_EXPR, TREE_TYPE (cref), cref, value);
3120 gimplify_and_add (init, pre_p);
3121 }
3122 }
3123 }
3124
3125 /* A subroutine of gimplify_modify_expr. Break out elements of a
3126 CONSTRUCTOR used as an initializer into separate MODIFY_EXPRs.
3127
3128 Note that we still need to clear any elements that don't have explicit
3129 initializers, so if not all elements are initialized we keep the
3130 original MODIFY_EXPR, we just remove all of the constructor elements.
3131
3132 If NOTIFY_TEMP_CREATION is true, do not gimplify, just return
3133 GS_ERROR if we would have to create a temporary when gimplifying
3134 this constructor. Otherwise, return GS_OK.
3135
3136 If NOTIFY_TEMP_CREATION is false, just do the gimplification. */
3137
3138 static enum gimplify_status
3139 gimplify_init_constructor (tree *expr_p, tree *pre_p,
3140 tree *post_p, bool want_value,
3141 bool notify_temp_creation)
3142 {
3143 tree object;
3144 tree ctor = GENERIC_TREE_OPERAND (*expr_p, 1);
3145 tree type = TREE_TYPE (ctor);
3146 enum gimplify_status ret;
3147 VEC(constructor_elt,gc) *elts;
3148
3149 if (TREE_CODE (ctor) != CONSTRUCTOR)
3150 return GS_UNHANDLED;
3151
3152 if (!notify_temp_creation)
3153 {
3154 ret = gimplify_expr (&GENERIC_TREE_OPERAND (*expr_p, 0), pre_p, post_p,
3155 is_gimple_lvalue, fb_lvalue);
3156 if (ret == GS_ERROR)
3157 return ret;
3158 }
3159 object = GENERIC_TREE_OPERAND (*expr_p, 0);
3160
3161 elts = CONSTRUCTOR_ELTS (ctor);
3162
3163 ret = GS_ALL_DONE;
3164 switch (TREE_CODE (type))
3165 {
3166 case RECORD_TYPE:
3167 case UNION_TYPE:
3168 case QUAL_UNION_TYPE:
3169 case ARRAY_TYPE:
3170 {
3171 struct gimplify_init_ctor_preeval_data preeval_data;
3172 HOST_WIDE_INT num_type_elements, num_ctor_elements;
3173 HOST_WIDE_INT num_nonzero_elements;
3174 bool cleared, valid_const_initializer;
3175
3176 /* Aggregate types must lower constructors to initialization of
3177 individual elements. The exception is that a CONSTRUCTOR node
3178 with no elements indicates zero-initialization of the whole. */
3179 if (VEC_empty (constructor_elt, elts))
3180 {
3181 if (notify_temp_creation)
3182 return GS_OK;
3183 break;
3184 }
3185
3186 /* Fetch information about the constructor to direct later processing.
3187 We might want to make static versions of it in various cases, and
3188 can only do so if it known to be a valid constant initializer. */
3189 valid_const_initializer
3190 = categorize_ctor_elements (ctor, &num_nonzero_elements,
3191 &num_ctor_elements, &cleared);
3192
3193 /* If a const aggregate variable is being initialized, then it
3194 should never be a lose to promote the variable to be static. */
3195 if (valid_const_initializer
3196 && num_nonzero_elements > 1
3197 && TREE_READONLY (object)
3198 && TREE_CODE (object) == VAR_DECL)
3199 {
3200 if (notify_temp_creation)
3201 return GS_ERROR;
3202 DECL_INITIAL (object) = ctor;
3203 TREE_STATIC (object) = 1;
3204 if (!DECL_NAME (object))
3205 DECL_NAME (object) = create_tmp_var_name ("C");
3206 walk_tree (&DECL_INITIAL (object), force_labels_r, NULL, NULL);
3207
3208 /* ??? C++ doesn't automatically append a .<number> to the
3209 assembler name, and even when it does, it looks a FE private
3210 data structures to figure out what that number should be,
3211 which are not set for this variable. I suppose this is
3212 important for local statics for inline functions, which aren't
3213 "local" in the object file sense. So in order to get a unique
3214 TU-local symbol, we must invoke the lhd version now. */
3215 lhd_set_decl_assembler_name (object);
3216
3217 *expr_p = NULL_TREE;
3218 break;
3219 }
3220
3221 /* If there are "lots" of initialized elements, even discounting
3222 those that are not address constants (and thus *must* be
3223 computed at runtime), then partition the constructor into
3224 constant and non-constant parts. Block copy the constant
3225 parts in, then generate code for the non-constant parts. */
3226 /* TODO. There's code in cp/typeck.c to do this. */
3227
3228 num_type_elements = count_type_elements (type, true);
3229
3230 /* If count_type_elements could not determine number of type elements
3231 for a constant-sized object, assume clearing is needed.
3232 Don't do this for variable-sized objects, as store_constructor
3233 will ignore the clearing of variable-sized objects. */
3234 if (num_type_elements < 0 && int_size_in_bytes (type) >= 0)
3235 cleared = true;
3236 /* If there are "lots" of zeros, then block clear the object first. */
3237 else if (num_type_elements - num_nonzero_elements > CLEAR_RATIO
3238 && num_nonzero_elements < num_type_elements/4)
3239 cleared = true;
3240 /* ??? This bit ought not be needed. For any element not present
3241 in the initializer, we should simply set them to zero. Except
3242 we'd need to *find* the elements that are not present, and that
3243 requires trickery to avoid quadratic compile-time behavior in
3244 large cases or excessive memory use in small cases. */
3245 else if (num_ctor_elements < num_type_elements)
3246 cleared = true;
3247
3248 /* If there are "lots" of initialized elements, and all of them
3249 are valid address constants, then the entire initializer can
3250 be dropped to memory, and then memcpy'd out. Don't do this
3251 for sparse arrays, though, as it's more efficient to follow
3252 the standard CONSTRUCTOR behavior of memset followed by
3253 individual element initialization. */
3254 if (valid_const_initializer && !cleared)
3255 {
3256 HOST_WIDE_INT size = int_size_in_bytes (type);
3257 unsigned int align;
3258
3259 /* ??? We can still get unbounded array types, at least
3260 from the C++ front end. This seems wrong, but attempt
3261 to work around it for now. */
3262 if (size < 0)
3263 {
3264 size = int_size_in_bytes (TREE_TYPE (object));
3265 if (size >= 0)
3266 TREE_TYPE (ctor) = type = TREE_TYPE (object);
3267 }
3268
3269 /* Find the maximum alignment we can assume for the object. */
3270 /* ??? Make use of DECL_OFFSET_ALIGN. */
3271 if (DECL_P (object))
3272 align = DECL_ALIGN (object);
3273 else
3274 align = TYPE_ALIGN (type);
3275
3276 if (size > 0 && !can_move_by_pieces (size, align))
3277 {
3278 tree new;
3279
3280 if (notify_temp_creation)
3281 return GS_ERROR;
3282
3283 new = create_tmp_var_raw (type, "C");
3284
3285 gimple_add_tmp_var (new);
3286 TREE_STATIC (new) = 1;
3287 TREE_READONLY (new) = 1;
3288 DECL_INITIAL (new) = ctor;
3289 if (align > DECL_ALIGN (new))
3290 {
3291 DECL_ALIGN (new) = align;
3292 DECL_USER_ALIGN (new) = 1;
3293 }
3294 walk_tree (&DECL_INITIAL (new), force_labels_r, NULL, NULL);
3295
3296 GENERIC_TREE_OPERAND (*expr_p, 1) = new;
3297
3298 /* This is no longer an assignment of a CONSTRUCTOR, but
3299 we still may have processing to do on the LHS. So
3300 pretend we didn't do anything here to let that happen. */
3301 return GS_UNHANDLED;
3302 }
3303 }
3304
3305 if (notify_temp_creation)
3306 return GS_OK;
3307
3308 /* If there are nonzero elements, pre-evaluate to capture elements
3309 overlapping with the lhs into temporaries. We must do this before
3310 clearing to fetch the values before they are zeroed-out. */
3311 if (num_nonzero_elements > 0)
3312 {
3313 preeval_data.lhs_base_decl = get_base_address (object);
3314 if (!DECL_P (preeval_data.lhs_base_decl))
3315 preeval_data.lhs_base_decl = NULL;
3316 preeval_data.lhs_alias_set = get_alias_set (object);
3317
3318 gimplify_init_ctor_preeval (&GENERIC_TREE_OPERAND (*expr_p, 1),
3319 pre_p, post_p, &preeval_data);
3320 }
3321
3322 if (cleared)
3323 {
3324 /* Zap the CONSTRUCTOR element list, which simplifies this case.
3325 Note that we still have to gimplify, in order to handle the
3326 case of variable sized types. Avoid shared tree structures. */
3327 CONSTRUCTOR_ELTS (ctor) = NULL;
3328 object = unshare_expr (object);
3329 gimplify_stmt (expr_p);
3330 append_to_statement_list (*expr_p, pre_p);
3331 }
3332
3333 /* If we have not block cleared the object, or if there are nonzero
3334 elements in the constructor, add assignments to the individual
3335 scalar fields of the object. */
3336 if (!cleared || num_nonzero_elements > 0)
3337 gimplify_init_ctor_eval (object, elts, pre_p, cleared);
3338
3339 *expr_p = NULL_TREE;
3340 }
3341 break;
3342
3343 case COMPLEX_TYPE:
3344 {
3345 tree r, i;
3346
3347 if (notify_temp_creation)
3348 return GS_OK;
3349
3350 /* Extract the real and imaginary parts out of the ctor. */
3351 gcc_assert (VEC_length (constructor_elt, elts) == 2);
3352 r = VEC_index (constructor_elt, elts, 0)->value;
3353 i = VEC_index (constructor_elt, elts, 1)->value;
3354 if (r == NULL || i == NULL)
3355 {
3356 tree zero = fold_convert (TREE_TYPE (type), integer_zero_node);
3357 if (r == NULL)
3358 r = zero;
3359 if (i == NULL)
3360 i = zero;
3361 }
3362
3363 /* Complex types have either COMPLEX_CST or COMPLEX_EXPR to
3364 represent creation of a complex value. */
3365 if (TREE_CONSTANT (r) && TREE_CONSTANT (i))
3366 {
3367 ctor = build_complex (type, r, i);
3368 TREE_OPERAND (*expr_p, 1) = ctor;
3369 }
3370 else
3371 {
3372 ctor = build2 (COMPLEX_EXPR, type, r, i);
3373 TREE_OPERAND (*expr_p, 1) = ctor;
3374 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p, post_p,
3375 rhs_predicate_for (TREE_OPERAND (*expr_p, 0)),
3376 fb_rvalue);
3377 }
3378 }
3379 break;
3380
3381 case VECTOR_TYPE:
3382 {
3383 unsigned HOST_WIDE_INT ix;
3384 constructor_elt *ce;
3385
3386 if (notify_temp_creation)
3387 return GS_OK;
3388
3389 /* Go ahead and simplify constant constructors to VECTOR_CST. */
3390 if (TREE_CONSTANT (ctor))
3391 {
3392 bool constant_p = true;
3393 tree value;
3394
3395 /* Even when ctor is constant, it might contain non-*_CST
3396 elements, such as addresses or trapping values like
3397 1.0/0.0 - 1.0/0.0. Such expressions don't belong
3398 in VECTOR_CST nodes. */
3399 FOR_EACH_CONSTRUCTOR_VALUE (elts, ix, value)
3400 if (!CONSTANT_CLASS_P (value))
3401 {
3402 constant_p = false;
3403 break;
3404 }
3405
3406 if (constant_p)
3407 {
3408 TREE_OPERAND (*expr_p, 1) = build_vector_from_ctor (type, elts);
3409 break;
3410 }
3411
3412 /* Don't reduce an initializer constant even if we can't
3413 make a VECTOR_CST. It won't do anything for us, and it'll
3414 prevent us from representing it as a single constant. */
3415 if (initializer_constant_valid_p (ctor, type))
3416 break;
3417
3418 TREE_CONSTANT (ctor) = 0;
3419 TREE_INVARIANT (ctor) = 0;
3420 }
3421
3422 /* Vector types use CONSTRUCTOR all the way through gimple
3423 compilation as a general initializer. */
3424 for (ix = 0; VEC_iterate (constructor_elt, elts, ix, ce); ix++)
3425 {
3426 enum gimplify_status tret;
3427 tret = gimplify_expr (&ce->value, pre_p, post_p,
3428 is_gimple_val, fb_rvalue);
3429 if (tret == GS_ERROR)
3430 ret = GS_ERROR;
3431 }
3432 if (!is_gimple_reg (GENERIC_TREE_OPERAND (*expr_p, 0)))
3433 GENERIC_TREE_OPERAND (*expr_p, 1) = get_formal_tmp_var (ctor, pre_p);
3434 }
3435 break;
3436
3437 default:
3438 /* So how did we get a CONSTRUCTOR for a scalar type? */
3439 gcc_unreachable ();
3440 }
3441
3442 if (ret == GS_ERROR)
3443 return GS_ERROR;
3444 else if (want_value)
3445 {
3446 append_to_statement_list (*expr_p, pre_p);
3447 *expr_p = object;
3448 return GS_OK;
3449 }
3450 else
3451 return GS_ALL_DONE;
3452 }
3453
3454 /* Given a pointer value OP0, return a simplified version of an
3455 indirection through OP0, or NULL_TREE if no simplification is
3456 possible. Note that the resulting type may be different from
3457 the type pointed to in the sense that it is still compatible
3458 from the langhooks point of view. */
3459
3460 tree
3461 gimple_fold_indirect_ref (tree t)
3462 {
3463 tree type = TREE_TYPE (TREE_TYPE (t));
3464 tree sub = t;
3465 tree subtype;
3466
3467 STRIP_USELESS_TYPE_CONVERSION (sub);
3468 subtype = TREE_TYPE (sub);
3469 if (!POINTER_TYPE_P (subtype))
3470 return NULL_TREE;
3471
3472 if (TREE_CODE (sub) == ADDR_EXPR)
3473 {
3474 tree op = TREE_OPERAND (sub, 0);
3475 tree optype = TREE_TYPE (op);
3476 /* *&p => p */
3477 if (useless_type_conversion_p (type, optype))
3478 return op;
3479
3480 /* *(foo *)&fooarray => fooarray[0] */
3481 if (TREE_CODE (optype) == ARRAY_TYPE
3482 && useless_type_conversion_p (type, TREE_TYPE (optype)))
3483 {
3484 tree type_domain = TYPE_DOMAIN (optype);
3485 tree min_val = size_zero_node;
3486 if (type_domain && TYPE_MIN_VALUE (type_domain))
3487 min_val = TYPE_MIN_VALUE (type_domain);
3488 return build4 (ARRAY_REF, type, op, min_val, NULL_TREE, NULL_TREE);
3489 }
3490 }
3491
3492 /* *(foo *)fooarrptr => (*fooarrptr)[0] */
3493 if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
3494 && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (subtype))))
3495 {
3496 tree type_domain;
3497 tree min_val = size_zero_node;
3498 tree osub = sub;
3499 sub = gimple_fold_indirect_ref (sub);
3500 if (! sub)
3501 sub = build1 (INDIRECT_REF, TREE_TYPE (subtype), osub);
3502 type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
3503 if (type_domain && TYPE_MIN_VALUE (type_domain))
3504 min_val = TYPE_MIN_VALUE (type_domain);
3505 return build4 (ARRAY_REF, type, sub, min_val, NULL_TREE, NULL_TREE);
3506 }
3507
3508 return NULL_TREE;
3509 }
3510
3511 /* Given a pointer value OP0, return a simplified version of an
3512 indirection through OP0, or NULL_TREE if no simplification is
3513 possible. This may only be applied to a rhs of an expression.
3514 Note that the resulting type may be different from the type pointed
3515 to in the sense that it is still compatible from the langhooks
3516 point of view. */
3517
3518 static tree
3519 gimple_fold_indirect_ref_rhs (tree t)
3520 {
3521 return gimple_fold_indirect_ref (t);
3522 }
3523
3524 /* Subroutine of gimplify_modify_expr to do simplifications of
3525 MODIFY_EXPRs based on the code of the RHS. We loop for as long as
3526 something changes. */
3527
3528 static enum gimplify_status
3529 gimplify_modify_expr_rhs (tree *expr_p, tree *from_p, tree *to_p, tree *pre_p,
3530 tree *post_p, bool want_value)
3531 {
3532 enum gimplify_status ret = GS_OK;
3533
3534 while (ret != GS_UNHANDLED)
3535 switch (TREE_CODE (*from_p))
3536 {
3537 case VAR_DECL:
3538 /* If we're assigning from a constant constructor, move the
3539 constructor expression to the RHS of the MODIFY_EXPR. */
3540 if (DECL_INITIAL (*from_p)
3541 && TYPE_READONLY (TREE_TYPE (*from_p))
3542 && !TREE_THIS_VOLATILE (*from_p)
3543 && TREE_CODE (DECL_INITIAL (*from_p)) == CONSTRUCTOR)
3544 {
3545 tree old_from = *from_p;
3546
3547 /* Move the constructor into the RHS. */
3548 *from_p = unshare_expr (DECL_INITIAL (*from_p));
3549
3550 /* Let's see if gimplify_init_constructor will need to put
3551 it in memory. If so, revert the change. */
3552 ret = gimplify_init_constructor (expr_p, NULL, NULL, false, true);
3553 if (ret == GS_ERROR)
3554 {
3555 *from_p = old_from;
3556 /* Fall through. */
3557 }
3558 else
3559 {
3560 ret = GS_OK;
3561 break;
3562 }
3563 }
3564 ret = GS_UNHANDLED;
3565 break;
3566 case INDIRECT_REF:
3567 {
3568 /* If we have code like
3569
3570 *(const A*)(A*)&x
3571
3572 where the type of "x" is a (possibly cv-qualified variant
3573 of "A"), treat the entire expression as identical to "x".
3574 This kind of code arises in C++ when an object is bound
3575 to a const reference, and if "x" is a TARGET_EXPR we want
3576 to take advantage of the optimization below. */
3577 tree t = gimple_fold_indirect_ref_rhs (TREE_OPERAND (*from_p, 0));
3578 if (t)
3579 {
3580 *from_p = t;
3581 ret = GS_OK;
3582 }
3583 else
3584 ret = GS_UNHANDLED;
3585 break;
3586 }
3587
3588 case TARGET_EXPR:
3589 {
3590 /* If we are initializing something from a TARGET_EXPR, strip the
3591 TARGET_EXPR and initialize it directly, if possible. This can't
3592 be done if the initializer is void, since that implies that the
3593 temporary is set in some non-trivial way.
3594
3595 ??? What about code that pulls out the temp and uses it
3596 elsewhere? I think that such code never uses the TARGET_EXPR as
3597 an initializer. If I'm wrong, we'll die because the temp won't
3598 have any RTL. In that case, I guess we'll need to replace
3599 references somehow. */
3600 tree init = TARGET_EXPR_INITIAL (*from_p);
3601
3602 if (!VOID_TYPE_P (TREE_TYPE (init)))
3603 {
3604 *from_p = init;
3605 ret = GS_OK;
3606 }
3607 else
3608 ret = GS_UNHANDLED;
3609 }
3610 break;
3611
3612 case COMPOUND_EXPR:
3613 /* Remove any COMPOUND_EXPR in the RHS so the following cases will be
3614 caught. */
3615 gimplify_compound_expr (from_p, pre_p, true);
3616 ret = GS_OK;
3617 break;
3618
3619 case CONSTRUCTOR:
3620 /* If we're initializing from a CONSTRUCTOR, break this into
3621 individual MODIFY_EXPRs. */
3622 return gimplify_init_constructor (expr_p, pre_p, post_p, want_value,
3623 false);
3624
3625 case COND_EXPR:
3626 /* If we're assigning to a non-register type, push the assignment
3627 down into the branches. This is mandatory for ADDRESSABLE types,
3628 since we cannot generate temporaries for such, but it saves a
3629 copy in other cases as well. */
3630 if (!is_gimple_reg_type (TREE_TYPE (*from_p)))
3631 {
3632 /* This code should mirror the code in gimplify_cond_expr. */
3633 enum tree_code code = TREE_CODE (*expr_p);
3634 tree cond = *from_p;
3635 tree result = *to_p;
3636
3637 ret = gimplify_expr (&result, pre_p, post_p,
3638 is_gimple_min_lval, fb_lvalue);
3639 if (ret != GS_ERROR)
3640 ret = GS_OK;
3641
3642 if (TREE_TYPE (TREE_OPERAND (cond, 1)) != void_type_node)
3643 TREE_OPERAND (cond, 1)
3644 = build2 (code, void_type_node, result,
3645 TREE_OPERAND (cond, 1));
3646 if (TREE_TYPE (TREE_OPERAND (cond, 2)) != void_type_node)
3647 TREE_OPERAND (cond, 2)
3648 = build2 (code, void_type_node, unshare_expr (result),
3649 TREE_OPERAND (cond, 2));
3650
3651 TREE_TYPE (cond) = void_type_node;
3652 recalculate_side_effects (cond);
3653
3654 if (want_value)
3655 {
3656 gimplify_and_add (cond, pre_p);
3657 *expr_p = unshare_expr (result);
3658 }
3659 else
3660 *expr_p = cond;
3661 return ret;
3662 }
3663 else
3664 ret = GS_UNHANDLED;
3665 break;
3666
3667 case CALL_EXPR:
3668 /* For calls that return in memory, give *to_p as the CALL_EXPR's
3669 return slot so that we don't generate a temporary. */
3670 if (!CALL_EXPR_RETURN_SLOT_OPT (*from_p)
3671 && aggregate_value_p (*from_p, *from_p))
3672 {
3673 bool use_target;
3674
3675 if (!(rhs_predicate_for (*to_p))(*from_p))
3676 /* If we need a temporary, *to_p isn't accurate. */
3677 use_target = false;
3678 else if (TREE_CODE (*to_p) == RESULT_DECL
3679 && DECL_NAME (*to_p) == NULL_TREE
3680 && needs_to_live_in_memory (*to_p))
3681 /* It's OK to use the return slot directly unless it's an NRV. */
3682 use_target = true;
3683 else if (is_gimple_reg_type (TREE_TYPE (*to_p))
3684 || (DECL_P (*to_p) && DECL_REGISTER (*to_p)))
3685 /* Don't force regs into memory. */
3686 use_target = false;
3687 else if (TREE_CODE (*to_p) == VAR_DECL
3688 && DECL_GIMPLE_FORMAL_TEMP_P (*to_p))
3689 /* Don't use the original target if it's a formal temp; we
3690 don't want to take their addresses. */
3691 use_target = false;
3692 else if (TREE_CODE (*expr_p) == INIT_EXPR)
3693 /* It's OK to use the target directly if it's being
3694 initialized. */
3695 use_target = true;
3696 else if (!is_gimple_non_addressable (*to_p))
3697 /* Don't use the original target if it's already addressable;
3698 if its address escapes, and the called function uses the
3699 NRV optimization, a conforming program could see *to_p
3700 change before the called function returns; see c++/19317.
3701 When optimizing, the return_slot pass marks more functions
3702 as safe after we have escape info. */
3703 use_target = false;
3704 else
3705 use_target = true;
3706
3707 if (use_target)
3708 {
3709 CALL_EXPR_RETURN_SLOT_OPT (*from_p) = 1;
3710 mark_addressable (*to_p);
3711 }
3712 }
3713
3714 ret = GS_UNHANDLED;
3715 break;
3716
3717 /* If we're initializing from a container, push the initialization
3718 inside it. */
3719 case CLEANUP_POINT_EXPR:
3720 case BIND_EXPR:
3721 case STATEMENT_LIST:
3722 {
3723 tree wrap = *from_p;
3724 tree t;
3725
3726 ret = gimplify_expr (to_p, pre_p, post_p,
3727 is_gimple_min_lval, fb_lvalue);
3728 if (ret != GS_ERROR)
3729 ret = GS_OK;
3730
3731 t = voidify_wrapper_expr (wrap, *expr_p);
3732 gcc_assert (t == *expr_p);
3733
3734 if (want_value)
3735 {
3736 gimplify_and_add (wrap, pre_p);
3737 *expr_p = unshare_expr (*to_p);
3738 }
3739 else
3740 *expr_p = wrap;
3741 return GS_OK;
3742 }
3743
3744 default:
3745 ret = GS_UNHANDLED;
3746 break;
3747 }
3748
3749 return ret;
3750 }
3751
3752 /* Destructively convert the TREE pointer in TP into a gimple tuple if
3753 appropriate. */
3754
3755 static void
3756 tree_to_gimple_tuple (tree *tp)
3757 {
3758
3759 switch (TREE_CODE (*tp))
3760 {
3761 case GIMPLE_MODIFY_STMT:
3762 return;
3763 case MODIFY_EXPR:
3764 {
3765 struct gimple_stmt *gs;
3766 tree lhs = TREE_OPERAND (*tp, 0);
3767 bool def_stmt_self_p = false;
3768
3769 if (TREE_CODE (lhs) == SSA_NAME)
3770 {
3771 if (SSA_NAME_DEF_STMT (lhs) == *tp)
3772 def_stmt_self_p = true;
3773 }
3774
3775 gs = &make_node (GIMPLE_MODIFY_STMT)->gstmt;
3776 gs->base = (*tp)->base;
3777 /* The set to base above overwrites the CODE. */
3778 TREE_SET_CODE ((tree) gs, GIMPLE_MODIFY_STMT);
3779
3780 SET_EXPR_LOCUS ((tree) gs, EXPR_LOCUS (*tp));
3781 gs->operands[0] = TREE_OPERAND (*tp, 0);
3782 gs->operands[1] = TREE_OPERAND (*tp, 1);
3783 gs->block = TREE_BLOCK (*tp);
3784 *tp = (tree)gs;
3785
3786 /* If we re-gimplify a set to an SSA_NAME, we must change the
3787 SSA name's DEF_STMT link. */
3788 if (def_stmt_self_p)
3789 SSA_NAME_DEF_STMT (GIMPLE_STMT_OPERAND (*tp, 0)) = *tp;
3790
3791 return;
3792 }
3793 default:
3794 break;
3795 }
3796 }
3797
3798 /* Promote partial stores to COMPLEX variables to total stores. *EXPR_P is
3799 a MODIFY_EXPR with a lhs of a REAL/IMAGPART_EXPR of a variable with
3800 DECL_GIMPLE_REG_P set.
3801
3802 IMPORTANT NOTE: This promotion is performed by introducing a load of the
3803 other, unmodified part of the complex object just before the total store.
3804 As a consequence, if the object is still uninitialized, an undefined value
3805 will be loaded into a register, which may result in a spurious exception
3806 if the register is floating-point and the value happens to be a signaling
3807 NaN for example. Then the fully-fledged complex operations lowering pass
3808 followed by a DCE pass are necessary in order to fix things up. */
3809
3810 static enum gimplify_status
3811 gimplify_modify_expr_complex_part (tree *expr_p, tree *pre_p, bool want_value)
3812 {
3813 enum tree_code code, ocode;
3814 tree lhs, rhs, new_rhs, other, realpart, imagpart;
3815
3816 lhs = GENERIC_TREE_OPERAND (*expr_p, 0);
3817 rhs = GENERIC_TREE_OPERAND (*expr_p, 1);
3818 code = TREE_CODE (lhs);
3819 lhs = TREE_OPERAND (lhs, 0);
3820
3821 ocode = code == REALPART_EXPR ? IMAGPART_EXPR : REALPART_EXPR;
3822 other = build1 (ocode, TREE_TYPE (rhs), lhs);
3823 other = get_formal_tmp_var (other, pre_p);
3824
3825 realpart = code == REALPART_EXPR ? rhs : other;
3826 imagpart = code == REALPART_EXPR ? other : rhs;
3827
3828 if (TREE_CONSTANT (realpart) && TREE_CONSTANT (imagpart))
3829 new_rhs = build_complex (TREE_TYPE (lhs), realpart, imagpart);
3830 else
3831 new_rhs = build2 (COMPLEX_EXPR, TREE_TYPE (lhs), realpart, imagpart);
3832
3833 GENERIC_TREE_OPERAND (*expr_p, 0) = lhs;
3834 GENERIC_TREE_OPERAND (*expr_p, 1) = new_rhs;
3835
3836 if (want_value)
3837 {
3838 tree_to_gimple_tuple (expr_p);
3839
3840 append_to_statement_list (*expr_p, pre_p);
3841 *expr_p = rhs;
3842 }
3843
3844 return GS_ALL_DONE;
3845 }
3846
3847 /* Gimplify the MODIFY_EXPR node pointed to by EXPR_P.
3848
3849 modify_expr
3850 : varname '=' rhs
3851 | '*' ID '=' rhs
3852
3853 PRE_P points to the list where side effects that must happen before
3854 *EXPR_P should be stored.
3855
3856 POST_P points to the list where side effects that must happen after
3857 *EXPR_P should be stored.
3858
3859 WANT_VALUE is nonzero iff we want to use the value of this expression
3860 in another expression. */
3861
3862 static enum gimplify_status
3863 gimplify_modify_expr (tree *expr_p, tree *pre_p, tree *post_p, bool want_value)
3864 {
3865 tree *from_p = &GENERIC_TREE_OPERAND (*expr_p, 1);
3866 tree *to_p = &GENERIC_TREE_OPERAND (*expr_p, 0);
3867 enum gimplify_status ret = GS_UNHANDLED;
3868
3869 gcc_assert (TREE_CODE (*expr_p) == MODIFY_EXPR
3870 || TREE_CODE (*expr_p) == GIMPLE_MODIFY_STMT
3871 || TREE_CODE (*expr_p) == INIT_EXPR);
3872
3873 /* See if any simplifications can be done based on what the RHS is. */
3874 ret = gimplify_modify_expr_rhs (expr_p, from_p, to_p, pre_p, post_p,
3875 want_value);
3876 if (ret != GS_UNHANDLED)
3877 return ret;
3878
3879 /* For zero sized types only gimplify the left hand side and right hand
3880 side as statements and throw away the assignment. Do this after
3881 gimplify_modify_expr_rhs so we handle TARGET_EXPRs of addressable
3882 types properly. */
3883 if (zero_sized_type (TREE_TYPE (*from_p)))
3884 {
3885 gimplify_stmt (from_p);
3886 gimplify_stmt (to_p);
3887 append_to_statement_list (*from_p, pre_p);
3888 append_to_statement_list (*to_p, pre_p);
3889 *expr_p = NULL_TREE;
3890 return GS_ALL_DONE;
3891 }
3892
3893 /* If the value being copied is of variable width, compute the length
3894 of the copy into a WITH_SIZE_EXPR. Note that we need to do this
3895 before gimplifying any of the operands so that we can resolve any
3896 PLACEHOLDER_EXPRs in the size. Also note that the RTL expander uses
3897 the size of the expression to be copied, not of the destination, so
3898 that is what we must here. */
3899 maybe_with_size_expr (from_p);
3900
3901 ret = gimplify_expr (to_p, pre_p, post_p, is_gimple_lvalue, fb_lvalue);
3902 if (ret == GS_ERROR)
3903 return ret;
3904
3905 ret = gimplify_expr (from_p, pre_p, post_p,
3906 rhs_predicate_for (*to_p), fb_rvalue);
3907 if (ret == GS_ERROR)
3908 return ret;
3909
3910 /* Now see if the above changed *from_p to something we handle specially. */
3911 ret = gimplify_modify_expr_rhs (expr_p, from_p, to_p, pre_p, post_p,
3912 want_value);
3913 if (ret != GS_UNHANDLED)
3914 return ret;
3915
3916 /* If we've got a variable sized assignment between two lvalues (i.e. does
3917 not involve a call), then we can make things a bit more straightforward
3918 by converting the assignment to memcpy or memset. */
3919 if (TREE_CODE (*from_p) == WITH_SIZE_EXPR)
3920 {
3921 tree from = TREE_OPERAND (*from_p, 0);
3922 tree size = TREE_OPERAND (*from_p, 1);
3923
3924 if (TREE_CODE (from) == CONSTRUCTOR)
3925 return gimplify_modify_expr_to_memset (expr_p, size, want_value);
3926 if (is_gimple_addressable (from))
3927 {
3928 *from_p = from;
3929 return gimplify_modify_expr_to_memcpy (expr_p, size, want_value);
3930 }
3931 }
3932
3933 /* Transform partial stores to non-addressable complex variables into
3934 total stores. This allows us to use real instead of virtual operands
3935 for these variables, which improves optimization. */
3936 if ((TREE_CODE (*to_p) == REALPART_EXPR
3937 || TREE_CODE (*to_p) == IMAGPART_EXPR)
3938 && is_gimple_reg (TREE_OPERAND (*to_p, 0)))
3939 return gimplify_modify_expr_complex_part (expr_p, pre_p, want_value);
3940
3941 if (gimplify_ctxp->into_ssa && is_gimple_reg (*to_p))
3942 {
3943 /* If we've somehow already got an SSA_NAME on the LHS, then
3944 we're probably modified it twice. Not good. */
3945 gcc_assert (TREE_CODE (*to_p) != SSA_NAME);
3946 *to_p = make_ssa_name (*to_p, *expr_p);
3947 }
3948
3949 /* Try to alleviate the effects of the gimplification creating artificial
3950 temporaries (see for example is_gimple_reg_rhs) on the debug info. */
3951 if (!gimplify_ctxp->into_ssa
3952 && DECL_P (*from_p) && DECL_IGNORED_P (*from_p)
3953 && DECL_P (*to_p) && !DECL_IGNORED_P (*to_p))
3954 {
3955 if (!DECL_NAME (*from_p) && DECL_NAME (*to_p))
3956 DECL_NAME (*from_p)
3957 = create_tmp_var_name (IDENTIFIER_POINTER (DECL_NAME (*to_p)));
3958 DECL_DEBUG_EXPR_IS_FROM (*from_p) = 1;
3959 SET_DECL_DEBUG_EXPR (*from_p, *to_p);
3960 }
3961
3962 if (want_value)
3963 {
3964 tree_to_gimple_tuple (expr_p);
3965
3966 append_to_statement_list (*expr_p, pre_p);
3967 *expr_p = *to_p;
3968 return GS_OK;
3969 }
3970
3971 return GS_ALL_DONE;
3972 }
3973
3974 /* Gimplify a comparison between two variable-sized objects. Do this
3975 with a call to BUILT_IN_MEMCMP. */
3976
3977 static enum gimplify_status
3978 gimplify_variable_sized_compare (tree *expr_p)
3979 {
3980 tree op0 = TREE_OPERAND (*expr_p, 0);
3981 tree op1 = TREE_OPERAND (*expr_p, 1);
3982 tree t, arg, dest, src;
3983
3984 arg = TYPE_SIZE_UNIT (TREE_TYPE (op0));
3985 arg = unshare_expr (arg);
3986 arg = SUBSTITUTE_PLACEHOLDER_IN_EXPR (arg, op0);
3987 src = build_fold_addr_expr (op1);
3988 dest = build_fold_addr_expr (op0);
3989 t = implicit_built_in_decls[BUILT_IN_MEMCMP];
3990 t = build_call_expr (t, 3, dest, src, arg);
3991 *expr_p
3992 = build2 (TREE_CODE (*expr_p), TREE_TYPE (*expr_p), t, integer_zero_node);
3993
3994 return GS_OK;
3995 }
3996
3997 /* Gimplify a comparison between two aggregate objects of integral scalar
3998 mode as a comparison between the bitwise equivalent scalar values. */
3999
4000 static enum gimplify_status
4001 gimplify_scalar_mode_aggregate_compare (tree *expr_p)
4002 {
4003 tree op0 = TREE_OPERAND (*expr_p, 0);
4004 tree op1 = TREE_OPERAND (*expr_p, 1);
4005
4006 tree type = TREE_TYPE (op0);
4007 tree scalar_type = lang_hooks.types.type_for_mode (TYPE_MODE (type), 1);
4008
4009 op0 = fold_build1 (VIEW_CONVERT_EXPR, scalar_type, op0);
4010 op1 = fold_build1 (VIEW_CONVERT_EXPR, scalar_type, op1);
4011
4012 *expr_p
4013 = fold_build2 (TREE_CODE (*expr_p), TREE_TYPE (*expr_p), op0, op1);
4014
4015 return GS_OK;
4016 }
4017
4018 /* Gimplify TRUTH_ANDIF_EXPR and TRUTH_ORIF_EXPR expressions. EXPR_P
4019 points to the expression to gimplify.
4020
4021 Expressions of the form 'a && b' are gimplified to:
4022
4023 a && b ? true : false
4024
4025 gimplify_cond_expr will do the rest.
4026
4027 PRE_P points to the list where side effects that must happen before
4028 *EXPR_P should be stored. */
4029
4030 static enum gimplify_status
4031 gimplify_boolean_expr (tree *expr_p)
4032 {
4033 /* Preserve the original type of the expression. */
4034 tree type = TREE_TYPE (*expr_p);
4035
4036 *expr_p = build3 (COND_EXPR, type, *expr_p,
4037 fold_convert (type, boolean_true_node),
4038 fold_convert (type, boolean_false_node));
4039
4040 return GS_OK;
4041 }
4042
4043 /* Gimplifies an expression sequence. This function gimplifies each
4044 expression and re-writes the original expression with the last
4045 expression of the sequence in GIMPLE form.
4046
4047 PRE_P points to the list where the side effects for all the
4048 expressions in the sequence will be emitted.
4049
4050 WANT_VALUE is true when the result of the last COMPOUND_EXPR is used. */
4051 /* ??? Should rearrange to share the pre-queue with all the indirect
4052 invocations of gimplify_expr. Would probably save on creations
4053 of statement_list nodes. */
4054
4055 static enum gimplify_status
4056 gimplify_compound_expr (tree *expr_p, tree *pre_p, bool want_value)
4057 {
4058 tree t = *expr_p;
4059
4060 do
4061 {
4062 tree *sub_p = &TREE_OPERAND (t, 0);
4063
4064 if (TREE_CODE (*sub_p) == COMPOUND_EXPR)
4065 gimplify_compound_expr (sub_p, pre_p, false);
4066 else
4067 gimplify_stmt (sub_p);
4068 append_to_statement_list (*sub_p, pre_p);
4069
4070 t = TREE_OPERAND (t, 1);
4071 }
4072 while (TREE_CODE (t) == COMPOUND_EXPR);
4073
4074 *expr_p = t;
4075 if (want_value)
4076 return GS_OK;
4077 else
4078 {
4079 gimplify_stmt (expr_p);
4080 return GS_ALL_DONE;
4081 }
4082 }
4083
4084 /* Gimplifies a statement list. These may be created either by an
4085 enlightened front-end, or by shortcut_cond_expr. */
4086
4087 static enum gimplify_status
4088 gimplify_statement_list (tree *expr_p, tree *pre_p)
4089 {
4090 tree temp = voidify_wrapper_expr (*expr_p, NULL);
4091
4092 tree_stmt_iterator i = tsi_start (*expr_p);
4093
4094 while (!tsi_end_p (i))
4095 {
4096 tree t;
4097
4098 gimplify_stmt (tsi_stmt_ptr (i));
4099
4100 t = tsi_stmt (i);
4101 if (t == NULL)
4102 tsi_delink (&i);
4103 else if (TREE_CODE (t) == STATEMENT_LIST)
4104 {
4105 tsi_link_before (&i, t, TSI_SAME_STMT);
4106 tsi_delink (&i);
4107 }
4108 else
4109 tsi_next (&i);
4110 }
4111
4112 if (temp)
4113 {
4114 append_to_statement_list (*expr_p, pre_p);
4115 *expr_p = temp;
4116 return GS_OK;
4117 }
4118
4119 return GS_ALL_DONE;
4120 }
4121
4122 /* Gimplify a SAVE_EXPR node. EXPR_P points to the expression to
4123 gimplify. After gimplification, EXPR_P will point to a new temporary
4124 that holds the original value of the SAVE_EXPR node.
4125
4126 PRE_P points to the list where side effects that must happen before
4127 *EXPR_P should be stored. */
4128
4129 static enum gimplify_status
4130 gimplify_save_expr (tree *expr_p, tree *pre_p, tree *post_p)
4131 {
4132 enum gimplify_status ret = GS_ALL_DONE;
4133 tree val;
4134
4135 gcc_assert (TREE_CODE (*expr_p) == SAVE_EXPR);
4136 val = TREE_OPERAND (*expr_p, 0);
4137
4138 /* If the SAVE_EXPR has not been resolved, then evaluate it once. */
4139 if (!SAVE_EXPR_RESOLVED_P (*expr_p))
4140 {
4141 /* The operand may be a void-valued expression such as SAVE_EXPRs
4142 generated by the Java frontend for class initialization. It is
4143 being executed only for its side-effects. */
4144 if (TREE_TYPE (val) == void_type_node)
4145 {
4146 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
4147 is_gimple_stmt, fb_none);
4148 append_to_statement_list (TREE_OPERAND (*expr_p, 0), pre_p);
4149 val = NULL;
4150 }
4151 else
4152 val = get_initialized_tmp_var (val, pre_p, post_p);
4153
4154 TREE_OPERAND (*expr_p, 0) = val;
4155 SAVE_EXPR_RESOLVED_P (*expr_p) = 1;
4156 }
4157
4158 *expr_p = val;
4159
4160 return ret;
4161 }
4162
4163 /* Re-write the ADDR_EXPR node pointed to by EXPR_P
4164
4165 unary_expr
4166 : ...
4167 | '&' varname
4168 ...
4169
4170 PRE_P points to the list where side effects that must happen before
4171 *EXPR_P should be stored.
4172
4173 POST_P points to the list where side effects that must happen after
4174 *EXPR_P should be stored. */
4175
4176 static enum gimplify_status
4177 gimplify_addr_expr (tree *expr_p, tree *pre_p, tree *post_p)
4178 {
4179 tree expr = *expr_p;
4180 tree op0 = TREE_OPERAND (expr, 0);
4181 enum gimplify_status ret;
4182
4183 switch (TREE_CODE (op0))
4184 {
4185 case INDIRECT_REF:
4186 case MISALIGNED_INDIRECT_REF:
4187 do_indirect_ref:
4188 /* Check if we are dealing with an expression of the form '&*ptr'.
4189 While the front end folds away '&*ptr' into 'ptr', these
4190 expressions may be generated internally by the compiler (e.g.,
4191 builtins like __builtin_va_end). */
4192 /* Caution: the silent array decomposition semantics we allow for
4193 ADDR_EXPR means we can't always discard the pair. */
4194 /* Gimplification of the ADDR_EXPR operand may drop
4195 cv-qualification conversions, so make sure we add them if
4196 needed. */
4197 {
4198 tree op00 = TREE_OPERAND (op0, 0);
4199 tree t_expr = TREE_TYPE (expr);
4200 tree t_op00 = TREE_TYPE (op00);
4201
4202 if (!useless_type_conversion_p (t_expr, t_op00))
4203 op00 = fold_convert (TREE_TYPE (expr), op00);
4204 *expr_p = op00;
4205 ret = GS_OK;
4206 }
4207 break;
4208
4209 case VIEW_CONVERT_EXPR:
4210 /* Take the address of our operand and then convert it to the type of
4211 this ADDR_EXPR.
4212
4213 ??? The interactions of VIEW_CONVERT_EXPR and aliasing is not at
4214 all clear. The impact of this transformation is even less clear. */
4215
4216 /* If the operand is a useless conversion, look through it. Doing so
4217 guarantees that the ADDR_EXPR and its operand will remain of the
4218 same type. */
4219 if (tree_ssa_useless_type_conversion (TREE_OPERAND (op0, 0)))
4220 op0 = TREE_OPERAND (op0, 0);
4221
4222 *expr_p = fold_convert (TREE_TYPE (expr),
4223 build_fold_addr_expr (TREE_OPERAND (op0, 0)));
4224 ret = GS_OK;
4225 break;
4226
4227 default:
4228 /* We use fb_either here because the C frontend sometimes takes
4229 the address of a call that returns a struct; see
4230 gcc.dg/c99-array-lval-1.c. The gimplifier will correctly make
4231 the implied temporary explicit. */
4232
4233 /* Mark the RHS addressable. */
4234 ret = gimplify_expr (&TREE_OPERAND (expr, 0), pre_p, post_p,
4235 is_gimple_addressable, fb_either);
4236 if (ret != GS_ERROR)
4237 {
4238 op0 = TREE_OPERAND (expr, 0);
4239
4240 /* For various reasons, the gimplification of the expression
4241 may have made a new INDIRECT_REF. */
4242 if (TREE_CODE (op0) == INDIRECT_REF)
4243 goto do_indirect_ref;
4244
4245 /* Make sure TREE_INVARIANT, TREE_CONSTANT, and TREE_SIDE_EFFECTS
4246 is set properly. */
4247 recompute_tree_invariant_for_addr_expr (expr);
4248
4249 mark_addressable (TREE_OPERAND (expr, 0));
4250 }
4251 break;
4252 }
4253
4254 return ret;
4255 }
4256
4257 /* Gimplify the operands of an ASM_EXPR. Input operands should be a gimple
4258 value; output operands should be a gimple lvalue. */
4259
4260 static enum gimplify_status
4261 gimplify_asm_expr (tree *expr_p, tree *pre_p, tree *post_p)
4262 {
4263 tree expr = *expr_p;
4264 int noutputs = list_length (ASM_OUTPUTS (expr));
4265 const char **oconstraints
4266 = (const char **) alloca ((noutputs) * sizeof (const char *));
4267 int i;
4268 tree link;
4269 const char *constraint;
4270 bool allows_mem, allows_reg, is_inout;
4271 enum gimplify_status ret, tret;
4272
4273 ret = GS_ALL_DONE;
4274 for (i = 0, link = ASM_OUTPUTS (expr); link; ++i, link = TREE_CHAIN (link))
4275 {
4276 size_t constraint_len;
4277 oconstraints[i] = constraint
4278 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
4279 constraint_len = strlen (constraint);
4280 if (constraint_len == 0)
4281 continue;
4282
4283 parse_output_constraint (&constraint, i, 0, 0,
4284 &allows_mem, &allows_reg, &is_inout);
4285
4286 if (!allows_reg && allows_mem)
4287 mark_addressable (TREE_VALUE (link));
4288
4289 tret = gimplify_expr (&TREE_VALUE (link), pre_p, post_p,
4290 is_inout ? is_gimple_min_lval : is_gimple_lvalue,
4291 fb_lvalue | fb_mayfail);
4292 if (tret == GS_ERROR)
4293 {
4294 error ("invalid lvalue in asm output %d", i);
4295 ret = tret;
4296 }
4297
4298 if (is_inout)
4299 {
4300 /* An input/output operand. To give the optimizers more
4301 flexibility, split it into separate input and output
4302 operands. */
4303 tree input;
4304 char buf[10];
4305
4306 /* Turn the in/out constraint into an output constraint. */
4307 char *p = xstrdup (constraint);
4308 p[0] = '=';
4309 TREE_VALUE (TREE_PURPOSE (link)) = build_string (constraint_len, p);
4310
4311 /* And add a matching input constraint. */
4312 if (allows_reg)
4313 {
4314 sprintf (buf, "%d", i);
4315
4316 /* If there are multiple alternatives in the constraint,
4317 handle each of them individually. Those that allow register
4318 will be replaced with operand number, the others will stay
4319 unchanged. */
4320 if (strchr (p, ',') != NULL)
4321 {
4322 size_t len = 0, buflen = strlen (buf);
4323 char *beg, *end, *str, *dst;
4324
4325 for (beg = p + 1;;)
4326 {
4327 end = strchr (beg, ',');
4328 if (end == NULL)
4329 end = strchr (beg, '\0');
4330 if ((size_t) (end - beg) < buflen)
4331 len += buflen + 1;
4332 else
4333 len += end - beg + 1;
4334 if (*end)
4335 beg = end + 1;
4336 else
4337 break;
4338 }
4339
4340 str = (char *) alloca (len);
4341 for (beg = p + 1, dst = str;;)
4342 {
4343 const char *tem;
4344 bool mem_p, reg_p, inout_p;
4345
4346 end = strchr (beg, ',');
4347 if (end)
4348 *end = '\0';
4349 beg[-1] = '=';
4350 tem = beg - 1;
4351 parse_output_constraint (&tem, i, 0, 0,
4352 &mem_p, &reg_p, &inout_p);
4353 if (dst != str)
4354 *dst++ = ',';
4355 if (reg_p)
4356 {
4357 memcpy (dst, buf, buflen);
4358 dst += buflen;
4359 }
4360 else
4361 {
4362 if (end)
4363 len = end - beg;
4364 else
4365 len = strlen (beg);
4366 memcpy (dst, beg, len);
4367 dst += len;
4368 }
4369 if (end)
4370 beg = end + 1;
4371 else
4372 break;
4373 }
4374 *dst = '\0';
4375 input = build_string (dst - str, str);
4376 }
4377 else
4378 input = build_string (strlen (buf), buf);
4379 }
4380 else
4381 input = build_string (constraint_len - 1, constraint + 1);
4382
4383 free (p);
4384
4385 input = build_tree_list (build_tree_list (NULL_TREE, input),
4386 unshare_expr (TREE_VALUE (link)));
4387 ASM_INPUTS (expr) = chainon (ASM_INPUTS (expr), input);
4388 }
4389 }
4390
4391 for (link = ASM_INPUTS (expr); link; ++i, link = TREE_CHAIN (link))
4392 {
4393 constraint
4394 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
4395 parse_input_constraint (&constraint, 0, 0, noutputs, 0,
4396 oconstraints, &allows_mem, &allows_reg);
4397
4398 /* If we can't make copies, we can only accept memory. */
4399 if (TREE_ADDRESSABLE (TREE_TYPE (TREE_VALUE (link))))
4400 {
4401 if (allows_mem)
4402 allows_reg = 0;
4403 else
4404 {
4405 error ("impossible constraint in %<asm%>");
4406 error ("non-memory input %d must stay in memory", i);
4407 return GS_ERROR;
4408 }
4409 }
4410
4411 /* If the operand is a memory input, it should be an lvalue. */
4412 if (!allows_reg && allows_mem)
4413 {
4414 tret = gimplify_expr (&TREE_VALUE (link), pre_p, post_p,
4415 is_gimple_lvalue, fb_lvalue | fb_mayfail);
4416 mark_addressable (TREE_VALUE (link));
4417 if (tret == GS_ERROR)
4418 {
4419 error ("memory input %d is not directly addressable", i);
4420 ret = tret;
4421 }
4422 }
4423 else
4424 {
4425 tret = gimplify_expr (&TREE_VALUE (link), pre_p, post_p,
4426 is_gimple_asm_val, fb_rvalue);
4427 if (tret == GS_ERROR)
4428 ret = tret;
4429 }
4430 }
4431
4432 return ret;
4433 }
4434
4435 /* Gimplify a CLEANUP_POINT_EXPR. Currently this works by adding
4436 WITH_CLEANUP_EXPRs to the prequeue as we encounter cleanups while
4437 gimplifying the body, and converting them to TRY_FINALLY_EXPRs when we
4438 return to this function.
4439
4440 FIXME should we complexify the prequeue handling instead? Or use flags
4441 for all the cleanups and let the optimizer tighten them up? The current
4442 code seems pretty fragile; it will break on a cleanup within any
4443 non-conditional nesting. But any such nesting would be broken, anyway;
4444 we can't write a TRY_FINALLY_EXPR that starts inside a nesting construct
4445 and continues out of it. We can do that at the RTL level, though, so
4446 having an optimizer to tighten up try/finally regions would be a Good
4447 Thing. */
4448
4449 static enum gimplify_status
4450 gimplify_cleanup_point_expr (tree *expr_p, tree *pre_p)
4451 {
4452 tree_stmt_iterator iter;
4453 tree body;
4454
4455 tree temp = voidify_wrapper_expr (*expr_p, NULL);
4456
4457 /* We only care about the number of conditions between the innermost
4458 CLEANUP_POINT_EXPR and the cleanup. So save and reset the count and
4459 any cleanups collected outside the CLEANUP_POINT_EXPR. */
4460 int old_conds = gimplify_ctxp->conditions;
4461 tree old_cleanups = gimplify_ctxp->conditional_cleanups;
4462 gimplify_ctxp->conditions = 0;
4463 gimplify_ctxp->conditional_cleanups = NULL_TREE;
4464
4465 body = TREE_OPERAND (*expr_p, 0);
4466 gimplify_to_stmt_list (&body);
4467
4468 gimplify_ctxp->conditions = old_conds;
4469 gimplify_ctxp->conditional_cleanups = old_cleanups;
4470
4471 for (iter = tsi_start (body); !tsi_end_p (iter); )
4472 {
4473 tree *wce_p = tsi_stmt_ptr (iter);
4474 tree wce = *wce_p;
4475
4476 if (TREE_CODE (wce) == WITH_CLEANUP_EXPR)
4477 {
4478 if (tsi_one_before_end_p (iter))
4479 {
4480 tsi_link_before (&iter, TREE_OPERAND (wce, 0), TSI_SAME_STMT);
4481 tsi_delink (&iter);
4482 break;
4483 }
4484 else
4485 {
4486 tree sl, tfe;
4487 enum tree_code code;
4488
4489 if (CLEANUP_EH_ONLY (wce))
4490 code = TRY_CATCH_EXPR;
4491 else
4492 code = TRY_FINALLY_EXPR;
4493
4494 sl = tsi_split_statement_list_after (&iter);
4495 tfe = build2 (code, void_type_node, sl, NULL_TREE);
4496 append_to_statement_list (TREE_OPERAND (wce, 0),
4497 &TREE_OPERAND (tfe, 1));
4498 *wce_p = tfe;
4499 iter = tsi_start (sl);
4500 }
4501 }
4502 else
4503 tsi_next (&iter);
4504 }
4505
4506 if (temp)
4507 {
4508 *expr_p = temp;
4509 append_to_statement_list (body, pre_p);
4510 return GS_OK;
4511 }
4512 else
4513 {
4514 *expr_p = body;
4515 return GS_ALL_DONE;
4516 }
4517 }
4518
4519 /* Insert a cleanup marker for gimplify_cleanup_point_expr. CLEANUP
4520 is the cleanup action required. */
4521
4522 static void
4523 gimple_push_cleanup (tree var, tree cleanup, bool eh_only, tree *pre_p)
4524 {
4525 tree wce;
4526
4527 /* Errors can result in improperly nested cleanups. Which results in
4528 confusion when trying to resolve the WITH_CLEANUP_EXPR. */
4529 if (errorcount || sorrycount)
4530 return;
4531
4532 if (gimple_conditional_context ())
4533 {
4534 /* If we're in a conditional context, this is more complex. We only
4535 want to run the cleanup if we actually ran the initialization that
4536 necessitates it, but we want to run it after the end of the
4537 conditional context. So we wrap the try/finally around the
4538 condition and use a flag to determine whether or not to actually
4539 run the destructor. Thus
4540
4541 test ? f(A()) : 0
4542
4543 becomes (approximately)
4544
4545 flag = 0;
4546 try {
4547 if (test) { A::A(temp); flag = 1; val = f(temp); }
4548 else { val = 0; }
4549 } finally {
4550 if (flag) A::~A(temp);
4551 }
4552 val
4553 */
4554
4555 tree flag = create_tmp_var (boolean_type_node, "cleanup");
4556 tree ffalse = build_gimple_modify_stmt (flag, boolean_false_node);
4557 tree ftrue = build_gimple_modify_stmt (flag, boolean_true_node);
4558 cleanup = build3 (COND_EXPR, void_type_node, flag, cleanup, NULL);
4559 wce = build1 (WITH_CLEANUP_EXPR, void_type_node, cleanup);
4560 append_to_statement_list (ffalse, &gimplify_ctxp->conditional_cleanups);
4561 append_to_statement_list (wce, &gimplify_ctxp->conditional_cleanups);
4562 append_to_statement_list (ftrue, pre_p);
4563
4564 /* Because of this manipulation, and the EH edges that jump
4565 threading cannot redirect, the temporary (VAR) will appear
4566 to be used uninitialized. Don't warn. */
4567 TREE_NO_WARNING (var) = 1;
4568 }
4569 else
4570 {
4571 wce = build1 (WITH_CLEANUP_EXPR, void_type_node, cleanup);
4572 CLEANUP_EH_ONLY (wce) = eh_only;
4573 append_to_statement_list (wce, pre_p);
4574 }
4575
4576 gimplify_stmt (&TREE_OPERAND (wce, 0));
4577 }
4578
4579 /* Gimplify a TARGET_EXPR which doesn't appear on the rhs of an INIT_EXPR. */
4580
4581 static enum gimplify_status
4582 gimplify_target_expr (tree *expr_p, tree *pre_p, tree *post_p)
4583 {
4584 tree targ = *expr_p;
4585 tree temp = TARGET_EXPR_SLOT (targ);
4586 tree init = TARGET_EXPR_INITIAL (targ);
4587 enum gimplify_status ret;
4588
4589 if (init)
4590 {
4591 /* TARGET_EXPR temps aren't part of the enclosing block, so add it
4592 to the temps list. Handle also variable length TARGET_EXPRs. */
4593 if (TREE_CODE (DECL_SIZE (temp)) != INTEGER_CST)
4594 {
4595 if (!TYPE_SIZES_GIMPLIFIED (TREE_TYPE (temp)))
4596 gimplify_type_sizes (TREE_TYPE (temp), pre_p);
4597 gimplify_vla_decl (temp, pre_p);
4598 }
4599 else
4600 gimple_add_tmp_var (temp);
4601
4602 /* If TARGET_EXPR_INITIAL is void, then the mere evaluation of the
4603 expression is supposed to initialize the slot. */
4604 if (VOID_TYPE_P (TREE_TYPE (init)))
4605 ret = gimplify_expr (&init, pre_p, post_p, is_gimple_stmt, fb_none);
4606 else
4607 {
4608 init = build2 (INIT_EXPR, void_type_node, temp, init);
4609 ret = gimplify_expr (&init, pre_p, post_p, is_gimple_stmt,
4610 fb_none);
4611 }
4612 if (ret == GS_ERROR)
4613 {
4614 /* PR c++/28266 Make sure this is expanded only once. */
4615 TARGET_EXPR_INITIAL (targ) = NULL_TREE;
4616 return GS_ERROR;
4617 }
4618 append_to_statement_list (init, pre_p);
4619
4620 /* If needed, push the cleanup for the temp. */
4621 if (TARGET_EXPR_CLEANUP (targ))
4622 {
4623 gimplify_stmt (&TARGET_EXPR_CLEANUP (targ));
4624 gimple_push_cleanup (temp, TARGET_EXPR_CLEANUP (targ),
4625 CLEANUP_EH_ONLY (targ), pre_p);
4626 }
4627
4628 /* Only expand this once. */
4629 TREE_OPERAND (targ, 3) = init;
4630 TARGET_EXPR_INITIAL (targ) = NULL_TREE;
4631 }
4632 else
4633 /* We should have expanded this before. */
4634 gcc_assert (DECL_SEEN_IN_BIND_EXPR_P (temp));
4635
4636 *expr_p = temp;
4637 return GS_OK;
4638 }
4639
4640 /* Gimplification of expression trees. */
4641
4642 /* Gimplify an expression which appears at statement context; usually, this
4643 means replacing it with a suitably gimple STATEMENT_LIST. */
4644
4645 void
4646 gimplify_stmt (tree *stmt_p)
4647 {
4648 gimplify_expr (stmt_p, NULL, NULL, is_gimple_stmt, fb_none);
4649 }
4650
4651 /* Similarly, but force the result to be a STATEMENT_LIST. */
4652
4653 void
4654 gimplify_to_stmt_list (tree *stmt_p)
4655 {
4656 gimplify_stmt (stmt_p);
4657 if (!*stmt_p)
4658 *stmt_p = alloc_stmt_list ();
4659 else if (TREE_CODE (*stmt_p) != STATEMENT_LIST)
4660 {
4661 tree t = *stmt_p;
4662 *stmt_p = alloc_stmt_list ();
4663 append_to_statement_list (t, stmt_p);
4664 }
4665 }
4666
4667
4668 /* Add FIRSTPRIVATE entries for DECL in the OpenMP the surrounding parallels
4669 to CTX. If entries already exist, force them to be some flavor of private.
4670 If there is no enclosing parallel, do nothing. */
4671
4672 void
4673 omp_firstprivatize_variable (struct gimplify_omp_ctx *ctx, tree decl)
4674 {
4675 splay_tree_node n;
4676
4677 if (decl == NULL || !DECL_P (decl))
4678 return;
4679
4680 do
4681 {
4682 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
4683 if (n != NULL)
4684 {
4685 if (n->value & GOVD_SHARED)
4686 n->value = GOVD_FIRSTPRIVATE | (n->value & GOVD_SEEN);
4687 else
4688 return;
4689 }
4690 else if (ctx->is_parallel)
4691 omp_add_variable (ctx, decl, GOVD_FIRSTPRIVATE);
4692
4693 ctx = ctx->outer_context;
4694 }
4695 while (ctx);
4696 }
4697
4698 /* Similarly for each of the type sizes of TYPE. */
4699
4700 static void
4701 omp_firstprivatize_type_sizes (struct gimplify_omp_ctx *ctx, tree type)
4702 {
4703 if (type == NULL || type == error_mark_node)
4704 return;
4705 type = TYPE_MAIN_VARIANT (type);
4706
4707 if (pointer_set_insert (ctx->privatized_types, type))
4708 return;
4709
4710 switch (TREE_CODE (type))
4711 {
4712 case INTEGER_TYPE:
4713 case ENUMERAL_TYPE:
4714 case BOOLEAN_TYPE:
4715 case REAL_TYPE:
4716 case FIXED_POINT_TYPE:
4717 omp_firstprivatize_variable (ctx, TYPE_MIN_VALUE (type));
4718 omp_firstprivatize_variable (ctx, TYPE_MAX_VALUE (type));
4719 break;
4720
4721 case ARRAY_TYPE:
4722 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (type));
4723 omp_firstprivatize_type_sizes (ctx, TYPE_DOMAIN (type));
4724 break;
4725
4726 case RECORD_TYPE:
4727 case UNION_TYPE:
4728 case QUAL_UNION_TYPE:
4729 {
4730 tree field;
4731 for (field = TYPE_FIELDS (type); field; field = TREE_CHAIN (field))
4732 if (TREE_CODE (field) == FIELD_DECL)
4733 {
4734 omp_firstprivatize_variable (ctx, DECL_FIELD_OFFSET (field));
4735 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (field));
4736 }
4737 }
4738 break;
4739
4740 case POINTER_TYPE:
4741 case REFERENCE_TYPE:
4742 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (type));
4743 break;
4744
4745 default:
4746 break;
4747 }
4748
4749 omp_firstprivatize_variable (ctx, TYPE_SIZE (type));
4750 omp_firstprivatize_variable (ctx, TYPE_SIZE_UNIT (type));
4751 lang_hooks.types.omp_firstprivatize_type_sizes (ctx, type);
4752 }
4753
4754 /* Add an entry for DECL in the OpenMP context CTX with FLAGS. */
4755
4756 static void
4757 omp_add_variable (struct gimplify_omp_ctx *ctx, tree decl, unsigned int flags)
4758 {
4759 splay_tree_node n;
4760 unsigned int nflags;
4761 tree t;
4762
4763 if (decl == error_mark_node || TREE_TYPE (decl) == error_mark_node)
4764 return;
4765
4766 /* Never elide decls whose type has TREE_ADDRESSABLE set. This means
4767 there are constructors involved somewhere. */
4768 if (TREE_ADDRESSABLE (TREE_TYPE (decl))
4769 || TYPE_NEEDS_CONSTRUCTING (TREE_TYPE (decl)))
4770 flags |= GOVD_SEEN;
4771
4772 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
4773 if (n != NULL)
4774 {
4775 /* We shouldn't be re-adding the decl with the same data
4776 sharing class. */
4777 gcc_assert ((n->value & GOVD_DATA_SHARE_CLASS & flags) == 0);
4778 /* The only combination of data sharing classes we should see is
4779 FIRSTPRIVATE and LASTPRIVATE. */
4780 nflags = n->value | flags;
4781 gcc_assert ((nflags & GOVD_DATA_SHARE_CLASS)
4782 == (GOVD_FIRSTPRIVATE | GOVD_LASTPRIVATE));
4783 n->value = nflags;
4784 return;
4785 }
4786
4787 /* When adding a variable-sized variable, we have to handle all sorts
4788 of additional bits of data: the pointer replacement variable, and
4789 the parameters of the type. */
4790 if (DECL_SIZE (decl) && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
4791 {
4792 /* Add the pointer replacement variable as PRIVATE if the variable
4793 replacement is private, else FIRSTPRIVATE since we'll need the
4794 address of the original variable either for SHARED, or for the
4795 copy into or out of the context. */
4796 if (!(flags & GOVD_LOCAL))
4797 {
4798 nflags = flags & GOVD_PRIVATE ? GOVD_PRIVATE : GOVD_FIRSTPRIVATE;
4799 nflags |= flags & GOVD_SEEN;
4800 t = DECL_VALUE_EXPR (decl);
4801 gcc_assert (TREE_CODE (t) == INDIRECT_REF);
4802 t = TREE_OPERAND (t, 0);
4803 gcc_assert (DECL_P (t));
4804 omp_add_variable (ctx, t, nflags);
4805 }
4806
4807 /* Add all of the variable and type parameters (which should have
4808 been gimplified to a formal temporary) as FIRSTPRIVATE. */
4809 omp_firstprivatize_variable (ctx, DECL_SIZE_UNIT (decl));
4810 omp_firstprivatize_variable (ctx, DECL_SIZE (decl));
4811 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (decl));
4812
4813 /* The variable-sized variable itself is never SHARED, only some form
4814 of PRIVATE. The sharing would take place via the pointer variable
4815 which we remapped above. */
4816 if (flags & GOVD_SHARED)
4817 flags = GOVD_PRIVATE | GOVD_DEBUG_PRIVATE
4818 | (flags & (GOVD_SEEN | GOVD_EXPLICIT));
4819
4820 /* We're going to make use of the TYPE_SIZE_UNIT at least in the
4821 alloca statement we generate for the variable, so make sure it
4822 is available. This isn't automatically needed for the SHARED
4823 case, since we won't be allocating local storage then.
4824 For local variables TYPE_SIZE_UNIT might not be gimplified yet,
4825 in this case omp_notice_variable will be called later
4826 on when it is gimplified. */
4827 else if (! (flags & GOVD_LOCAL))
4828 omp_notice_variable (ctx, TYPE_SIZE_UNIT (TREE_TYPE (decl)), true);
4829 }
4830 else if (lang_hooks.decls.omp_privatize_by_reference (decl))
4831 {
4832 gcc_assert ((flags & GOVD_LOCAL) == 0);
4833 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (decl));
4834
4835 /* Similar to the direct variable sized case above, we'll need the
4836 size of references being privatized. */
4837 if ((flags & GOVD_SHARED) == 0)
4838 {
4839 t = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl)));
4840 if (TREE_CODE (t) != INTEGER_CST)
4841 omp_notice_variable (ctx, t, true);
4842 }
4843 }
4844
4845 splay_tree_insert (ctx->variables, (splay_tree_key)decl, flags);
4846 }
4847
4848 /* Record the fact that DECL was used within the OpenMP context CTX.
4849 IN_CODE is true when real code uses DECL, and false when we should
4850 merely emit default(none) errors. Return true if DECL is going to
4851 be remapped and thus DECL shouldn't be gimplified into its
4852 DECL_VALUE_EXPR (if any). */
4853
4854 static bool
4855 omp_notice_variable (struct gimplify_omp_ctx *ctx, tree decl, bool in_code)
4856 {
4857 splay_tree_node n;
4858 unsigned flags = in_code ? GOVD_SEEN : 0;
4859 bool ret = false, shared;
4860
4861 if (decl == error_mark_node || TREE_TYPE (decl) == error_mark_node)
4862 return false;
4863
4864 /* Threadprivate variables are predetermined. */
4865 if (is_global_var (decl))
4866 {
4867 if (DECL_THREAD_LOCAL_P (decl))
4868 return false;
4869
4870 if (DECL_HAS_VALUE_EXPR_P (decl))
4871 {
4872 tree value = get_base_address (DECL_VALUE_EXPR (decl));
4873
4874 if (value && DECL_P (value) && DECL_THREAD_LOCAL_P (value))
4875 return false;
4876 }
4877 }
4878
4879 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
4880 if (n == NULL)
4881 {
4882 enum omp_clause_default_kind default_kind, kind;
4883
4884 if (!ctx->is_parallel)
4885 goto do_outer;
4886
4887 /* ??? Some compiler-generated variables (like SAVE_EXPRs) could be
4888 remapped firstprivate instead of shared. To some extent this is
4889 addressed in omp_firstprivatize_type_sizes, but not effectively. */
4890 default_kind = ctx->default_kind;
4891 kind = lang_hooks.decls.omp_predetermined_sharing (decl);
4892 if (kind != OMP_CLAUSE_DEFAULT_UNSPECIFIED)
4893 default_kind = kind;
4894
4895 switch (default_kind)
4896 {
4897 case OMP_CLAUSE_DEFAULT_NONE:
4898 error ("%qs not specified in enclosing parallel",
4899 IDENTIFIER_POINTER (DECL_NAME (decl)));
4900 error ("%Henclosing parallel", &ctx->location);
4901 /* FALLTHRU */
4902 case OMP_CLAUSE_DEFAULT_SHARED:
4903 flags |= GOVD_SHARED;
4904 break;
4905 case OMP_CLAUSE_DEFAULT_PRIVATE:
4906 flags |= GOVD_PRIVATE;
4907 break;
4908 default:
4909 gcc_unreachable ();
4910 }
4911
4912 omp_add_variable (ctx, decl, flags);
4913
4914 shared = (flags & GOVD_SHARED) != 0;
4915 ret = lang_hooks.decls.omp_disregard_value_expr (decl, shared);
4916 goto do_outer;
4917 }
4918
4919 shared = ((flags | n->value) & GOVD_SHARED) != 0;
4920 ret = lang_hooks.decls.omp_disregard_value_expr (decl, shared);
4921
4922 /* If nothing changed, there's nothing left to do. */
4923 if ((n->value & flags) == flags)
4924 return ret;
4925 flags |= n->value;
4926 n->value = flags;
4927
4928 do_outer:
4929 /* If the variable is private in the current context, then we don't
4930 need to propagate anything to an outer context. */
4931 if (flags & GOVD_PRIVATE)
4932 return ret;
4933 if (ctx->outer_context
4934 && omp_notice_variable (ctx->outer_context, decl, in_code))
4935 return true;
4936 return ret;
4937 }
4938
4939 /* Verify that DECL is private within CTX. If there's specific information
4940 to the contrary in the innermost scope, generate an error. */
4941
4942 static bool
4943 omp_is_private (struct gimplify_omp_ctx *ctx, tree decl)
4944 {
4945 splay_tree_node n;
4946
4947 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
4948 if (n != NULL)
4949 {
4950 if (n->value & GOVD_SHARED)
4951 {
4952 if (ctx == gimplify_omp_ctxp)
4953 {
4954 error ("iteration variable %qs should be private",
4955 IDENTIFIER_POINTER (DECL_NAME (decl)));
4956 n->value = GOVD_PRIVATE;
4957 return true;
4958 }
4959 else
4960 return false;
4961 }
4962 else if ((n->value & GOVD_EXPLICIT) != 0
4963 && (ctx == gimplify_omp_ctxp
4964 || (ctx->is_combined_parallel
4965 && gimplify_omp_ctxp->outer_context == ctx)))
4966 {
4967 if ((n->value & GOVD_FIRSTPRIVATE) != 0)
4968 error ("iteration variable %qs should not be firstprivate",
4969 IDENTIFIER_POINTER (DECL_NAME (decl)));
4970 else if ((n->value & GOVD_REDUCTION) != 0)
4971 error ("iteration variable %qs should not be reduction",
4972 IDENTIFIER_POINTER (DECL_NAME (decl)));
4973 }
4974 return true;
4975 }
4976
4977 if (ctx->is_parallel)
4978 return false;
4979 else if (ctx->outer_context)
4980 return omp_is_private (ctx->outer_context, decl);
4981 else
4982 return !is_global_var (decl);
4983 }
4984
4985 /* Return true if DECL is private within a parallel region
4986 that binds to the current construct's context or in parallel
4987 region's REDUCTION clause. */
4988
4989 static bool
4990 omp_check_private (struct gimplify_omp_ctx *ctx, tree decl)
4991 {
4992 splay_tree_node n;
4993
4994 do
4995 {
4996 ctx = ctx->outer_context;
4997 if (ctx == NULL)
4998 return !(is_global_var (decl)
4999 /* References might be private, but might be shared too. */
5000 || lang_hooks.decls.omp_privatize_by_reference (decl));
5001
5002 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
5003 if (n != NULL)
5004 return (n->value & GOVD_SHARED) == 0;
5005 }
5006 while (!ctx->is_parallel);
5007 return false;
5008 }
5009
5010 /* Scan the OpenMP clauses in *LIST_P, installing mappings into a new
5011 and previous omp contexts. */
5012
5013 static void
5014 gimplify_scan_omp_clauses (tree *list_p, tree *pre_p, bool in_parallel,
5015 bool in_combined_parallel)
5016 {
5017 struct gimplify_omp_ctx *ctx, *outer_ctx;
5018 tree c;
5019
5020 ctx = new_omp_context (in_parallel, in_combined_parallel);
5021 outer_ctx = ctx->outer_context;
5022
5023 while ((c = *list_p) != NULL)
5024 {
5025 enum gimplify_status gs;
5026 bool remove = false;
5027 bool notice_outer = true;
5028 const char *check_non_private = NULL;
5029 unsigned int flags;
5030 tree decl;
5031
5032 switch (OMP_CLAUSE_CODE (c))
5033 {
5034 case OMP_CLAUSE_PRIVATE:
5035 flags = GOVD_PRIVATE | GOVD_EXPLICIT;
5036 notice_outer = false;
5037 goto do_add;
5038 case OMP_CLAUSE_SHARED:
5039 flags = GOVD_SHARED | GOVD_EXPLICIT;
5040 goto do_add;
5041 case OMP_CLAUSE_FIRSTPRIVATE:
5042 flags = GOVD_FIRSTPRIVATE | GOVD_EXPLICIT;
5043 check_non_private = "firstprivate";
5044 goto do_add;
5045 case OMP_CLAUSE_LASTPRIVATE:
5046 flags = GOVD_LASTPRIVATE | GOVD_SEEN | GOVD_EXPLICIT;
5047 check_non_private = "lastprivate";
5048 goto do_add;
5049 case OMP_CLAUSE_REDUCTION:
5050 flags = GOVD_REDUCTION | GOVD_SEEN | GOVD_EXPLICIT;
5051 check_non_private = "reduction";
5052 goto do_add;
5053
5054 do_add:
5055 decl = OMP_CLAUSE_DECL (c);
5056 if (decl == error_mark_node || TREE_TYPE (decl) == error_mark_node)
5057 {
5058 remove = true;
5059 break;
5060 }
5061 omp_add_variable (ctx, decl, flags);
5062 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
5063 && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
5064 {
5065 omp_add_variable (ctx, OMP_CLAUSE_REDUCTION_PLACEHOLDER (c),
5066 GOVD_LOCAL | GOVD_SEEN);
5067 gimplify_omp_ctxp = ctx;
5068 push_gimplify_context ();
5069 gimplify_stmt (&OMP_CLAUSE_REDUCTION_INIT (c));
5070 pop_gimplify_context (OMP_CLAUSE_REDUCTION_INIT (c));
5071 push_gimplify_context ();
5072 gimplify_stmt (&OMP_CLAUSE_REDUCTION_MERGE (c));
5073 pop_gimplify_context (OMP_CLAUSE_REDUCTION_MERGE (c));
5074 gimplify_omp_ctxp = outer_ctx;
5075 }
5076 if (notice_outer)
5077 goto do_notice;
5078 break;
5079
5080 case OMP_CLAUSE_COPYIN:
5081 case OMP_CLAUSE_COPYPRIVATE:
5082 decl = OMP_CLAUSE_DECL (c);
5083 if (decl == error_mark_node || TREE_TYPE (decl) == error_mark_node)
5084 {
5085 remove = true;
5086 break;
5087 }
5088 do_notice:
5089 if (outer_ctx)
5090 omp_notice_variable (outer_ctx, decl, true);
5091 if (check_non_private
5092 && !in_parallel
5093 && omp_check_private (ctx, decl))
5094 {
5095 error ("%s variable %qs is private in outer context",
5096 check_non_private, IDENTIFIER_POINTER (DECL_NAME (decl)));
5097 remove = true;
5098 }
5099 break;
5100
5101 case OMP_CLAUSE_IF:
5102 OMP_CLAUSE_OPERAND (c, 0)
5103 = gimple_boolify (OMP_CLAUSE_OPERAND (c, 0));
5104 /* Fall through. */
5105
5106 case OMP_CLAUSE_SCHEDULE:
5107 case OMP_CLAUSE_NUM_THREADS:
5108 gs = gimplify_expr (&OMP_CLAUSE_OPERAND (c, 0), pre_p, NULL,
5109 is_gimple_val, fb_rvalue);
5110 if (gs == GS_ERROR)
5111 remove = true;
5112 break;
5113
5114 case OMP_CLAUSE_NOWAIT:
5115 case OMP_CLAUSE_ORDERED:
5116 break;
5117
5118 case OMP_CLAUSE_DEFAULT:
5119 ctx->default_kind = OMP_CLAUSE_DEFAULT_KIND (c);
5120 break;
5121
5122 default:
5123 gcc_unreachable ();
5124 }
5125
5126 if (remove)
5127 *list_p = OMP_CLAUSE_CHAIN (c);
5128 else
5129 list_p = &OMP_CLAUSE_CHAIN (c);
5130 }
5131
5132 gimplify_omp_ctxp = ctx;
5133 }
5134
5135 /* For all variables that were not actually used within the context,
5136 remove PRIVATE, SHARED, and FIRSTPRIVATE clauses. */
5137
5138 static int
5139 gimplify_adjust_omp_clauses_1 (splay_tree_node n, void *data)
5140 {
5141 tree *list_p = (tree *) data;
5142 tree decl = (tree) n->key;
5143 unsigned flags = n->value;
5144 enum omp_clause_code code;
5145 tree clause;
5146 bool private_debug;
5147
5148 if (flags & (GOVD_EXPLICIT | GOVD_LOCAL))
5149 return 0;
5150 if ((flags & GOVD_SEEN) == 0)
5151 return 0;
5152 if (flags & GOVD_DEBUG_PRIVATE)
5153 {
5154 gcc_assert ((flags & GOVD_DATA_SHARE_CLASS) == GOVD_PRIVATE);
5155 private_debug = true;
5156 }
5157 else
5158 private_debug
5159 = lang_hooks.decls.omp_private_debug_clause (decl,
5160 !!(flags & GOVD_SHARED));
5161 if (private_debug)
5162 code = OMP_CLAUSE_PRIVATE;
5163 else if (flags & GOVD_SHARED)
5164 {
5165 if (is_global_var (decl))
5166 {
5167 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp->outer_context;
5168 while (ctx != NULL)
5169 {
5170 splay_tree_node on
5171 = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
5172 if (on && (on->value & (GOVD_FIRSTPRIVATE | GOVD_LASTPRIVATE
5173 | GOVD_PRIVATE | GOVD_REDUCTION)) != 0)
5174 break;
5175 ctx = ctx->outer_context;
5176 }
5177 if (ctx == NULL)
5178 return 0;
5179 }
5180 code = OMP_CLAUSE_SHARED;
5181 }
5182 else if (flags & GOVD_PRIVATE)
5183 code = OMP_CLAUSE_PRIVATE;
5184 else if (flags & GOVD_FIRSTPRIVATE)
5185 code = OMP_CLAUSE_FIRSTPRIVATE;
5186 else
5187 gcc_unreachable ();
5188
5189 clause = build_omp_clause (code);
5190 OMP_CLAUSE_DECL (clause) = decl;
5191 OMP_CLAUSE_CHAIN (clause) = *list_p;
5192 if (private_debug)
5193 OMP_CLAUSE_PRIVATE_DEBUG (clause) = 1;
5194 *list_p = clause;
5195
5196 return 0;
5197 }
5198
5199 static void
5200 gimplify_adjust_omp_clauses (tree *list_p)
5201 {
5202 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
5203 tree c, decl;
5204
5205 while ((c = *list_p) != NULL)
5206 {
5207 splay_tree_node n;
5208 bool remove = false;
5209
5210 switch (OMP_CLAUSE_CODE (c))
5211 {
5212 case OMP_CLAUSE_PRIVATE:
5213 case OMP_CLAUSE_SHARED:
5214 case OMP_CLAUSE_FIRSTPRIVATE:
5215 decl = OMP_CLAUSE_DECL (c);
5216 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
5217 remove = !(n->value & GOVD_SEEN);
5218 if (! remove)
5219 {
5220 bool shared = OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED;
5221 if ((n->value & GOVD_DEBUG_PRIVATE)
5222 || lang_hooks.decls.omp_private_debug_clause (decl, shared))
5223 {
5224 gcc_assert ((n->value & GOVD_DEBUG_PRIVATE) == 0
5225 || ((n->value & GOVD_DATA_SHARE_CLASS)
5226 == GOVD_PRIVATE));
5227 OMP_CLAUSE_SET_CODE (c, OMP_CLAUSE_PRIVATE);
5228 OMP_CLAUSE_PRIVATE_DEBUG (c) = 1;
5229 }
5230 }
5231 break;
5232
5233 case OMP_CLAUSE_LASTPRIVATE:
5234 /* Make sure OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE is set to
5235 accurately reflect the presence of a FIRSTPRIVATE clause. */
5236 decl = OMP_CLAUSE_DECL (c);
5237 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
5238 OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c)
5239 = (n->value & GOVD_FIRSTPRIVATE) != 0;
5240 break;
5241
5242 case OMP_CLAUSE_REDUCTION:
5243 case OMP_CLAUSE_COPYIN:
5244 case OMP_CLAUSE_COPYPRIVATE:
5245 case OMP_CLAUSE_IF:
5246 case OMP_CLAUSE_NUM_THREADS:
5247 case OMP_CLAUSE_SCHEDULE:
5248 case OMP_CLAUSE_NOWAIT:
5249 case OMP_CLAUSE_ORDERED:
5250 case OMP_CLAUSE_DEFAULT:
5251 break;
5252
5253 default:
5254 gcc_unreachable ();
5255 }
5256
5257 if (remove)
5258 *list_p = OMP_CLAUSE_CHAIN (c);
5259 else
5260 list_p = &OMP_CLAUSE_CHAIN (c);
5261 }
5262
5263 /* Add in any implicit data sharing. */
5264 splay_tree_foreach (ctx->variables, gimplify_adjust_omp_clauses_1, list_p);
5265
5266 gimplify_omp_ctxp = ctx->outer_context;
5267 delete_omp_context (ctx);
5268 }
5269
5270 /* Gimplify the contents of an OMP_PARALLEL statement. This involves
5271 gimplification of the body, as well as scanning the body for used
5272 variables. We need to do this scan now, because variable-sized
5273 decls will be decomposed during gimplification. */
5274
5275 static enum gimplify_status
5276 gimplify_omp_parallel (tree *expr_p, tree *pre_p)
5277 {
5278 tree expr = *expr_p;
5279
5280 gimplify_scan_omp_clauses (&OMP_PARALLEL_CLAUSES (expr), pre_p, true,
5281 OMP_PARALLEL_COMBINED (expr));
5282
5283 push_gimplify_context ();
5284
5285 gimplify_stmt (&OMP_PARALLEL_BODY (expr));
5286
5287 if (TREE_CODE (OMP_PARALLEL_BODY (expr)) == BIND_EXPR)
5288 pop_gimplify_context (OMP_PARALLEL_BODY (expr));
5289 else
5290 pop_gimplify_context (NULL_TREE);
5291
5292 gimplify_adjust_omp_clauses (&OMP_PARALLEL_CLAUSES (expr));
5293
5294 return GS_ALL_DONE;
5295 }
5296
5297 /* Gimplify the gross structure of an OMP_FOR statement. */
5298
5299 static enum gimplify_status
5300 gimplify_omp_for (tree *expr_p, tree *pre_p)
5301 {
5302 tree for_stmt, decl, var, t;
5303 enum gimplify_status ret = GS_OK;
5304 tree body, init_decl = NULL_TREE;
5305
5306 for_stmt = *expr_p;
5307
5308 gimplify_scan_omp_clauses (&OMP_FOR_CLAUSES (for_stmt), pre_p, false, false);
5309
5310 t = OMP_FOR_INIT (for_stmt);
5311 gcc_assert (TREE_CODE (t) == MODIFY_EXPR
5312 || TREE_CODE (t) == GIMPLE_MODIFY_STMT);
5313 decl = GENERIC_TREE_OPERAND (t, 0);
5314 gcc_assert (DECL_P (decl));
5315 gcc_assert (INTEGRAL_TYPE_P (TREE_TYPE (decl)));
5316
5317 /* Make sure the iteration variable is private. */
5318 if (omp_is_private (gimplify_omp_ctxp, decl))
5319 omp_notice_variable (gimplify_omp_ctxp, decl, true);
5320 else
5321 omp_add_variable (gimplify_omp_ctxp, decl, GOVD_PRIVATE | GOVD_SEEN);
5322
5323 /* If DECL is not a gimple register, create a temporary variable to act as an
5324 iteration counter. This is valid, since DECL cannot be modified in the
5325 body of the loop. */
5326 if (!is_gimple_reg (decl))
5327 {
5328 var = create_tmp_var (TREE_TYPE (decl), get_name (decl));
5329 GENERIC_TREE_OPERAND (t, 0) = var;
5330
5331 init_decl = build_gimple_modify_stmt (decl, var);
5332 omp_add_variable (gimplify_omp_ctxp, var, GOVD_PRIVATE | GOVD_SEEN);
5333 }
5334 else
5335 var = decl;
5336
5337 /* If OMP_FOR is re-gimplified, ensure all variables in pre-body
5338 are noticed. */
5339 gimplify_stmt (&OMP_FOR_PRE_BODY (for_stmt));
5340
5341 ret |= gimplify_expr (&GENERIC_TREE_OPERAND (t, 1),
5342 &OMP_FOR_PRE_BODY (for_stmt),
5343 NULL, is_gimple_val, fb_rvalue);
5344
5345 tree_to_gimple_tuple (&OMP_FOR_INIT (for_stmt));
5346
5347 t = OMP_FOR_COND (for_stmt);
5348 gcc_assert (COMPARISON_CLASS_P (t));
5349 gcc_assert (GENERIC_TREE_OPERAND (t, 0) == decl);
5350 TREE_OPERAND (t, 0) = var;
5351
5352 ret |= gimplify_expr (&GENERIC_TREE_OPERAND (t, 1),
5353 &OMP_FOR_PRE_BODY (for_stmt),
5354 NULL, is_gimple_val, fb_rvalue);
5355
5356 tree_to_gimple_tuple (&OMP_FOR_INCR (for_stmt));
5357 t = OMP_FOR_INCR (for_stmt);
5358 switch (TREE_CODE (t))
5359 {
5360 case PREINCREMENT_EXPR:
5361 case POSTINCREMENT_EXPR:
5362 t = build_int_cst (TREE_TYPE (decl), 1);
5363 t = build2 (PLUS_EXPR, TREE_TYPE (decl), var, t);
5364 t = build_gimple_modify_stmt (var, t);
5365 OMP_FOR_INCR (for_stmt) = t;
5366 break;
5367
5368 case PREDECREMENT_EXPR:
5369 case POSTDECREMENT_EXPR:
5370 t = build_int_cst (TREE_TYPE (decl), -1);
5371 t = build2 (PLUS_EXPR, TREE_TYPE (decl), var, t);
5372 t = build_gimple_modify_stmt (var, t);
5373 OMP_FOR_INCR (for_stmt) = t;
5374 break;
5375
5376 case GIMPLE_MODIFY_STMT:
5377 gcc_assert (GIMPLE_STMT_OPERAND (t, 0) == decl);
5378 GIMPLE_STMT_OPERAND (t, 0) = var;
5379
5380 t = GIMPLE_STMT_OPERAND (t, 1);
5381 switch (TREE_CODE (t))
5382 {
5383 case PLUS_EXPR:
5384 if (TREE_OPERAND (t, 1) == decl)
5385 {
5386 TREE_OPERAND (t, 1) = TREE_OPERAND (t, 0);
5387 TREE_OPERAND (t, 0) = var;
5388 break;
5389 }
5390
5391 /* Fallthru. */
5392 case MINUS_EXPR:
5393 gcc_assert (TREE_OPERAND (t, 0) == decl);
5394 TREE_OPERAND (t, 0) = var;
5395 break;
5396 default:
5397 gcc_unreachable ();
5398 }
5399
5400 ret |= gimplify_expr (&TREE_OPERAND (t, 1), &OMP_FOR_PRE_BODY (for_stmt),
5401 NULL, is_gimple_val, fb_rvalue);
5402 break;
5403
5404 default:
5405 gcc_unreachable ();
5406 }
5407
5408 body = OMP_FOR_BODY (for_stmt);
5409 gimplify_to_stmt_list (&body);
5410 t = alloc_stmt_list ();
5411 if (init_decl)
5412 append_to_statement_list (init_decl, &t);
5413 append_to_statement_list (body, &t);
5414 OMP_FOR_BODY (for_stmt) = t;
5415 gimplify_adjust_omp_clauses (&OMP_FOR_CLAUSES (for_stmt));
5416
5417 return ret == GS_ALL_DONE ? GS_ALL_DONE : GS_ERROR;
5418 }
5419
5420 /* Gimplify the gross structure of other OpenMP worksharing constructs.
5421 In particular, OMP_SECTIONS and OMP_SINGLE. */
5422
5423 static enum gimplify_status
5424 gimplify_omp_workshare (tree *expr_p, tree *pre_p)
5425 {
5426 tree stmt = *expr_p;
5427
5428 gimplify_scan_omp_clauses (&OMP_CLAUSES (stmt), pre_p, false, false);
5429 gimplify_to_stmt_list (&OMP_BODY (stmt));
5430 gimplify_adjust_omp_clauses (&OMP_CLAUSES (stmt));
5431
5432 return GS_ALL_DONE;
5433 }
5434
5435 /* A subroutine of gimplify_omp_atomic. The front end is supposed to have
5436 stabilized the lhs of the atomic operation as *ADDR. Return true if
5437 EXPR is this stabilized form. */
5438
5439 static bool
5440 goa_lhs_expr_p (tree expr, tree addr)
5441 {
5442 /* Also include casts to other type variants. The C front end is fond
5443 of adding these for e.g. volatile variables. This is like
5444 STRIP_TYPE_NOPS but includes the main variant lookup. */
5445 while ((TREE_CODE (expr) == NOP_EXPR
5446 || TREE_CODE (expr) == CONVERT_EXPR
5447 || TREE_CODE (expr) == NON_LVALUE_EXPR)
5448 && TREE_OPERAND (expr, 0) != error_mark_node
5449 && (TYPE_MAIN_VARIANT (TREE_TYPE (expr))
5450 == TYPE_MAIN_VARIANT (TREE_TYPE (TREE_OPERAND (expr, 0)))))
5451 expr = TREE_OPERAND (expr, 0);
5452
5453 if (TREE_CODE (expr) == INDIRECT_REF)
5454 {
5455 expr = TREE_OPERAND (expr, 0);
5456 while (expr != addr
5457 && (TREE_CODE (expr) == NOP_EXPR
5458 || TREE_CODE (expr) == CONVERT_EXPR
5459 || TREE_CODE (expr) == NON_LVALUE_EXPR)
5460 && TREE_CODE (expr) == TREE_CODE (addr)
5461 && TYPE_MAIN_VARIANT (TREE_TYPE (expr))
5462 == TYPE_MAIN_VARIANT (TREE_TYPE (addr)))
5463 {
5464 expr = TREE_OPERAND (expr, 0);
5465 addr = TREE_OPERAND (addr, 0);
5466 }
5467 if (expr == addr)
5468 return true;
5469 return (TREE_CODE (addr) == ADDR_EXPR
5470 && TREE_CODE (expr) == ADDR_EXPR
5471 && TREE_OPERAND (addr, 0) == TREE_OPERAND (expr, 0));
5472 }
5473 if (TREE_CODE (addr) == ADDR_EXPR && expr == TREE_OPERAND (addr, 0))
5474 return true;
5475 return false;
5476 }
5477
5478 /* Walk *EXPR_P and replace
5479 appearances of *LHS_ADDR with LHS_VAR. If an expression does not involve
5480 the lhs, evaluate it into a temporary. Return 1 if the lhs appeared as
5481 a subexpression, 0 if it did not, or -1 if an error was encountered. */
5482
5483 static int
5484 goa_stabilize_expr (tree *expr_p, tree *pre_p, tree lhs_addr, tree lhs_var)
5485 {
5486 tree expr = *expr_p;
5487 int saw_lhs;
5488
5489 if (goa_lhs_expr_p (expr, lhs_addr))
5490 {
5491 *expr_p = lhs_var;
5492 return 1;
5493 }
5494 if (is_gimple_val (expr))
5495 return 0;
5496
5497 saw_lhs = 0;
5498 switch (TREE_CODE_CLASS (TREE_CODE (expr)))
5499 {
5500 case tcc_binary:
5501 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 1), pre_p,
5502 lhs_addr, lhs_var);
5503 case tcc_unary:
5504 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 0), pre_p,
5505 lhs_addr, lhs_var);
5506 break;
5507 default:
5508 break;
5509 }
5510
5511 if (saw_lhs == 0)
5512 {
5513 enum gimplify_status gs;
5514 gs = gimplify_expr (expr_p, pre_p, NULL, is_gimple_val, fb_rvalue);
5515 if (gs != GS_ALL_DONE)
5516 saw_lhs = -1;
5517 }
5518
5519 return saw_lhs;
5520 }
5521
5522 /* Gimplify an OMP_ATOMIC statement. */
5523
5524 static enum gimplify_status
5525 gimplify_omp_atomic (tree *expr_p, tree *pre_p)
5526 {
5527 tree addr = TREE_OPERAND (*expr_p, 0);
5528 tree rhs = TREE_OPERAND (*expr_p, 1);
5529 tree type = TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (addr)));
5530 tree tmp_load, load, store;
5531
5532 tmp_load = create_tmp_var (type, NULL);
5533 if (goa_stabilize_expr (&rhs, pre_p, addr, tmp_load) < 0)
5534 return GS_ERROR;
5535
5536 if (gimplify_expr (&addr, pre_p, NULL, is_gimple_val, fb_rvalue)
5537 != GS_ALL_DONE)
5538 return GS_ERROR;
5539
5540 load = build2 (OMP_ATOMIC_LOAD, void_type_node, tmp_load, addr);
5541 append_to_statement_list (load, pre_p);
5542 if (gimplify_expr (&rhs, pre_p, NULL, is_gimple_val, fb_rvalue)
5543 != GS_ALL_DONE)
5544 return GS_ERROR;
5545 store = build1 (OMP_ATOMIC_STORE, void_type_node, rhs);
5546 *expr_p = store;
5547
5548 return GS_ALL_DONE;
5549
5550 }
5551
5552 /* Gimplifies the expression tree pointed to by EXPR_P. Return 0 if
5553 gimplification failed.
5554
5555 PRE_P points to the list where side effects that must happen before
5556 EXPR should be stored.
5557
5558 POST_P points to the list where side effects that must happen after
5559 EXPR should be stored, or NULL if there is no suitable list. In
5560 that case, we copy the result to a temporary, emit the
5561 post-effects, and then return the temporary.
5562
5563 GIMPLE_TEST_F points to a function that takes a tree T and
5564 returns nonzero if T is in the GIMPLE form requested by the
5565 caller. The GIMPLE predicates are in tree-gimple.c.
5566
5567 This test is used twice. Before gimplification, the test is
5568 invoked to determine whether *EXPR_P is already gimple enough. If
5569 that fails, *EXPR_P is gimplified according to its code and
5570 GIMPLE_TEST_F is called again. If the test still fails, then a new
5571 temporary variable is created and assigned the value of the
5572 gimplified expression.
5573
5574 FALLBACK tells the function what sort of a temporary we want. If the 1
5575 bit is set, an rvalue is OK. If the 2 bit is set, an lvalue is OK.
5576 If both are set, either is OK, but an lvalue is preferable.
5577
5578 The return value is either GS_ERROR or GS_ALL_DONE, since this function
5579 iterates until solution. */
5580
5581 enum gimplify_status
5582 gimplify_expr (tree *expr_p, tree *pre_p, tree *post_p,
5583 bool (* gimple_test_f) (tree), fallback_t fallback)
5584 {
5585 tree tmp;
5586 tree internal_pre = NULL_TREE;
5587 tree internal_post = NULL_TREE;
5588 tree save_expr;
5589 int is_statement = (pre_p == NULL);
5590 location_t saved_location;
5591 enum gimplify_status ret;
5592
5593 save_expr = *expr_p;
5594 if (save_expr == NULL_TREE)
5595 return GS_ALL_DONE;
5596
5597 /* We used to check the predicate here and return immediately if it
5598 succeeds. This is wrong; the design is for gimplification to be
5599 idempotent, and for the predicates to only test for valid forms, not
5600 whether they are fully simplified. */
5601
5602 /* Set up our internal queues if needed. */
5603 if (pre_p == NULL)
5604 pre_p = &internal_pre;
5605 if (post_p == NULL)
5606 post_p = &internal_post;
5607
5608 saved_location = input_location;
5609 if (save_expr != error_mark_node
5610 && EXPR_HAS_LOCATION (*expr_p))
5611 input_location = EXPR_LOCATION (*expr_p);
5612
5613 /* Loop over the specific gimplifiers until the toplevel node
5614 remains the same. */
5615 do
5616 {
5617 /* Strip away as many useless type conversions as possible
5618 at the toplevel. */
5619 STRIP_USELESS_TYPE_CONVERSION (*expr_p);
5620
5621 /* Remember the expr. */
5622 save_expr = *expr_p;
5623
5624 /* Die, die, die, my darling. */
5625 if (save_expr == error_mark_node
5626 || (!GIMPLE_STMT_P (save_expr)
5627 && TREE_TYPE (save_expr)
5628 && TREE_TYPE (save_expr) == error_mark_node))
5629 {
5630 ret = GS_ERROR;
5631 break;
5632 }
5633
5634 /* Do any language-specific gimplification. */
5635 ret = lang_hooks.gimplify_expr (expr_p, pre_p, post_p);
5636 if (ret == GS_OK)
5637 {
5638 if (*expr_p == NULL_TREE)
5639 break;
5640 if (*expr_p != save_expr)
5641 continue;
5642 }
5643 else if (ret != GS_UNHANDLED)
5644 break;
5645
5646 ret = GS_OK;
5647 switch (TREE_CODE (*expr_p))
5648 {
5649 /* First deal with the special cases. */
5650
5651 case POSTINCREMENT_EXPR:
5652 case POSTDECREMENT_EXPR:
5653 case PREINCREMENT_EXPR:
5654 case PREDECREMENT_EXPR:
5655 ret = gimplify_self_mod_expr (expr_p, pre_p, post_p,
5656 fallback != fb_none);
5657 break;
5658
5659 case ARRAY_REF:
5660 case ARRAY_RANGE_REF:
5661 case REALPART_EXPR:
5662 case IMAGPART_EXPR:
5663 case COMPONENT_REF:
5664 case VIEW_CONVERT_EXPR:
5665 ret = gimplify_compound_lval (expr_p, pre_p, post_p,
5666 fallback ? fallback : fb_rvalue);
5667 break;
5668
5669 case COND_EXPR:
5670 ret = gimplify_cond_expr (expr_p, pre_p, fallback);
5671 /* C99 code may assign to an array in a structure value of a
5672 conditional expression, and this has undefined behavior
5673 only on execution, so create a temporary if an lvalue is
5674 required. */
5675 if (fallback == fb_lvalue)
5676 {
5677 *expr_p = get_initialized_tmp_var (*expr_p, pre_p, post_p);
5678 mark_addressable (*expr_p);
5679 }
5680 break;
5681
5682 case CALL_EXPR:
5683 ret = gimplify_call_expr (expr_p, pre_p, fallback != fb_none);
5684 /* C99 code may assign to an array in a structure returned
5685 from a function, and this has undefined behavior only on
5686 execution, so create a temporary if an lvalue is
5687 required. */
5688 if (fallback == fb_lvalue)
5689 {
5690 *expr_p = get_initialized_tmp_var (*expr_p, pre_p, post_p);
5691 mark_addressable (*expr_p);
5692 }
5693 break;
5694
5695 case TREE_LIST:
5696 gcc_unreachable ();
5697
5698 case COMPOUND_EXPR:
5699 ret = gimplify_compound_expr (expr_p, pre_p, fallback != fb_none);
5700 break;
5701
5702 case MODIFY_EXPR:
5703 case GIMPLE_MODIFY_STMT:
5704 case INIT_EXPR:
5705 ret = gimplify_modify_expr (expr_p, pre_p, post_p,
5706 fallback != fb_none);
5707
5708 if (*expr_p)
5709 {
5710 /* The distinction between MODIFY_EXPR and INIT_EXPR is no longer
5711 useful. */
5712 if (TREE_CODE (*expr_p) == INIT_EXPR)
5713 TREE_SET_CODE (*expr_p, MODIFY_EXPR);
5714
5715 /* Convert MODIFY_EXPR to GIMPLE_MODIFY_STMT. */
5716 if (TREE_CODE (*expr_p) == MODIFY_EXPR)
5717 tree_to_gimple_tuple (expr_p);
5718 }
5719
5720 break;
5721
5722 case TRUTH_ANDIF_EXPR:
5723 case TRUTH_ORIF_EXPR:
5724 ret = gimplify_boolean_expr (expr_p);
5725 break;
5726
5727 case TRUTH_NOT_EXPR:
5728 TREE_OPERAND (*expr_p, 0)
5729 = gimple_boolify (TREE_OPERAND (*expr_p, 0));
5730 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
5731 is_gimple_val, fb_rvalue);
5732 recalculate_side_effects (*expr_p);
5733 break;
5734
5735 case ADDR_EXPR:
5736 ret = gimplify_addr_expr (expr_p, pre_p, post_p);
5737 break;
5738
5739 case VA_ARG_EXPR:
5740 ret = gimplify_va_arg_expr (expr_p, pre_p, post_p);
5741 break;
5742
5743 case CONVERT_EXPR:
5744 case NOP_EXPR:
5745 if (IS_EMPTY_STMT (*expr_p))
5746 {
5747 ret = GS_ALL_DONE;
5748 break;
5749 }
5750
5751 if (VOID_TYPE_P (TREE_TYPE (*expr_p))
5752 || fallback == fb_none)
5753 {
5754 /* Just strip a conversion to void (or in void context) and
5755 try again. */
5756 *expr_p = TREE_OPERAND (*expr_p, 0);
5757 break;
5758 }
5759
5760 ret = gimplify_conversion (expr_p);
5761 if (ret == GS_ERROR)
5762 break;
5763 if (*expr_p != save_expr)
5764 break;
5765 /* FALLTHRU */
5766
5767 case FIX_TRUNC_EXPR:
5768 /* unary_expr: ... | '(' cast ')' val | ... */
5769 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
5770 is_gimple_val, fb_rvalue);
5771 recalculate_side_effects (*expr_p);
5772 break;
5773
5774 case INDIRECT_REF:
5775 *expr_p = fold_indirect_ref (*expr_p);
5776 if (*expr_p != save_expr)
5777 break;
5778 /* else fall through. */
5779 case ALIGN_INDIRECT_REF:
5780 case MISALIGNED_INDIRECT_REF:
5781 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
5782 is_gimple_reg, fb_rvalue);
5783 recalculate_side_effects (*expr_p);
5784 break;
5785
5786 /* Constants need not be gimplified. */
5787 case INTEGER_CST:
5788 case REAL_CST:
5789 case FIXED_CST:
5790 case STRING_CST:
5791 case COMPLEX_CST:
5792 case VECTOR_CST:
5793 ret = GS_ALL_DONE;
5794 break;
5795
5796 case CONST_DECL:
5797 /* If we require an lvalue, such as for ADDR_EXPR, retain the
5798 CONST_DECL node. Otherwise the decl is replaceable by its
5799 value. */
5800 /* ??? Should be == fb_lvalue, but ADDR_EXPR passes fb_either. */
5801 if (fallback & fb_lvalue)
5802 ret = GS_ALL_DONE;
5803 else
5804 *expr_p = DECL_INITIAL (*expr_p);
5805 break;
5806
5807 case DECL_EXPR:
5808 ret = gimplify_decl_expr (expr_p);
5809 break;
5810
5811 case EXC_PTR_EXPR:
5812 /* FIXME make this a decl. */
5813 ret = GS_ALL_DONE;
5814 break;
5815
5816 case BIND_EXPR:
5817 ret = gimplify_bind_expr (expr_p, pre_p);
5818 break;
5819
5820 case LOOP_EXPR:
5821 ret = gimplify_loop_expr (expr_p, pre_p);
5822 break;
5823
5824 case SWITCH_EXPR:
5825 ret = gimplify_switch_expr (expr_p, pre_p);
5826 break;
5827
5828 case EXIT_EXPR:
5829 ret = gimplify_exit_expr (expr_p);
5830 break;
5831
5832 case GOTO_EXPR:
5833 /* If the target is not LABEL, then it is a computed jump
5834 and the target needs to be gimplified. */
5835 if (TREE_CODE (GOTO_DESTINATION (*expr_p)) != LABEL_DECL)
5836 ret = gimplify_expr (&GOTO_DESTINATION (*expr_p), pre_p,
5837 NULL, is_gimple_val, fb_rvalue);
5838 break;
5839
5840 case LABEL_EXPR:
5841 ret = GS_ALL_DONE;
5842 gcc_assert (decl_function_context (LABEL_EXPR_LABEL (*expr_p))
5843 == current_function_decl);
5844 break;
5845
5846 case CASE_LABEL_EXPR:
5847 ret = gimplify_case_label_expr (expr_p);
5848 break;
5849
5850 case RETURN_EXPR:
5851 ret = gimplify_return_expr (*expr_p, pre_p);
5852 break;
5853
5854 case CONSTRUCTOR:
5855 /* Don't reduce this in place; let gimplify_init_constructor work its
5856 magic. Buf if we're just elaborating this for side effects, just
5857 gimplify any element that has side-effects. */
5858 if (fallback == fb_none)
5859 {
5860 unsigned HOST_WIDE_INT ix;
5861 constructor_elt *ce;
5862 tree temp = NULL_TREE;
5863 for (ix = 0;
5864 VEC_iterate (constructor_elt, CONSTRUCTOR_ELTS (*expr_p),
5865 ix, ce);
5866 ix++)
5867 if (TREE_SIDE_EFFECTS (ce->value))
5868 append_to_statement_list (ce->value, &temp);
5869
5870 *expr_p = temp;
5871 ret = GS_OK;
5872 }
5873 /* C99 code may assign to an array in a constructed
5874 structure or union, and this has undefined behavior only
5875 on execution, so create a temporary if an lvalue is
5876 required. */
5877 else if (fallback == fb_lvalue)
5878 {
5879 *expr_p = get_initialized_tmp_var (*expr_p, pre_p, post_p);
5880 mark_addressable (*expr_p);
5881 }
5882 else
5883 ret = GS_ALL_DONE;
5884 break;
5885
5886 /* The following are special cases that are not handled by the
5887 original GIMPLE grammar. */
5888
5889 /* SAVE_EXPR nodes are converted into a GIMPLE identifier and
5890 eliminated. */
5891 case SAVE_EXPR:
5892 ret = gimplify_save_expr (expr_p, pre_p, post_p);
5893 break;
5894
5895 case BIT_FIELD_REF:
5896 {
5897 enum gimplify_status r0, r1, r2;
5898
5899 r0 = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
5900 is_gimple_lvalue, fb_either);
5901 r1 = gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p, post_p,
5902 is_gimple_val, fb_rvalue);
5903 r2 = gimplify_expr (&TREE_OPERAND (*expr_p, 2), pre_p, post_p,
5904 is_gimple_val, fb_rvalue);
5905 recalculate_side_effects (*expr_p);
5906
5907 ret = MIN (r0, MIN (r1, r2));
5908 }
5909 break;
5910
5911 case NON_LVALUE_EXPR:
5912 /* This should have been stripped above. */
5913 gcc_unreachable ();
5914
5915 case ASM_EXPR:
5916 ret = gimplify_asm_expr (expr_p, pre_p, post_p);
5917 break;
5918
5919 case TRY_FINALLY_EXPR:
5920 case TRY_CATCH_EXPR:
5921 gimplify_to_stmt_list (&TREE_OPERAND (*expr_p, 0));
5922 gimplify_to_stmt_list (&TREE_OPERAND (*expr_p, 1));
5923 ret = GS_ALL_DONE;
5924 break;
5925
5926 case CLEANUP_POINT_EXPR:
5927 ret = gimplify_cleanup_point_expr (expr_p, pre_p);
5928 break;
5929
5930 case TARGET_EXPR:
5931 ret = gimplify_target_expr (expr_p, pre_p, post_p);
5932 break;
5933
5934 case CATCH_EXPR:
5935 gimplify_to_stmt_list (&CATCH_BODY (*expr_p));
5936 ret = GS_ALL_DONE;
5937 break;
5938
5939 case EH_FILTER_EXPR:
5940 gimplify_to_stmt_list (&EH_FILTER_FAILURE (*expr_p));
5941 ret = GS_ALL_DONE;
5942 break;
5943
5944 case CHANGE_DYNAMIC_TYPE_EXPR:
5945 ret = gimplify_expr (&CHANGE_DYNAMIC_TYPE_LOCATION (*expr_p),
5946 pre_p, post_p, is_gimple_reg, fb_lvalue);
5947 break;
5948
5949 case OBJ_TYPE_REF:
5950 {
5951 enum gimplify_status r0, r1;
5952 r0 = gimplify_expr (&OBJ_TYPE_REF_OBJECT (*expr_p), pre_p, post_p,
5953 is_gimple_val, fb_rvalue);
5954 r1 = gimplify_expr (&OBJ_TYPE_REF_EXPR (*expr_p), pre_p, post_p,
5955 is_gimple_val, fb_rvalue);
5956 ret = MIN (r0, r1);
5957 }
5958 break;
5959
5960 case LABEL_DECL:
5961 /* We get here when taking the address of a label. We mark
5962 the label as "forced"; meaning it can never be removed and
5963 it is a potential target for any computed goto. */
5964 FORCED_LABEL (*expr_p) = 1;
5965 ret = GS_ALL_DONE;
5966 break;
5967
5968 case STATEMENT_LIST:
5969 ret = gimplify_statement_list (expr_p, pre_p);
5970 break;
5971
5972 case WITH_SIZE_EXPR:
5973 {
5974 gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
5975 post_p == &internal_post ? NULL : post_p,
5976 gimple_test_f, fallback);
5977 gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p, post_p,
5978 is_gimple_val, fb_rvalue);
5979 }
5980 break;
5981
5982 case VAR_DECL:
5983 case PARM_DECL:
5984 ret = gimplify_var_or_parm_decl (expr_p);
5985 break;
5986
5987 case RESULT_DECL:
5988 /* When within an OpenMP context, notice uses of variables. */
5989 if (gimplify_omp_ctxp)
5990 omp_notice_variable (gimplify_omp_ctxp, *expr_p, true);
5991 ret = GS_ALL_DONE;
5992 break;
5993
5994 case SSA_NAME:
5995 /* Allow callbacks into the gimplifier during optimization. */
5996 ret = GS_ALL_DONE;
5997 break;
5998
5999 case OMP_PARALLEL:
6000 ret = gimplify_omp_parallel (expr_p, pre_p);
6001 break;
6002
6003 case OMP_FOR:
6004 ret = gimplify_omp_for (expr_p, pre_p);
6005 break;
6006
6007 case OMP_SECTIONS:
6008 case OMP_SINGLE:
6009 ret = gimplify_omp_workshare (expr_p, pre_p);
6010 break;
6011
6012 case OMP_SECTION:
6013 case OMP_MASTER:
6014 case OMP_ORDERED:
6015 case OMP_CRITICAL:
6016 gimplify_to_stmt_list (&OMP_BODY (*expr_p));
6017 break;
6018
6019 case OMP_ATOMIC:
6020 ret = gimplify_omp_atomic (expr_p, pre_p);
6021 break;
6022
6023 case OMP_RETURN:
6024 case OMP_CONTINUE:
6025 case OMP_ATOMIC_LOAD:
6026 case OMP_ATOMIC_STORE:
6027
6028 ret = GS_ALL_DONE;
6029 break;
6030
6031 case POINTER_PLUS_EXPR:
6032 /* Convert ((type *)A)+offset into &A->field_of_type_and_offset.
6033 The second is gimple immediate saving a need for extra statement.
6034 */
6035 if (TREE_CODE (TREE_OPERAND (*expr_p, 1)) == INTEGER_CST
6036 && (tmp = maybe_fold_offset_to_reference
6037 (TREE_OPERAND (*expr_p, 0), TREE_OPERAND (*expr_p, 1),
6038 TREE_TYPE (TREE_TYPE (*expr_p)))))
6039 {
6040 tree ptr_type = build_pointer_type (TREE_TYPE (tmp));
6041 if (useless_type_conversion_p (TREE_TYPE (*expr_p), ptr_type))
6042 {
6043 *expr_p = build_fold_addr_expr_with_type (tmp, ptr_type);
6044 break;
6045 }
6046 }
6047 /* Convert (void *)&a + 4 into (void *)&a[1]. */
6048 if (TREE_CODE (TREE_OPERAND (*expr_p, 0)) == NOP_EXPR
6049 && TREE_CODE (TREE_OPERAND (*expr_p, 1)) == INTEGER_CST
6050 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (*expr_p,
6051 0),0)))
6052 && (tmp = maybe_fold_offset_to_reference
6053 (TREE_OPERAND (TREE_OPERAND (*expr_p, 0), 0),
6054 TREE_OPERAND (*expr_p, 1),
6055 TREE_TYPE (TREE_TYPE
6056 (TREE_OPERAND (TREE_OPERAND (*expr_p, 0),
6057 0))))))
6058 {
6059 tmp = build_fold_addr_expr (tmp);
6060 *expr_p = fold_convert (TREE_TYPE (*expr_p), tmp);
6061 break;
6062 }
6063 /* FALLTHRU */
6064 default:
6065 switch (TREE_CODE_CLASS (TREE_CODE (*expr_p)))
6066 {
6067 case tcc_comparison:
6068 /* Handle comparison of objects of non scalar mode aggregates
6069 with a call to memcmp. It would be nice to only have to do
6070 this for variable-sized objects, but then we'd have to allow
6071 the same nest of reference nodes we allow for MODIFY_EXPR and
6072 that's too complex.
6073
6074 Compare scalar mode aggregates as scalar mode values. Using
6075 memcmp for them would be very inefficient at best, and is
6076 plain wrong if bitfields are involved. */
6077
6078 {
6079 tree type = TREE_TYPE (TREE_OPERAND (*expr_p, 1));
6080
6081 if (!AGGREGATE_TYPE_P (type))
6082 goto expr_2;
6083 else if (TYPE_MODE (type) != BLKmode)
6084 ret = gimplify_scalar_mode_aggregate_compare (expr_p);
6085 else
6086 ret = gimplify_variable_sized_compare (expr_p);
6087
6088 break;
6089 }
6090
6091 /* If *EXPR_P does not need to be special-cased, handle it
6092 according to its class. */
6093 case tcc_unary:
6094 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
6095 post_p, is_gimple_val, fb_rvalue);
6096 break;
6097
6098 case tcc_binary:
6099 expr_2:
6100 {
6101 enum gimplify_status r0, r1;
6102
6103 r0 = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
6104 post_p, is_gimple_val, fb_rvalue);
6105 r1 = gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p,
6106 post_p, is_gimple_val, fb_rvalue);
6107
6108 ret = MIN (r0, r1);
6109 break;
6110 }
6111
6112 case tcc_declaration:
6113 case tcc_constant:
6114 ret = GS_ALL_DONE;
6115 goto dont_recalculate;
6116
6117 default:
6118 gcc_assert (TREE_CODE (*expr_p) == TRUTH_AND_EXPR
6119 || TREE_CODE (*expr_p) == TRUTH_OR_EXPR
6120 || TREE_CODE (*expr_p) == TRUTH_XOR_EXPR);
6121 goto expr_2;
6122 }
6123
6124 recalculate_side_effects (*expr_p);
6125 dont_recalculate:
6126 break;
6127 }
6128
6129 /* If we replaced *expr_p, gimplify again. */
6130 if (ret == GS_OK && (*expr_p == NULL || *expr_p == save_expr))
6131 ret = GS_ALL_DONE;
6132 }
6133 while (ret == GS_OK);
6134
6135 /* If we encountered an error_mark somewhere nested inside, either
6136 stub out the statement or propagate the error back out. */
6137 if (ret == GS_ERROR)
6138 {
6139 if (is_statement)
6140 *expr_p = NULL;
6141 goto out;
6142 }
6143
6144 /* This was only valid as a return value from the langhook, which
6145 we handled. Make sure it doesn't escape from any other context. */
6146 gcc_assert (ret != GS_UNHANDLED);
6147
6148 if (fallback == fb_none && *expr_p && !is_gimple_stmt (*expr_p))
6149 {
6150 /* We aren't looking for a value, and we don't have a valid
6151 statement. If it doesn't have side-effects, throw it away. */
6152 if (!TREE_SIDE_EFFECTS (*expr_p))
6153 *expr_p = NULL;
6154 else if (!TREE_THIS_VOLATILE (*expr_p))
6155 {
6156 /* This is probably a _REF that contains something nested that
6157 has side effects. Recurse through the operands to find it. */
6158 enum tree_code code = TREE_CODE (*expr_p);
6159
6160 switch (code)
6161 {
6162 case COMPONENT_REF:
6163 case REALPART_EXPR:
6164 case IMAGPART_EXPR:
6165 case VIEW_CONVERT_EXPR:
6166 gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
6167 gimple_test_f, fallback);
6168 break;
6169
6170 case ARRAY_REF:
6171 case ARRAY_RANGE_REF:
6172 gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
6173 gimple_test_f, fallback);
6174 gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p, post_p,
6175 gimple_test_f, fallback);
6176 break;
6177
6178 default:
6179 /* Anything else with side-effects must be converted to
6180 a valid statement before we get here. */
6181 gcc_unreachable ();
6182 }
6183
6184 *expr_p = NULL;
6185 }
6186 else if (COMPLETE_TYPE_P (TREE_TYPE (*expr_p))
6187 && TYPE_MODE (TREE_TYPE (*expr_p)) != BLKmode)
6188 {
6189 /* Historically, the compiler has treated a bare reference
6190 to a non-BLKmode volatile lvalue as forcing a load. */
6191 tree type = TYPE_MAIN_VARIANT (TREE_TYPE (*expr_p));
6192 /* Normally, we do not want to create a temporary for a
6193 TREE_ADDRESSABLE type because such a type should not be
6194 copied by bitwise-assignment. However, we make an
6195 exception here, as all we are doing here is ensuring that
6196 we read the bytes that make up the type. We use
6197 create_tmp_var_raw because create_tmp_var will abort when
6198 given a TREE_ADDRESSABLE type. */
6199 tree tmp = create_tmp_var_raw (type, "vol");
6200 gimple_add_tmp_var (tmp);
6201 *expr_p = build_gimple_modify_stmt (tmp, *expr_p);
6202 }
6203 else
6204 /* We can't do anything useful with a volatile reference to
6205 an incomplete type, so just throw it away. Likewise for
6206 a BLKmode type, since any implicit inner load should
6207 already have been turned into an explicit one by the
6208 gimplification process. */
6209 *expr_p = NULL;
6210 }
6211
6212 /* If we are gimplifying at the statement level, we're done. Tack
6213 everything together and replace the original statement with the
6214 gimplified form. */
6215 if (fallback == fb_none || is_statement)
6216 {
6217 if (internal_pre || internal_post)
6218 {
6219 append_to_statement_list (*expr_p, &internal_pre);
6220 append_to_statement_list (internal_post, &internal_pre);
6221 annotate_all_with_locus (&internal_pre, input_location);
6222 *expr_p = internal_pre;
6223 }
6224 else if (!*expr_p)
6225 ;
6226 else if (TREE_CODE (*expr_p) == STATEMENT_LIST)
6227 annotate_all_with_locus (expr_p, input_location);
6228 else
6229 annotate_one_with_locus (*expr_p, input_location);
6230 goto out;
6231 }
6232
6233 /* Otherwise we're gimplifying a subexpression, so the resulting value is
6234 interesting. */
6235
6236 /* If it's sufficiently simple already, we're done. Unless we are
6237 handling some post-effects internally; if that's the case, we need to
6238 copy into a temp before adding the post-effects to the tree. */
6239 if (!internal_post && (*gimple_test_f) (*expr_p))
6240 goto out;
6241
6242 /* Otherwise, we need to create a new temporary for the gimplified
6243 expression. */
6244
6245 /* We can't return an lvalue if we have an internal postqueue. The
6246 object the lvalue refers to would (probably) be modified by the
6247 postqueue; we need to copy the value out first, which means an
6248 rvalue. */
6249 if ((fallback & fb_lvalue) && !internal_post
6250 && is_gimple_addressable (*expr_p))
6251 {
6252 /* An lvalue will do. Take the address of the expression, store it
6253 in a temporary, and replace the expression with an INDIRECT_REF of
6254 that temporary. */
6255 tmp = build_fold_addr_expr (*expr_p);
6256 gimplify_expr (&tmp, pre_p, post_p, is_gimple_reg, fb_rvalue);
6257 *expr_p = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (tmp)), tmp);
6258 }
6259 else if ((fallback & fb_rvalue) && is_gimple_formal_tmp_rhs (*expr_p))
6260 {
6261 gcc_assert (!VOID_TYPE_P (TREE_TYPE (*expr_p)));
6262
6263 /* An rvalue will do. Assign the gimplified expression into a new
6264 temporary TMP and replace the original expression with TMP. */
6265
6266 if (internal_post || (fallback & fb_lvalue))
6267 /* The postqueue might change the value of the expression between
6268 the initialization and use of the temporary, so we can't use a
6269 formal temp. FIXME do we care? */
6270 *expr_p = get_initialized_tmp_var (*expr_p, pre_p, post_p);
6271 else
6272 *expr_p = get_formal_tmp_var (*expr_p, pre_p);
6273
6274 if (TREE_CODE (*expr_p) != SSA_NAME)
6275 DECL_GIMPLE_FORMAL_TEMP_P (*expr_p) = 1;
6276 }
6277 else
6278 {
6279 #ifdef ENABLE_CHECKING
6280 if (!(fallback & fb_mayfail))
6281 {
6282 fprintf (stderr, "gimplification failed:\n");
6283 print_generic_expr (stderr, *expr_p, 0);
6284 debug_tree (*expr_p);
6285 internal_error ("gimplification failed");
6286 }
6287 #endif
6288 gcc_assert (fallback & fb_mayfail);
6289 /* If this is an asm statement, and the user asked for the
6290 impossible, don't die. Fail and let gimplify_asm_expr
6291 issue an error. */
6292 ret = GS_ERROR;
6293 goto out;
6294 }
6295
6296 /* Make sure the temporary matches our predicate. */
6297 gcc_assert ((*gimple_test_f) (*expr_p));
6298
6299 if (internal_post)
6300 {
6301 annotate_all_with_locus (&internal_post, input_location);
6302 append_to_statement_list (internal_post, pre_p);
6303 }
6304
6305 out:
6306 input_location = saved_location;
6307 return ret;
6308 }
6309
6310 /* Look through TYPE for variable-sized objects and gimplify each such
6311 size that we find. Add to LIST_P any statements generated. */
6312
6313 void
6314 gimplify_type_sizes (tree type, tree *list_p)
6315 {
6316 tree field, t;
6317
6318 if (type == NULL || type == error_mark_node)
6319 return;
6320
6321 /* We first do the main variant, then copy into any other variants. */
6322 type = TYPE_MAIN_VARIANT (type);
6323
6324 /* Avoid infinite recursion. */
6325 if (TYPE_SIZES_GIMPLIFIED (type))
6326 return;
6327
6328 TYPE_SIZES_GIMPLIFIED (type) = 1;
6329
6330 switch (TREE_CODE (type))
6331 {
6332 case INTEGER_TYPE:
6333 case ENUMERAL_TYPE:
6334 case BOOLEAN_TYPE:
6335 case REAL_TYPE:
6336 case FIXED_POINT_TYPE:
6337 gimplify_one_sizepos (&TYPE_MIN_VALUE (type), list_p);
6338 gimplify_one_sizepos (&TYPE_MAX_VALUE (type), list_p);
6339
6340 for (t = TYPE_NEXT_VARIANT (type); t; t = TYPE_NEXT_VARIANT (t))
6341 {
6342 TYPE_MIN_VALUE (t) = TYPE_MIN_VALUE (type);
6343 TYPE_MAX_VALUE (t) = TYPE_MAX_VALUE (type);
6344 }
6345 break;
6346
6347 case ARRAY_TYPE:
6348 /* These types may not have declarations, so handle them here. */
6349 gimplify_type_sizes (TREE_TYPE (type), list_p);
6350 gimplify_type_sizes (TYPE_DOMAIN (type), list_p);
6351 break;
6352
6353 case RECORD_TYPE:
6354 case UNION_TYPE:
6355 case QUAL_UNION_TYPE:
6356 for (field = TYPE_FIELDS (type); field; field = TREE_CHAIN (field))
6357 if (TREE_CODE (field) == FIELD_DECL)
6358 {
6359 gimplify_one_sizepos (&DECL_FIELD_OFFSET (field), list_p);
6360 gimplify_type_sizes (TREE_TYPE (field), list_p);
6361 }
6362 break;
6363
6364 case POINTER_TYPE:
6365 case REFERENCE_TYPE:
6366 /* We used to recurse on the pointed-to type here, which turned out to
6367 be incorrect because its definition might refer to variables not
6368 yet initialized at this point if a forward declaration is involved.
6369
6370 It was actually useful for anonymous pointed-to types to ensure
6371 that the sizes evaluation dominates every possible later use of the
6372 values. Restricting to such types here would be safe since there
6373 is no possible forward declaration around, but would introduce an
6374 undesirable middle-end semantic to anonymity. We then defer to
6375 front-ends the responsibility of ensuring that the sizes are
6376 evaluated both early and late enough, e.g. by attaching artificial
6377 type declarations to the tree. */
6378 break;
6379
6380 default:
6381 break;
6382 }
6383
6384 gimplify_one_sizepos (&TYPE_SIZE (type), list_p);
6385 gimplify_one_sizepos (&TYPE_SIZE_UNIT (type), list_p);
6386
6387 for (t = TYPE_NEXT_VARIANT (type); t; t = TYPE_NEXT_VARIANT (t))
6388 {
6389 TYPE_SIZE (t) = TYPE_SIZE (type);
6390 TYPE_SIZE_UNIT (t) = TYPE_SIZE_UNIT (type);
6391 TYPE_SIZES_GIMPLIFIED (t) = 1;
6392 }
6393 }
6394
6395 /* A subroutine of gimplify_type_sizes to make sure that *EXPR_P,
6396 a size or position, has had all of its SAVE_EXPRs evaluated.
6397 We add any required statements to STMT_P. */
6398
6399 void
6400 gimplify_one_sizepos (tree *expr_p, tree *stmt_p)
6401 {
6402 tree type, expr = *expr_p;
6403
6404 /* We don't do anything if the value isn't there, is constant, or contains
6405 A PLACEHOLDER_EXPR. We also don't want to do anything if it's already
6406 a VAR_DECL. If it's a VAR_DECL from another function, the gimplifier
6407 will want to replace it with a new variable, but that will cause problems
6408 if this type is from outside the function. It's OK to have that here. */
6409 if (expr == NULL_TREE || TREE_CONSTANT (expr)
6410 || TREE_CODE (expr) == VAR_DECL
6411 || CONTAINS_PLACEHOLDER_P (expr))
6412 return;
6413
6414 type = TREE_TYPE (expr);
6415 *expr_p = unshare_expr (expr);
6416
6417 gimplify_expr (expr_p, stmt_p, NULL, is_gimple_val, fb_rvalue);
6418 expr = *expr_p;
6419
6420 /* Verify that we've an exact type match with the original expression.
6421 In particular, we do not wish to drop a "sizetype" in favour of a
6422 type of similar dimensions. We don't want to pollute the generic
6423 type-stripping code with this knowledge because it doesn't matter
6424 for the bulk of GENERIC/GIMPLE. It only matters that TYPE_SIZE_UNIT
6425 and friends retain their "sizetype-ness". */
6426 if (TREE_TYPE (expr) != type
6427 && TREE_CODE (type) == INTEGER_TYPE
6428 && TYPE_IS_SIZETYPE (type))
6429 {
6430 tree tmp;
6431
6432 *expr_p = create_tmp_var (type, NULL);
6433 tmp = build1 (NOP_EXPR, type, expr);
6434 tmp = build_gimple_modify_stmt (*expr_p, tmp);
6435 if (EXPR_HAS_LOCATION (expr))
6436 SET_EXPR_LOCUS (tmp, EXPR_LOCUS (expr));
6437 else
6438 SET_EXPR_LOCATION (tmp, input_location);
6439
6440 gimplify_and_add (tmp, stmt_p);
6441 }
6442 }
6443 \f
6444
6445 /* Gimplify the body of statements pointed to by BODY_P. FNDECL is the
6446 function decl containing BODY. */
6447
6448 void
6449 gimplify_body (tree *body_p, tree fndecl, bool do_parms)
6450 {
6451 location_t saved_location = input_location;
6452 tree body, parm_stmts;
6453
6454 timevar_push (TV_TREE_GIMPLIFY);
6455
6456 gcc_assert (gimplify_ctxp == NULL);
6457 push_gimplify_context ();
6458
6459 /* Unshare most shared trees in the body and in that of any nested functions.
6460 It would seem we don't have to do this for nested functions because
6461 they are supposed to be output and then the outer function gimplified
6462 first, but the g++ front end doesn't always do it that way. */
6463 unshare_body (body_p, fndecl);
6464 unvisit_body (body_p, fndecl);
6465
6466 /* Make sure input_location isn't set to something wierd. */
6467 input_location = DECL_SOURCE_LOCATION (fndecl);
6468
6469 /* Resolve callee-copies. This has to be done before processing
6470 the body so that DECL_VALUE_EXPR gets processed correctly. */
6471 parm_stmts = do_parms ? gimplify_parameters () : NULL;
6472
6473 /* Gimplify the function's body. */
6474 gimplify_stmt (body_p);
6475 body = *body_p;
6476
6477 if (!body)
6478 body = alloc_stmt_list ();
6479 else if (TREE_CODE (body) == STATEMENT_LIST)
6480 {
6481 tree t = expr_only (*body_p);
6482 if (t)
6483 body = t;
6484 }
6485
6486 /* If there isn't an outer BIND_EXPR, add one. */
6487 if (TREE_CODE (body) != BIND_EXPR)
6488 {
6489 tree b = build3 (BIND_EXPR, void_type_node, NULL_TREE,
6490 NULL_TREE, NULL_TREE);
6491 TREE_SIDE_EFFECTS (b) = 1;
6492 append_to_statement_list_force (body, &BIND_EXPR_BODY (b));
6493 body = b;
6494 }
6495
6496 /* If we had callee-copies statements, insert them at the beginning
6497 of the function. */
6498 if (parm_stmts)
6499 {
6500 append_to_statement_list_force (BIND_EXPR_BODY (body), &parm_stmts);
6501 BIND_EXPR_BODY (body) = parm_stmts;
6502 }
6503
6504 /* Unshare again, in case gimplification was sloppy. */
6505 unshare_all_trees (body);
6506
6507 *body_p = body;
6508
6509 pop_gimplify_context (body);
6510 gcc_assert (gimplify_ctxp == NULL);
6511
6512 #ifdef ENABLE_TYPES_CHECKING
6513 if (!errorcount && !sorrycount)
6514 verify_gimple_1 (BIND_EXPR_BODY (*body_p));
6515 #endif
6516
6517 timevar_pop (TV_TREE_GIMPLIFY);
6518 input_location = saved_location;
6519 }
6520
6521 /* Entry point to the gimplification pass. FNDECL is the FUNCTION_DECL
6522 node for the function we want to gimplify. */
6523
6524 void
6525 gimplify_function_tree (tree fndecl)
6526 {
6527 tree oldfn, parm, ret;
6528
6529 oldfn = current_function_decl;
6530 current_function_decl = fndecl;
6531 if (DECL_STRUCT_FUNCTION (fndecl))
6532 push_cfun (DECL_STRUCT_FUNCTION (fndecl));
6533 else
6534 push_struct_function (fndecl);
6535
6536 for (parm = DECL_ARGUMENTS (fndecl); parm ; parm = TREE_CHAIN (parm))
6537 {
6538 /* Preliminarily mark non-addressed complex variables as eligible
6539 for promotion to gimple registers. We'll transform their uses
6540 as we find them. */
6541 if ((TREE_CODE (TREE_TYPE (parm)) == COMPLEX_TYPE
6542 || TREE_CODE (TREE_TYPE (parm)) == VECTOR_TYPE)
6543 && !TREE_THIS_VOLATILE (parm)
6544 && !needs_to_live_in_memory (parm))
6545 DECL_GIMPLE_REG_P (parm) = 1;
6546 }
6547
6548 ret = DECL_RESULT (fndecl);
6549 if ((TREE_CODE (TREE_TYPE (ret)) == COMPLEX_TYPE
6550 || TREE_CODE (TREE_TYPE (ret)) == VECTOR_TYPE)
6551 && !needs_to_live_in_memory (ret))
6552 DECL_GIMPLE_REG_P (ret) = 1;
6553
6554 gimplify_body (&DECL_SAVED_TREE (fndecl), fndecl, true);
6555
6556 /* If we're instrumenting function entry/exit, then prepend the call to
6557 the entry hook and wrap the whole function in a TRY_FINALLY_EXPR to
6558 catch the exit hook. */
6559 /* ??? Add some way to ignore exceptions for this TFE. */
6560 if (flag_instrument_function_entry_exit
6561 && !DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (fndecl)
6562 && !flag_instrument_functions_exclude_p (fndecl))
6563 {
6564 tree tf, x, bind;
6565
6566 tf = build2 (TRY_FINALLY_EXPR, void_type_node, NULL, NULL);
6567 TREE_SIDE_EFFECTS (tf) = 1;
6568 x = DECL_SAVED_TREE (fndecl);
6569 append_to_statement_list (x, &TREE_OPERAND (tf, 0));
6570 x = implicit_built_in_decls[BUILT_IN_PROFILE_FUNC_EXIT];
6571 x = build_call_expr (x, 0);
6572 append_to_statement_list (x, &TREE_OPERAND (tf, 1));
6573
6574 bind = build3 (BIND_EXPR, void_type_node, NULL, NULL, NULL);
6575 TREE_SIDE_EFFECTS (bind) = 1;
6576 x = implicit_built_in_decls[BUILT_IN_PROFILE_FUNC_ENTER];
6577 x = build_call_expr (x, 0);
6578 append_to_statement_list (x, &BIND_EXPR_BODY (bind));
6579 append_to_statement_list (tf, &BIND_EXPR_BODY (bind));
6580
6581 DECL_SAVED_TREE (fndecl) = bind;
6582 }
6583
6584 cfun->gimplified = true;
6585 current_function_decl = oldfn;
6586 pop_cfun ();
6587 }
6588 \f
6589 /* Expands EXPR to list of gimple statements STMTS. If SIMPLE is true,
6590 force the result to be either ssa_name or an invariant, otherwise
6591 just force it to be a rhs expression. If VAR is not NULL, make the
6592 base variable of the final destination be VAR if suitable. */
6593
6594 tree
6595 force_gimple_operand (tree expr, tree *stmts, bool simple, tree var)
6596 {
6597 tree t;
6598 enum gimplify_status ret;
6599 gimple_predicate gimple_test_f;
6600
6601 *stmts = NULL_TREE;
6602
6603 if (is_gimple_val (expr))
6604 return expr;
6605
6606 gimple_test_f = simple ? is_gimple_val : is_gimple_reg_rhs;
6607
6608 push_gimplify_context ();
6609 gimplify_ctxp->into_ssa = gimple_in_ssa_p (cfun);
6610 gimplify_ctxp->allow_rhs_cond_expr = true;
6611
6612 if (var)
6613 expr = build_gimple_modify_stmt (var, expr);
6614
6615 if (TREE_CODE (expr) != GIMPLE_MODIFY_STMT
6616 && TREE_TYPE (expr) == void_type_node)
6617 {
6618 gimplify_and_add (expr, stmts);
6619 expr = NULL_TREE;
6620 }
6621 else
6622 {
6623 ret = gimplify_expr (&expr, stmts, NULL,
6624 gimple_test_f, fb_rvalue);
6625 gcc_assert (ret != GS_ERROR);
6626 }
6627
6628 if (gimple_referenced_vars (cfun))
6629 {
6630 for (t = gimplify_ctxp->temps; t ; t = TREE_CHAIN (t))
6631 add_referenced_var (t);
6632 }
6633
6634 pop_gimplify_context (NULL);
6635
6636 return expr;
6637 }
6638
6639 /* Invokes force_gimple_operand for EXPR with parameters SIMPLE_P and VAR. If
6640 some statements are produced, emits them at BSI. If BEFORE is true.
6641 the statements are appended before BSI, otherwise they are appended after
6642 it. M specifies the way BSI moves after insertion (BSI_SAME_STMT or
6643 BSI_CONTINUE_LINKING are the usual values). */
6644
6645 tree
6646 force_gimple_operand_bsi (block_stmt_iterator *bsi, tree expr,
6647 bool simple_p, tree var, bool before,
6648 enum bsi_iterator_update m)
6649 {
6650 tree stmts;
6651
6652 expr = force_gimple_operand (expr, &stmts, simple_p, var);
6653 if (stmts)
6654 {
6655 if (gimple_in_ssa_p (cfun))
6656 {
6657 tree_stmt_iterator tsi;
6658
6659 for (tsi = tsi_start (stmts); !tsi_end_p (tsi); tsi_next (&tsi))
6660 mark_symbols_for_renaming (tsi_stmt (tsi));
6661 }
6662
6663 if (before)
6664 bsi_insert_before (bsi, stmts, m);
6665 else
6666 bsi_insert_after (bsi, stmts, m);
6667 }
6668
6669 return expr;
6670 }
6671
6672 #include "gt-gimplify.h"
This page took 0.33757 seconds and 5 git commands to generate.