1/* A pass for lowering trees to RTL.
2   Copyright (C) 2004, 2005, 2006, 2007, 2008, 2009, 2010, 2011
3   Free Software Foundation, Inc.
4
5This file is part of GCC.
6
7GCC is free software; you can redistribute it and/or modify
8it under the terms of the GNU General Public License as published by
9the Free Software Foundation; either version 3, or (at your option)
10any later version.
11
12GCC is distributed in the hope that it will be useful,
13but WITHOUT ANY WARRANTY; without even the implied warranty of
14MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
15GNU General Public License for more details.
16
17You should have received a copy of the GNU General Public License
18along with GCC; see the file COPYING3.  If not see
19<http://www.gnu.org/licenses/>.  */
20
21#include "config.h"
22#include "system.h"
23#include "coretypes.h"
24#include "tm.h"
25#include "tree.h"
26#include "rtl.h"
27#include "tm_p.h"
28#include "basic-block.h"
29#include "function.h"
30#include "expr.h"
31#include "langhooks.h"
32#include "tree-flow.h"
33#include "timevar.h"
34#include "tree-dump.h"
35#include "tree-pass.h"
36#include "except.h"
37#include "flags.h"
38#include "diagnostic.h"
39#include "tree-pretty-print.h"
40#include "gimple-pretty-print.h"
41#include "toplev.h"
42#include "debug.h"
43#include "params.h"
44#include "tree-inline.h"
45#include "value-prof.h"
46#include "target.h"
47#include "ssaexpand.h"
48#include "bitmap.h"
49#include "sbitmap.h"
50#include "insn-attr.h" /* For INSN_SCHEDULING.  */
51
/* This variable holds information used while rewriting SSA trees
   into RTL.  */
54struct ssaexpand SA;
55
/* This variable holds the currently expanded gimple statement for purposes
   of communicating the profile info to the builtin expanders.  */
58gimple currently_expanding_gimple_stmt;
59
60static rtx expand_debug_expr (tree);
61
62/* Return an expression tree corresponding to the RHS of GIMPLE
63   statement STMT.  */
64
65tree
66gimple_assign_rhs_to_tree (gimple stmt)
67{
68  tree t;
69  enum gimple_rhs_class grhs_class;
70
71  grhs_class = get_gimple_rhs_class (gimple_expr_code (stmt));
72
73  if (grhs_class == GIMPLE_TERNARY_RHS)
74    t = build3 (gimple_assign_rhs_code (stmt),
75		TREE_TYPE (gimple_assign_lhs (stmt)),
76		gimple_assign_rhs1 (stmt),
77		gimple_assign_rhs2 (stmt),
78		gimple_assign_rhs3 (stmt));
79  else if (grhs_class == GIMPLE_BINARY_RHS)
80    t = build2 (gimple_assign_rhs_code (stmt),
81		TREE_TYPE (gimple_assign_lhs (stmt)),
82		gimple_assign_rhs1 (stmt),
83		gimple_assign_rhs2 (stmt));
84  else if (grhs_class == GIMPLE_UNARY_RHS)
85    t = build1 (gimple_assign_rhs_code (stmt),
86		TREE_TYPE (gimple_assign_lhs (stmt)),
87		gimple_assign_rhs1 (stmt));
88  else if (grhs_class == GIMPLE_SINGLE_RHS)
89    {
90      t = gimple_assign_rhs1 (stmt);
91      /* Avoid modifying this tree in place below.  */
92      if ((gimple_has_location (stmt) && CAN_HAVE_LOCATION_P (t)
93	   && gimple_location (stmt) != EXPR_LOCATION (t))
94	  || (gimple_block (stmt)
95	      && currently_expanding_to_rtl
96	      && EXPR_P (t)
97	      && gimple_block (stmt) != TREE_BLOCK (t)))
98	t = copy_node (t);
99    }
100  else
101    gcc_unreachable ();
102
103  if (gimple_has_location (stmt) && CAN_HAVE_LOCATION_P (t))
104    SET_EXPR_LOCATION (t, gimple_location (stmt));
105  if (gimple_block (stmt) && currently_expanding_to_rtl && EXPR_P (t))
106    TREE_BLOCK (t) = gimple_block (stmt);
107
108  return t;
109}
110
111
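/* If the target does not define STACK_ALIGNMENT_NEEDED, default it to 1
   so that expand_used_vars aligns the final frame_offset to
   PREFERRED_STACK_BOUNDARY (see the end of that function).  */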
112#ifndef STACK_ALIGNMENT_NEEDED
113#define STACK_ALIGNMENT_NEEDED 1
114#endif
115
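/* The underlying declaration for X: SSA_NAME_VAR if X is an SSA name,
   otherwise X itself.  */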
116#define SSAVAR(x) (TREE_CODE (x) == SSA_NAME ? SSA_NAME_VAR (x) : x)
117
/* Associate declaration T with storage space X.  If T is not an
   SSA name, this is exactly SET_DECL_RTL; otherwise make the
   partition of T associated with X.  */
121static inline void
122set_rtl (tree t, rtx x)
123{
124  if (TREE_CODE (t) == SSA_NAME)
125    {
126      SA.partition_to_pseudo[var_to_partition (SA.map, t)] = x;
127      if (x && !MEM_P (x))
128	set_reg_attrs_for_decl_rtl (SSA_NAME_VAR (t), x);
129      /* For the benefit of debug information at -O0 (where vartracking
130         doesn't run) record the place also in the base DECL if it's
131	 a normal variable (not a parameter).  */
132      if (x && x != pc_rtx && TREE_CODE (SSA_NAME_VAR (t)) == VAR_DECL)
133	{
134	  tree var = SSA_NAME_VAR (t);
135	  /* If we don't yet have something recorded, just record it now.  */
136	  if (!DECL_RTL_SET_P (var))
137	    SET_DECL_RTL (var, x);
138	  /* If we have it set already to "multiple places" don't
139	     change this.  */
140	  else if (DECL_RTL (var) == pc_rtx)
141	    ;
142	  /* If we have something recorded and it's not the same place
143	     as we want to record now, we have multiple partitions for the
	     same base variable, with different places.  We can't just
	     randomly choose one, so we have to say that we don't know.
146	     This only happens with optimization, and there var-tracking
147	     will figure out the right thing.  */
148	  else if (DECL_RTL (var) != x)
149	    SET_DECL_RTL (var, pc_rtx);
150	}
151    }
152  else
153    SET_DECL_RTL (t, x);
154}
155
156/* This structure holds data relevant to one variable that will be
157   placed in a stack slot.  */
158struct stack_var
159{
  /* The variable.  */
  tree decl;

  /* Initially, the size of the variable.  Later, the size of the partition,
     if this variable becomes its partition's representative.  */
  HOST_WIDE_INT size;

  /* The *byte* alignment required for this variable.  Or, as with the
     size, the alignment of this partition.  */
169  unsigned int alignb;
170
171  /* The partition representative.  */
172  size_t representative;
173
174  /* The next stack variable in the partition, or EOC.  */
175  size_t next;
176
  /* A bitmap of the indices of conflicting stack variables.  */
178  bitmap conflicts;
179};
180
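/* Sentinel marking the end of a partition's chain of stack variables
   (see the "next" field above).  */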
181#define EOC  ((size_t)-1)
182
183/* We have an array of such objects while deciding allocation.  */
184static struct stack_var *stack_vars;
185static size_t stack_vars_alloc;
186static size_t stack_vars_num;
187static struct pointer_map_t *decl_to_stack_part;
188
/* An array of indices sorted by stack_var_cmp: "large" alignment entries
   first, then by decreasing size.  */
191static size_t *stack_vars_sorted;
192
193/* The phase of the stack frame.  This is the known misalignment of
194   virtual_stack_vars_rtx from PREFERRED_STACK_BOUNDARY.  That is,
195   (frame_offset+frame_phase) % PREFERRED_STACK_BOUNDARY == 0.  */
196static int frame_phase;
197
198/* Used during expand_used_vars to remember if we saw any decls for
199   which we'd like to enable stack smashing protection.  */
200static bool has_protected_decls;
201
/* Used during expand_used_vars.  Remember if we saw a character buffer
   smaller than our cutoff threshold.  Used for -Wstack-protector.  */
204static bool has_short_buffer;
205
/* Compute the byte alignment to use for DECL.  Ignore any alignment
   we cannot honor given the expected alignment of the stack boundary.  */
208
209static unsigned int
210align_local_variable (tree decl)
211{
212  unsigned int align = LOCAL_DECL_ALIGNMENT (decl);
213  DECL_ALIGN (decl) = align;
214  return align / BITS_PER_UNIT;
215}
216
217/* Allocate SIZE bytes at byte alignment ALIGN from the stack frame.
218   Return the frame offset.  */
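
/* For example (a sketch, assuming frame_phase == 0 and FRAME_GROWS_DOWNWARD):
   with frame_offset == -12, SIZE == 8 and ALIGN == 8 the code below computes
   new_frame_offset = (-12 - 8) & -8 == -24, so the new object occupies
   offsets [-24, -16) and frame_offset becomes -24.  */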
219
220static HOST_WIDE_INT
221alloc_stack_frame_space (HOST_WIDE_INT size, unsigned HOST_WIDE_INT align)
222{
223  HOST_WIDE_INT offset, new_frame_offset;
224
225  new_frame_offset = frame_offset;
226  if (FRAME_GROWS_DOWNWARD)
227    {
228      new_frame_offset -= size + frame_phase;
229      new_frame_offset &= -align;
230      new_frame_offset += frame_phase;
231      offset = new_frame_offset;
232    }
233  else
234    {
235      new_frame_offset -= frame_phase;
236      new_frame_offset += align - 1;
237      new_frame_offset &= -align;
238      new_frame_offset += frame_phase;
239      offset = new_frame_offset;
240      new_frame_offset += size;
241    }
242  frame_offset = new_frame_offset;
243
244  if (frame_offset_overflow (frame_offset, cfun->decl))
245    frame_offset = offset = 0;
246
247  return offset;
248}
249
250/* Accumulate DECL into STACK_VARS.  */
251
252static void
253add_stack_var (tree decl)
254{
255  struct stack_var *v;
256
257  if (stack_vars_num >= stack_vars_alloc)
258    {
259      if (stack_vars_alloc)
260	stack_vars_alloc = stack_vars_alloc * 3 / 2;
261      else
262	stack_vars_alloc = 32;
263      stack_vars
264	= XRESIZEVEC (struct stack_var, stack_vars, stack_vars_alloc);
265    }
266  if (!decl_to_stack_part)
267    decl_to_stack_part = pointer_map_create ();
268
269  v = &stack_vars[stack_vars_num];
270  * (size_t *)pointer_map_insert (decl_to_stack_part, decl) = stack_vars_num;
271
272  v->decl = decl;
273  v->size = tree_low_cst (DECL_SIZE_UNIT (SSAVAR (decl)), 1);
274  /* Ensure that all variables have size, so that &a != &b for any two
275     variables that are simultaneously live.  */
276  if (v->size == 0)
277    v->size = 1;
278  v->alignb = align_local_variable (SSAVAR (decl));
279  /* An alignment of zero can mightily confuse us later.  */
280  gcc_assert (v->alignb != 0);
281
282  /* All variables are initially in their own partition.  */
283  v->representative = stack_vars_num;
284  v->next = EOC;
285
286  /* All variables initially conflict with no other.  */
287  v->conflicts = NULL;
288
289  /* Ensure that this decl doesn't get put onto the list twice.  */
290  set_rtl (decl, pc_rtx);
291
292  stack_vars_num++;
293}
294
/* Make the decls associated with indices X and Y conflict.  */
296
297static void
298add_stack_var_conflict (size_t x, size_t y)
299{
300  struct stack_var *a = &stack_vars[x];
301  struct stack_var *b = &stack_vars[y];
302  if (!a->conflicts)
303    a->conflicts = BITMAP_ALLOC (NULL);
304  if (!b->conflicts)
305    b->conflicts = BITMAP_ALLOC (NULL);
306  bitmap_set_bit (a->conflicts, y);
307  bitmap_set_bit (b->conflicts, x);
308}
309
/* Check whether the decls associated with indices X and Y conflict.  */
311
312static bool
313stack_var_conflict_p (size_t x, size_t y)
314{
315  struct stack_var *a = &stack_vars[x];
316  struct stack_var *b = &stack_vars[y];
317  if (x == y)
318    return false;
319  /* Partitions containing an SSA name result from gimple registers
320     with things like unsupported modes.  They are top-level and
321     hence conflict with everything else.  */
322  if (TREE_CODE (a->decl) == SSA_NAME || TREE_CODE (b->decl) == SSA_NAME)
323    return true;
324
325  if (!a->conflicts || !b->conflicts)
326    return false;
327  return bitmap_bit_p (a->conflicts, y);
328}
329
330/* Returns true if TYPE is or contains a union type.  */
331
332static bool
333aggregate_contains_union_type (tree type)
334{
335  tree field;
336
337  if (TREE_CODE (type) == UNION_TYPE
338      || TREE_CODE (type) == QUAL_UNION_TYPE)
339    return true;
340  if (TREE_CODE (type) == ARRAY_TYPE)
341    return aggregate_contains_union_type (TREE_TYPE (type));
342  if (TREE_CODE (type) != RECORD_TYPE)
343    return false;
344
345  for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
346    if (TREE_CODE (field) == FIELD_DECL)
347      if (aggregate_contains_union_type (TREE_TYPE (field)))
348	return true;
349
350  return false;
351}
352
/* A subroutine of expand_used_vars.  If two variables X and Y have alias
   sets that do not conflict, then we must add a conflict for these
   variables in the interference graph.  We also need to make sure to add
   conflicts for structures containing unions.  Otherwise RTL alias
   analysis comes along and, due to type-based aliasing rules, decides
   that for two overlapping union temporaries { short s; int i; } accesses
   to the same memory through different types may not alias, and happily
   reorders stores across the lifetime boundaries of the temporaries
   (see PR25654).  */
361
362static void
363add_alias_set_conflicts (void)
364{
365  size_t i, j, n = stack_vars_num;
366
367  for (i = 0; i < n; ++i)
368    {
369      tree type_i = TREE_TYPE (stack_vars[i].decl);
370      bool aggr_i = AGGREGATE_TYPE_P (type_i);
371      bool contains_union;
372
373      contains_union = aggregate_contains_union_type (type_i);
374      for (j = 0; j < i; ++j)
375	{
376	  tree type_j = TREE_TYPE (stack_vars[j].decl);
377	  bool aggr_j = AGGREGATE_TYPE_P (type_j);
378	  if (aggr_i != aggr_j
379	      /* Either the objects conflict by means of type based
380		 aliasing rules, or we need to add a conflict.  */
381	      || !objects_must_conflict_p (type_i, type_j)
382	      /* In case the types do not conflict ensure that access
383		 to elements will conflict.  In case of unions we have
384		 to be careful as type based aliasing rules may say
385		 access to the same memory does not conflict.  So play
386		 safe and add a conflict in this case when
387                 -fstrict-aliasing is used.  */
388              || (contains_union && flag_strict_aliasing))
389	    add_stack_var_conflict (i, j);
390	}
391    }
392}
393
/* Callback for walk_stmt_load_store_addr_ops.  If OP is a decl touched by
   add_stack_var, enter its partition number into bitmap DATA.  */
396
397static bool
398visit_op (gimple stmt ATTRIBUTE_UNUSED, tree op, void *data)
399{
400  bitmap active = (bitmap)data;
401  op = get_base_address (op);
402  if (op
403      && DECL_P (op)
404      && DECL_RTL_IF_SET (op) == pc_rtx)
405    {
406      size_t *v = (size_t *) pointer_map_contains (decl_to_stack_part, op);
407      if (v)
408	bitmap_set_bit (active, *v);
409    }
410  return false;
411}
412
/* Callback for walk_stmt_load_store_addr_ops.  If OP is a decl touched by
   add_stack_var, record conflicts between it and all currently active
   other partitions from bitmap DATA.  */
416
417static bool
418visit_conflict (gimple stmt ATTRIBUTE_UNUSED, tree op, void *data)
419{
420  bitmap active = (bitmap)data;
421  op = get_base_address (op);
422  if (op
423      && DECL_P (op)
424      && DECL_RTL_IF_SET (op) == pc_rtx)
425    {
426      size_t *v =
427	(size_t *) pointer_map_contains (decl_to_stack_part, op);
428      if (v && bitmap_set_bit (active, *v))
429	{
430	  size_t num = *v;
431	  bitmap_iterator bi;
432	  unsigned i;
433	  gcc_assert (num < stack_vars_num);
434	  EXECUTE_IF_SET_IN_BITMAP (active, 0, i, bi)
435	    add_stack_var_conflict (num, i);
436	}
437    }
438  return false;
439}
440
441/* Helper routine for add_scope_conflicts, calculating the active partitions
442   at the end of BB, leaving the result in WORK.  We're called to generate
443   conflicts when FOR_CONFLICT is true, otherwise we're just tracking
444   liveness.  */
445
446static void
447add_scope_conflicts_1 (basic_block bb, bitmap work, bool for_conflict)
448{
449  edge e;
450  edge_iterator ei;
451  gimple_stmt_iterator gsi;
452  bool (*visit)(gimple, tree, void *);
453
454  bitmap_clear (work);
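  /* The partitions live on entry to BB are those live at the end of
     any of its predecessors.  */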
455  FOR_EACH_EDGE (e, ei, bb->preds)
456    bitmap_ior_into (work, (bitmap)e->src->aux);
457
458  visit = visit_op;
459
460  for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
461    {
462      gimple stmt = gsi_stmt (gsi);
463      walk_stmt_load_store_addr_ops (stmt, work, NULL, NULL, visit);
464    }
465  for (gsi = gsi_after_labels (bb); !gsi_end_p (gsi); gsi_next (&gsi))
466    {
467      gimple stmt = gsi_stmt (gsi);
468
469      if (gimple_clobber_p (stmt))
470	{
471	  tree lhs = gimple_assign_lhs (stmt);
472	  size_t *v;
473	  /* Nested function lowering might introduce LHSs
474	     that are COMPONENT_REFs.  */
475	  if (TREE_CODE (lhs) != VAR_DECL)
476	    continue;
477	  if (DECL_RTL_IF_SET (lhs) == pc_rtx
478	      && (v = (size_t *)
479		  pointer_map_contains (decl_to_stack_part, lhs)))
480	    bitmap_clear_bit (work, *v);
481	}
482      else if (!is_gimple_debug (stmt))
483	{
484	  if (for_conflict
485	      && visit == visit_op)
486	    {
487	      /* If this is the first real instruction in this BB we need
488	         to add conflicts for everything live at this point now.
489		 Unlike classical liveness for named objects we can't
490		 rely on seeing a def/use of the names we're interested in.
491		 There might merely be indirect loads/stores.  We'd not add any
492		 conflicts for such partitions.  */
493	      bitmap_iterator bi;
494	      unsigned i;
495	      EXECUTE_IF_SET_IN_BITMAP (work, 0, i, bi)
496		{
497		  unsigned j;
498		  bitmap_iterator bj;
499		  EXECUTE_IF_SET_IN_BITMAP (work, i + 1, j, bj)
500		    add_stack_var_conflict (i, j);
501		}
502	      visit = visit_conflict;
503	    }
504	  walk_stmt_load_store_addr_ops (stmt, work, visit, visit, visit);
505	}
506    }
507}
508
509/* Generate stack partition conflicts between all partitions that are
510   simultaneously live.  */
511
512static void
513add_scope_conflicts (void)
514{
515  basic_block bb;
516  bool changed;
517  bitmap work = BITMAP_ALLOC (NULL);
518
  /* We approximate the live range of a stack variable by taking the first
     mention of its name as starting point(s), and by the end-of-scope
     death clobber added by gimplify as ending point(s) of the range.
     This overapproximates if we, for instance, moved an address-taken
     operation upward without also moving a dereference of it upward.
     But it is conservatively correct, as a variable can never hold values
     before its name is mentioned at least once.

     We then run a mostly classical bitmap liveness algorithm.  */
528
529  FOR_ALL_BB (bb)
530    bb->aux = BITMAP_ALLOC (NULL);
531
532  changed = true;
533  while (changed)
534    {
535      changed = false;
536      FOR_EACH_BB (bb)
537	{
538	  bitmap active = (bitmap)bb->aux;
539	  add_scope_conflicts_1 (bb, work, false);
540	  if (bitmap_ior_into (active, work))
541	    changed = true;
542	}
543    }
544
545  FOR_EACH_BB (bb)
546    add_scope_conflicts_1 (bb, work, true);
547
548  BITMAP_FREE (work);
549  FOR_ALL_BB (bb)
550    BITMAP_FREE (bb->aux);
551}
552
553/* A subroutine of partition_stack_vars.  A comparison function for qsort,
554   sorting an array of indices by the properties of the object.  */
555
556static int
557stack_var_cmp (const void *a, const void *b)
558{
559  size_t ia = *(const size_t *)a;
560  size_t ib = *(const size_t *)b;
561  unsigned int aligna = stack_vars[ia].alignb;
562  unsigned int alignb = stack_vars[ib].alignb;
563  HOST_WIDE_INT sizea = stack_vars[ia].size;
564  HOST_WIDE_INT sizeb = stack_vars[ib].size;
565  tree decla = stack_vars[ia].decl;
566  tree declb = stack_vars[ib].decl;
567  bool largea, largeb;
568  unsigned int uida, uidb;
569
570  /* Primary compare on "large" alignment.  Large comes first.  */
571  largea = (aligna * BITS_PER_UNIT > MAX_SUPPORTED_STACK_ALIGNMENT);
572  largeb = (alignb * BITS_PER_UNIT > MAX_SUPPORTED_STACK_ALIGNMENT);
573  if (largea != largeb)
574    return (int)largeb - (int)largea;
575
  /* Secondary compare on size, decreasing.  */
577  if (sizea > sizeb)
578    return -1;
579  if (sizea < sizeb)
580    return 1;
581
582  /* Tertiary compare on true alignment, decreasing.  */
583  if (aligna < alignb)
584    return -1;
585  if (aligna > alignb)
586    return 1;
587
588  /* Final compare on ID for sort stability, increasing.
589     Two SSA names are compared by their version, SSA names come before
590     non-SSA names, and two normal decls are compared by their DECL_UID.  */
591  if (TREE_CODE (decla) == SSA_NAME)
592    {
593      if (TREE_CODE (declb) == SSA_NAME)
594	uida = SSA_NAME_VERSION (decla), uidb = SSA_NAME_VERSION (declb);
595      else
596	return -1;
597    }
598  else if (TREE_CODE (declb) == SSA_NAME)
599    return 1;
600  else
601    uida = DECL_UID (decla), uidb = DECL_UID (declb);
602  if (uida < uidb)
603    return 1;
604  if (uida > uidb)
605    return -1;
606  return 0;
607}
608
609
/* If the points-to solution *PT points to variables that are in a partition
   together with other variables, add all partition members to the
   pointed-to variables bitmap.  */
613
614static void
615add_partitioned_vars_to_ptset (struct pt_solution *pt,
616			       struct pointer_map_t *decls_to_partitions,
617			       struct pointer_set_t *visited, bitmap temp)
618{
619  bitmap_iterator bi;
620  unsigned i;
621  bitmap *part;
622
623  if (pt->anything
624      || pt->vars == NULL
625      /* The pointed-to vars bitmap is shared, it is enough to
626	 visit it once.  */
627      || pointer_set_insert(visited, pt->vars))
628    return;
629
630  bitmap_clear (temp);
631
  /* By using a temporary bitmap to store all members of the partitions
     we have to add, we make sure to visit each of the partitions only
     once.  */
635  EXECUTE_IF_SET_IN_BITMAP (pt->vars, 0, i, bi)
636    if ((!temp
637	 || !bitmap_bit_p (temp, i))
638	&& (part = (bitmap *) pointer_map_contains (decls_to_partitions,
639						    (void *)(size_t) i)))
640      bitmap_ior_into (temp, *part);
641  if (!bitmap_empty_p (temp))
642    bitmap_ior_into (pt->vars, temp);
643}
644
645/* Update points-to sets based on partition info, so we can use them on RTL.
646   The bitmaps representing stack partitions will be saved until expand,
647   where partitioned decls used as bases in memory expressions will be
648   rewritten.  */
649
650static void
651update_alias_info_with_stack_vars (void)
652{
653  struct pointer_map_t *decls_to_partitions = NULL;
654  size_t i, j;
655  tree var = NULL_TREE;
656
657  for (i = 0; i < stack_vars_num; i++)
658    {
659      bitmap part = NULL;
660      tree name;
661      struct ptr_info_def *pi;
662
      /* Not interested in partitions with a single variable.  */
664      if (stack_vars[i].representative != i
665          || stack_vars[i].next == EOC)
666        continue;
667
668      if (!decls_to_partitions)
669	{
670	  decls_to_partitions = pointer_map_create ();
671	  cfun->gimple_df->decls_to_pointers = pointer_map_create ();
672	}
673
674      /* Create an SSA_NAME that points to the partition for use
675         as base during alias-oracle queries on RTL for bases that
676	 have been partitioned.  */
677      if (var == NULL_TREE)
678	var = create_tmp_var (ptr_type_node, NULL);
679      name = make_ssa_name (var, NULL);
680
681      /* Create bitmaps representing partitions.  They will be used for
682         points-to sets later, so use GGC alloc.  */
683      part = BITMAP_GGC_ALLOC ();
684      for (j = i; j != EOC; j = stack_vars[j].next)
685	{
686	  tree decl = stack_vars[j].decl;
687	  unsigned int uid = DECL_PT_UID (decl);
688	  /* We should never end up partitioning SSA names (though they
689	     may end up on the stack).  Neither should we allocate stack
690	     space to something that is unused and thus unreferenced, except
691	     for -O0 where we are preserving even unreferenced variables.  */
692	  gcc_assert (DECL_P (decl)
693		      && (!optimize
694			  || referenced_var_lookup (cfun, DECL_UID (decl))));
695	  bitmap_set_bit (part, uid);
696	  *((bitmap *) pointer_map_insert (decls_to_partitions,
697					   (void *)(size_t) uid)) = part;
698	  *((tree *) pointer_map_insert (cfun->gimple_df->decls_to_pointers,
699					 decl)) = name;
700	  if (TREE_ADDRESSABLE (decl))
701	    TREE_ADDRESSABLE (name) = 1;
702	}
703
704      /* Make the SSA name point to all partition members.  */
705      pi = get_ptr_info (name);
706      pt_solution_set (&pi->pt, part, false);
707    }
708
709  /* Make all points-to sets that contain one member of a partition
710     contain all members of the partition.  */
711  if (decls_to_partitions)
712    {
713      unsigned i;
714      struct pointer_set_t *visited = pointer_set_create ();
715      bitmap temp = BITMAP_ALLOC (NULL);
716
717      for (i = 1; i < num_ssa_names; i++)
718	{
719	  tree name = ssa_name (i);
720	  struct ptr_info_def *pi;
721
722	  if (name
723	      && POINTER_TYPE_P (TREE_TYPE (name))
724	      && ((pi = SSA_NAME_PTR_INFO (name)) != NULL))
725	    add_partitioned_vars_to_ptset (&pi->pt, decls_to_partitions,
726					   visited, temp);
727	}
728
729      add_partitioned_vars_to_ptset (&cfun->gimple_df->escaped,
730				     decls_to_partitions, visited, temp);
731
732      pointer_set_destroy (visited);
733      pointer_map_destroy (decls_to_partitions);
734      BITMAP_FREE (temp);
735    }
736}
737
738/* A subroutine of partition_stack_vars.  The UNION portion of a UNION/FIND
739   partitioning algorithm.  Partitions A and B are known to be non-conflicting.
740   Merge them into a single partition A.  */
741
742static void
743union_stack_vars (size_t a, size_t b)
744{
745  struct stack_var *vb = &stack_vars[b];
746  bitmap_iterator bi;
747  unsigned u;
748
749  gcc_assert (stack_vars[b].next == EOC);
  /* Add B to A's partition.  */
751  stack_vars[b].next = stack_vars[a].next;
752  stack_vars[b].representative = a;
753  stack_vars[a].next = b;
754
755  /* Update the required alignment of partition A to account for B.  */
756  if (stack_vars[a].alignb < stack_vars[b].alignb)
757    stack_vars[a].alignb = stack_vars[b].alignb;
758
759  /* Update the interference graph and merge the conflicts.  */
760  if (vb->conflicts)
761    {
762      EXECUTE_IF_SET_IN_BITMAP (vb->conflicts, 0, u, bi)
763	add_stack_var_conflict (a, stack_vars[u].representative);
764      BITMAP_FREE (vb->conflicts);
765    }
766}
767
768/* A subroutine of expand_used_vars.  Binpack the variables into
769   partitions constrained by the interference graph.  The overall
770   algorithm used is as follows:
771
772	Sort the objects by size in descending order.
773	For each object A {
774	  S = size(A)
775	  O = 0
776	  loop {
777	    Look for the largest non-conflicting object B with size <= S.
778	    UNION (A, B)
779	  }
780	}
781*/
782
783static void
784partition_stack_vars (void)
785{
786  size_t si, sj, n = stack_vars_num;
787
788  stack_vars_sorted = XNEWVEC (size_t, stack_vars_num);
789  for (si = 0; si < n; ++si)
790    stack_vars_sorted[si] = si;
791
792  if (n == 1)
793    return;
794
795  qsort (stack_vars_sorted, n, sizeof (size_t), stack_var_cmp);
796
797  for (si = 0; si < n; ++si)
798    {
799      size_t i = stack_vars_sorted[si];
800      unsigned int ialign = stack_vars[i].alignb;
801
802      /* Ignore objects that aren't partition representatives. If we
803         see a var that is not a partition representative, it must
804         have been merged earlier.  */
805      if (stack_vars[i].representative != i)
806        continue;
807
808      for (sj = si + 1; sj < n; ++sj)
809	{
810	  size_t j = stack_vars_sorted[sj];
811	  unsigned int jalign = stack_vars[j].alignb;
812
813	  /* Ignore objects that aren't partition representatives.  */
814	  if (stack_vars[j].representative != j)
815	    continue;
816
817	  /* Ignore conflicting objects.  */
818	  if (stack_var_conflict_p (i, j))
819	    continue;
820
821	  /* Do not mix objects of "small" (supported) alignment
822	     and "large" (unsupported) alignment.  */
823	  if ((ialign * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT)
824	      != (jalign * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT))
825	    continue;
826
	  /* UNION the objects, placing J immediately after I in I's
	     partition list.  */
828	  union_stack_vars (i, j);
829	}
830    }
831
832  update_alias_info_with_stack_vars ();
833}
834
835/* A debugging aid for expand_used_vars.  Dump the generated partitions.  */
836
837static void
838dump_stack_var_partition (void)
839{
840  size_t si, i, j, n = stack_vars_num;
841
842  for (si = 0; si < n; ++si)
843    {
844      i = stack_vars_sorted[si];
845
846      /* Skip variables that aren't partition representatives, for now.  */
847      if (stack_vars[i].representative != i)
848	continue;
849
850      fprintf (dump_file, "Partition %lu: size " HOST_WIDE_INT_PRINT_DEC
851	       " align %u\n", (unsigned long) i, stack_vars[i].size,
852	       stack_vars[i].alignb);
853
854      for (j = i; j != EOC; j = stack_vars[j].next)
855	{
856	  fputc ('\t', dump_file);
857	  print_generic_expr (dump_file, stack_vars[j].decl, dump_flags);
858	}
859      fputc ('\n', dump_file);
860    }
861}
862
863/* Assign rtl to DECL at BASE + OFFSET.  */
864
865static void
866expand_one_stack_var_at (tree decl, rtx base, unsigned base_align,
867			 HOST_WIDE_INT offset)
868{
869  unsigned align;
870  rtx x;
871
872  /* If this fails, we've overflowed the stack frame.  Error nicely?  */
873  gcc_assert (offset == trunc_int_for_mode (offset, Pmode));
874
875  x = plus_constant (base, offset);
876  x = gen_rtx_MEM (DECL_MODE (SSAVAR (decl)), x);
877
878  if (TREE_CODE (decl) != SSA_NAME)
879    {
      /* Record the alignment we actually gave this decl if it isn't an SSA
	 name.  If it is, we generate stack slots only accidentally, so the
	 alignment isn't as important; we'll simply use the alignment that
	 is already set.  */
883      if (base == virtual_stack_vars_rtx)
884	offset -= frame_phase;
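      /* OFFSET & -OFFSET isolates the lowest set bit of OFFSET, i.e. the
	 largest power of two that divides it.  */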
885      align = offset & -offset;
886      align *= BITS_PER_UNIT;
887      if (align == 0 || align > base_align)
888	align = base_align;
889
890      /* One would think that we could assert that we're not decreasing
891	 alignment here, but (at least) the i386 port does exactly this
892	 via the MINIMUM_ALIGNMENT hook.  */
893
894      DECL_ALIGN (decl) = align;
895      DECL_USER_ALIGN (decl) = 0;
896    }
897
898  set_mem_attributes (x, SSAVAR (decl), true);
899  set_rtl (decl, x);
900}
901
902/* A subroutine of expand_used_vars.  Give each partition representative
903   a unique location within the stack frame.  Update each partition member
904   with that location.  */
905
906static void
907expand_stack_vars (bool (*pred) (tree))
908{
909  size_t si, i, j, n = stack_vars_num;
910  HOST_WIDE_INT large_size = 0, large_alloc = 0;
911  rtx large_base = NULL;
912  unsigned large_align = 0;
913  tree decl;
914
  /* Determine if there are any variables requiring "large" alignment.
     Since these are dynamically allocated, we only process them if
     no predicate is involved.  */
918  large_align = stack_vars[stack_vars_sorted[0]].alignb * BITS_PER_UNIT;
919  if (pred == NULL && large_align > MAX_SUPPORTED_STACK_ALIGNMENT)
920    {
921      /* Find the total size of these variables.  */
922      for (si = 0; si < n; ++si)
923	{
924	  unsigned alignb;
925
926	  i = stack_vars_sorted[si];
927	  alignb = stack_vars[i].alignb;
928
929	  /* Stop when we get to the first decl with "small" alignment.  */
930	  if (alignb * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT)
931	    break;
932
933	  /* Skip variables that aren't partition representatives.  */
934	  if (stack_vars[i].representative != i)
935	    continue;
936
937	  /* Skip variables that have already had rtl assigned.  See also
938	     add_stack_var where we perpetrate this pc_rtx hack.  */
939	  decl = stack_vars[i].decl;
940	  if ((TREE_CODE (decl) == SSA_NAME
941	      ? SA.partition_to_pseudo[var_to_partition (SA.map, decl)]
942	      : DECL_RTL (decl)) != pc_rtx)
943	    continue;
944
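	  /* Round the running total up to this variable's alignment
	     (ALIGNB is a power of two), then add its size.  */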
945	  large_size += alignb - 1;
946	  large_size &= -(HOST_WIDE_INT)alignb;
947	  large_size += stack_vars[i].size;
948	}
949
950      /* If there were any, allocate space.  */
951      if (large_size > 0)
952	large_base = allocate_dynamic_stack_space (GEN_INT (large_size), 0,
953						   large_align, true);
954    }
955
956  for (si = 0; si < n; ++si)
957    {
958      rtx base;
959      unsigned base_align, alignb;
960      HOST_WIDE_INT offset;
961
962      i = stack_vars_sorted[si];
963
964      /* Skip variables that aren't partition representatives, for now.  */
965      if (stack_vars[i].representative != i)
966	continue;
967
968      /* Skip variables that have already had rtl assigned.  See also
969	 add_stack_var where we perpetrate this pc_rtx hack.  */
970      decl = stack_vars[i].decl;
971      if ((TREE_CODE (decl) == SSA_NAME
972	   ? SA.partition_to_pseudo[var_to_partition (SA.map, decl)]
973	   : DECL_RTL (decl)) != pc_rtx)
974	continue;
975
976      /* Check the predicate to see whether this variable should be
977	 allocated in this pass.  */
978      if (pred && !pred (decl))
979	continue;
980
981      alignb = stack_vars[i].alignb;
982      if (alignb * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT)
983	{
984	  offset = alloc_stack_frame_space (stack_vars[i].size, alignb);
985	  base = virtual_stack_vars_rtx;
986	  base_align = crtl->max_used_stack_slot_alignment;
987	}
988      else
989	{
990	  /* Large alignment is only processed in the last pass.  */
991	  if (pred)
992	    continue;
993	  gcc_assert (large_base != NULL);
994
995	  large_alloc += alignb - 1;
996	  large_alloc &= -(HOST_WIDE_INT)alignb;
997	  offset = large_alloc;
998	  large_alloc += stack_vars[i].size;
999
1000	  base = large_base;
1001	  base_align = large_align;
1002	}
1003
1004      /* Create rtl for each variable based on their location within the
1005	 partition.  */
1006      for (j = i; j != EOC; j = stack_vars[j].next)
1007	{
1008	  expand_one_stack_var_at (stack_vars[j].decl,
1009				   base, base_align,
1010				   offset);
1011	}
1012    }
1013
1014  gcc_assert (large_alloc == large_size);
1015}
1016
/* Account for the sizes of all partitions and reset the DECL_RTLs.  */
1018static HOST_WIDE_INT
1019account_stack_vars (void)
1020{
1021  size_t si, j, i, n = stack_vars_num;
1022  HOST_WIDE_INT size = 0;
1023
1024  for (si = 0; si < n; ++si)
1025    {
1026      i = stack_vars_sorted[si];
1027
1028      /* Skip variables that aren't partition representatives, for now.  */
1029      if (stack_vars[i].representative != i)
1030	continue;
1031
1032      size += stack_vars[i].size;
1033      for (j = i; j != EOC; j = stack_vars[j].next)
1034	set_rtl (stack_vars[j].decl, NULL);
1035    }
1036  return size;
1037}
1038
1039/* A subroutine of expand_one_var.  Called to immediately assign rtl
1040   to a variable to be allocated in the stack frame.  */
1041
1042static void
1043expand_one_stack_var (tree var)
1044{
1045  HOST_WIDE_INT size, offset;
1046  unsigned byte_align;
1047
1048  size = tree_low_cst (DECL_SIZE_UNIT (SSAVAR (var)), 1);
1049  byte_align = align_local_variable (SSAVAR (var));
1050
1051  /* We handle highly aligned variables in expand_stack_vars.  */
1052  gcc_assert (byte_align * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT);
1053
1054  offset = alloc_stack_frame_space (size, byte_align);
1055
1056  expand_one_stack_var_at (var, virtual_stack_vars_rtx,
1057			   crtl->max_used_stack_slot_alignment, offset);
1058}
1059
1060/* A subroutine of expand_one_var.  Called to assign rtl to a VAR_DECL
1061   that will reside in a hard register.  */
1062
1063static void
1064expand_one_hard_reg_var (tree var)
1065{
1066  rest_of_decl_compilation (var, 0, 0);
1067}
1068
1069/* A subroutine of expand_one_var.  Called to assign rtl to a VAR_DECL
1070   that will reside in a pseudo register.  */
1071
1072static void
1073expand_one_register_var (tree var)
1074{
1075  tree decl = SSAVAR (var);
1076  tree type = TREE_TYPE (decl);
1077  enum machine_mode reg_mode = promote_decl_mode (decl, NULL);
1078  rtx x = gen_reg_rtx (reg_mode);
1079
1080  set_rtl (var, x);
1081
1082  /* Note if the object is a user variable.  */
1083  if (!DECL_ARTIFICIAL (decl))
1084    mark_user_reg (x);
1085
1086  if (POINTER_TYPE_P (type))
1087    mark_reg_pointer (x, get_pointer_alignment (var));
1088}
1089
1090/* A subroutine of expand_one_var.  Called to assign rtl to a VAR_DECL that
1091   has some associated error, e.g. its type is error-mark.  We just need
1092   to pick something that won't crash the rest of the compiler.  */
1093
1094static void
1095expand_one_error_var (tree var)
1096{
1097  enum machine_mode mode = DECL_MODE (var);
1098  rtx x;
1099
1100  if (mode == BLKmode)
1101    x = gen_rtx_MEM (BLKmode, const0_rtx);
1102  else if (mode == VOIDmode)
1103    x = const0_rtx;
1104  else
1105    x = gen_reg_rtx (mode);
1106
1107  SET_DECL_RTL (var, x);
1108}
1109
1110/* A subroutine of expand_one_var.  VAR is a variable that will be
1111   allocated to the local stack frame.  Return true if we wish to
1112   add VAR to STACK_VARS so that it will be coalesced with other
1113   variables.  Return false to allocate VAR immediately.
1114
1115   This function is used to reduce the number of variables considered
1116   for coalescing, which reduces the size of the quadratic problem.  */
1117
1118static bool
1119defer_stack_allocation (tree var, bool toplevel)
1120{
1121  /* If stack protection is enabled, *all* stack variables must be deferred,
1122     so that we can re-order the strings to the top of the frame.  */
1123  if (flag_stack_protect)
1124    return true;
1125
1126  /* We handle "large" alignment via dynamic allocation.  We want to handle
1127     this extra complication in only one place, so defer them.  */
1128  if (DECL_ALIGN (var) > MAX_SUPPORTED_STACK_ALIGNMENT)
1129    return true;
1130
1131  /* Variables in the outermost scope automatically conflict with
1132     every other variable.  The only reason to want to defer them
1133     at all is that, after sorting, we can more efficiently pack
1134     small variables in the stack frame.  Continue to defer at -O2.  */
1135  if (toplevel && optimize < 2)
1136    return false;
1137
1138  /* Without optimization, *most* variables are allocated from the
1139     stack, which makes the quadratic problem large exactly when we
1140     want compilation to proceed as quickly as possible.  On the
1141     other hand, we don't want the function's stack frame size to
1142     get completely out of hand.  So we avoid adding scalars and
1143     "small" aggregates to the list at all.  */
1144  if (optimize == 0 && tree_low_cst (DECL_SIZE_UNIT (var), 1) < 32)
1145    return false;
1146
1147  return true;
1148}
1149
/* A subroutine of expand_used_vars.  Expand one variable according to
   its flavor.  Variables to be placed on the stack are not actually
   expanded yet, merely recorded.
   When REALLY_EXPAND is false, only record the stack variables to be
   allocated later.  Return the amount of stack space this variable is
   expected to consume.  */
1156
1157static HOST_WIDE_INT
1158expand_one_var (tree var, bool toplevel, bool really_expand)
1159{
1160  unsigned int align = BITS_PER_UNIT;
1161  tree origvar = var;
1162
1163  var = SSAVAR (var);
1164
1165  if (TREE_TYPE (var) != error_mark_node && TREE_CODE (var) == VAR_DECL)
1166    {
      /* Because we don't know whether VAR will end up in a register or on
	 the stack, we conservatively assume it will be on the stack even
	 if VAR is eventually put into a register after the RA pass.  For
	 non-automatic variables, which won't be on the stack, we collect
	 the alignment of the type and ignore any user-specified alignment.  */
1172      if (TREE_STATIC (var) || DECL_EXTERNAL (var))
1173	align = MINIMUM_ALIGNMENT (TREE_TYPE (var),
1174				   TYPE_MODE (TREE_TYPE (var)),
1175				   TYPE_ALIGN (TREE_TYPE (var)));
1176      else if (DECL_HAS_VALUE_EXPR_P (var)
1177	       || (DECL_RTL_SET_P (var) && MEM_P (DECL_RTL (var))))
	/* Don't consider debug-only variables with DECL_HAS_VALUE_EXPR_P set
	   or variables which have already been assigned a stack slot by
	   expand_one_stack_var_at - in the latter case DECL_ALIGN has
	   already been adjusted to match the offset chosen for it.  */
1182	align = crtl->stack_alignment_estimated;
1183      else
1184	align = MINIMUM_ALIGNMENT (var, DECL_MODE (var), DECL_ALIGN (var));
1185
      /* If the variable's alignment is very large we'll dynamically
	 allocate it, which means that the in-frame portion is just a
	 pointer.  */
1188      if (align > MAX_SUPPORTED_STACK_ALIGNMENT)
1189	align = POINTER_SIZE;
1190    }
1191
1192  if (SUPPORTS_STACK_ALIGNMENT
1193      && crtl->stack_alignment_estimated < align)
1194    {
      /* stack_alignment_estimated shouldn't change after the stack
	 realignment decision has been made.  */
1197      gcc_assert(!crtl->stack_realign_processed);
1198      crtl->stack_alignment_estimated = align;
1199    }
1200
1201  /* stack_alignment_needed > PREFERRED_STACK_BOUNDARY is permitted.
1202     So here we only make sure stack_alignment_needed >= align.  */
1203  if (crtl->stack_alignment_needed < align)
1204    crtl->stack_alignment_needed = align;
1205  if (crtl->max_used_stack_slot_alignment < align)
1206    crtl->max_used_stack_slot_alignment = align;
1207
1208  if (TREE_CODE (origvar) == SSA_NAME)
1209    {
1210      gcc_assert (TREE_CODE (var) != VAR_DECL
1211		  || (!DECL_EXTERNAL (var)
1212		      && !DECL_HAS_VALUE_EXPR_P (var)
1213		      && !TREE_STATIC (var)
1214		      && TREE_TYPE (var) != error_mark_node
1215		      && !DECL_HARD_REGISTER (var)
1216		      && really_expand));
1217    }
1218  if (TREE_CODE (var) != VAR_DECL && TREE_CODE (origvar) != SSA_NAME)
1219    ;
1220  else if (DECL_EXTERNAL (var))
1221    ;
1222  else if (DECL_HAS_VALUE_EXPR_P (var))
1223    ;
1224  else if (TREE_STATIC (var))
1225    ;
1226  else if (TREE_CODE (origvar) != SSA_NAME && DECL_RTL_SET_P (var))
1227    ;
1228  else if (TREE_TYPE (var) == error_mark_node)
1229    {
1230      if (really_expand)
1231        expand_one_error_var (var);
1232    }
1233  else if (TREE_CODE (var) == VAR_DECL && DECL_HARD_REGISTER (var))
1234    {
1235      if (really_expand)
1236        expand_one_hard_reg_var (var);
1237    }
1238  else if (use_register_for_decl (var))
1239    {
1240      if (really_expand)
1241        expand_one_register_var (origvar);
1242    }
1243  else if (!host_integerp (DECL_SIZE_UNIT (var), 1))
1244    {
1245      if (really_expand)
1246	{
1247	  error ("size of variable %q+D is too large", var);
1248	  expand_one_error_var (var);
1249	}
1250    }
1251  else if (defer_stack_allocation (var, toplevel))
1252    add_stack_var (origvar);
1253  else
1254    {
1255      if (really_expand)
1256        expand_one_stack_var (origvar);
1257      return tree_low_cst (DECL_SIZE_UNIT (var), 1);
1258    }
1259  return 0;
1260}
1261
1262/* A subroutine of expand_used_vars.  Walk down through the BLOCK tree
1263   expanding variables.  Those variables that can be put into registers
1264   are allocated pseudos; those that can't are put on the stack.
1265
1266   TOPLEVEL is true if this is the outermost BLOCK.  */
1267
1268static void
1269expand_used_vars_for_block (tree block, bool toplevel)
1270{
1271  tree t;
1272
1273  /* Expand all variables at this level.  */
1274  for (t = BLOCK_VARS (block); t ; t = DECL_CHAIN (t))
1275    if (TREE_USED (t)
1276        && ((TREE_CODE (t) != VAR_DECL && TREE_CODE (t) != RESULT_DECL)
1277	    || !DECL_NONSHAREABLE (t)))
1278      expand_one_var (t, toplevel, true);
1279
1280  /* Expand all variables at containing levels.  */
1281  for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
1282    expand_used_vars_for_block (t, false);
1283}
1284
1285/* A subroutine of expand_used_vars.  Walk down through the BLOCK tree
1286   and clear TREE_USED on all local variables.  */
1287
1288static void
1289clear_tree_used (tree block)
1290{
1291  tree t;
1292
1293  for (t = BLOCK_VARS (block); t ; t = DECL_CHAIN (t))
1294    /* if (!TREE_STATIC (t) && !DECL_EXTERNAL (t)) */
1295    if ((TREE_CODE (t) != VAR_DECL && TREE_CODE (t) != RESULT_DECL)
1296	|| !DECL_NONSHAREABLE (t))
1297      TREE_USED (t) = 0;
1298
1299  for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
1300    clear_tree_used (t);
1301}
1302
1303/* Examine TYPE and determine a bit mask of the following features.  */
1304
1305#define SPCT_HAS_LARGE_CHAR_ARRAY	1
1306#define SPCT_HAS_SMALL_CHAR_ARRAY	2
1307#define SPCT_HAS_ARRAY			4
1308#define SPCT_HAS_AGGREGATE		8
1309
1310static unsigned int
1311stack_protect_classify_type (tree type)
1312{
1313  unsigned int ret = 0;
1314  tree t;
1315
1316  switch (TREE_CODE (type))
1317    {
1318    case ARRAY_TYPE:
1319      t = TYPE_MAIN_VARIANT (TREE_TYPE (type));
1320      if (t == char_type_node
1321	  || t == signed_char_type_node
1322	  || t == unsigned_char_type_node)
1323	{
1324	  unsigned HOST_WIDE_INT max = PARAM_VALUE (PARAM_SSP_BUFFER_SIZE);
1325	  unsigned HOST_WIDE_INT len;
1326
1327	  if (!TYPE_SIZE_UNIT (type)
1328	      || !host_integerp (TYPE_SIZE_UNIT (type), 1))
1329	    len = max;
1330	  else
1331	    len = tree_low_cst (TYPE_SIZE_UNIT (type), 1);
1332
1333	  if (len < max)
1334	    ret = SPCT_HAS_SMALL_CHAR_ARRAY | SPCT_HAS_ARRAY;
1335	  else
1336	    ret = SPCT_HAS_LARGE_CHAR_ARRAY | SPCT_HAS_ARRAY;
1337	}
1338      else
1339	ret = SPCT_HAS_ARRAY;
1340      break;
1341
1342    case UNION_TYPE:
1343    case QUAL_UNION_TYPE:
1344    case RECORD_TYPE:
1345      ret = SPCT_HAS_AGGREGATE;
1346      for (t = TYPE_FIELDS (type); t ; t = TREE_CHAIN (t))
1347	if (TREE_CODE (t) == FIELD_DECL)
1348	  ret |= stack_protect_classify_type (TREE_TYPE (t));
1349      break;
1350
1351    default:
1352      break;
1353    }
1354
1355  return ret;
1356}
1357
1358/* Return nonzero if DECL should be segregated into the "vulnerable" upper
1359   part of the local stack frame.  Remember if we ever return nonzero for
1360   any variable in this function.  The return value is the phase number in
1361   which the variable should be allocated.  */
1362
1363static int
1364stack_protect_decl_phase (tree decl)
1365{
1366  unsigned int bits = stack_protect_classify_type (TREE_TYPE (decl));
1367  int ret = 0;
1368
1369  if (bits & SPCT_HAS_SMALL_CHAR_ARRAY)
1370    has_short_buffer = true;
1371
1372  if (flag_stack_protect == 2)
1373    {
1374      if ((bits & (SPCT_HAS_SMALL_CHAR_ARRAY | SPCT_HAS_LARGE_CHAR_ARRAY))
1375	  && !(bits & SPCT_HAS_AGGREGATE))
1376	ret = 1;
1377      else if (bits & SPCT_HAS_ARRAY)
1378	ret = 2;
1379    }
1380  else
1381    ret = (bits & SPCT_HAS_LARGE_CHAR_ARRAY) != 0;
1382
1383  if (ret)
1384    has_protected_decls = true;
1385
1386  return ret;
1387}
1388
1389/* Two helper routines that check for phase 1 and phase 2.  These are used
1390   as callbacks for expand_stack_vars.  */
1391
1392static bool
1393stack_protect_decl_phase_1 (tree decl)
1394{
1395  return stack_protect_decl_phase (decl) == 1;
1396}
1397
1398static bool
1399stack_protect_decl_phase_2 (tree decl)
1400{
1401  return stack_protect_decl_phase (decl) == 2;
1402}
1403
1404/* Ensure that variables in different stack protection phases conflict
1405   so that they are not merged and share the same stack slot.  */
1406
1407static void
1408add_stack_protection_conflicts (void)
1409{
1410  size_t i, j, n = stack_vars_num;
1411  unsigned char *phase;
1412
1413  phase = XNEWVEC (unsigned char, n);
1414  for (i = 0; i < n; ++i)
1415    phase[i] = stack_protect_decl_phase (stack_vars[i].decl);
1416
1417  for (i = 0; i < n; ++i)
1418    {
1419      unsigned char ph_i = phase[i];
1420      for (j = 0; j < i; ++j)
1421	if (ph_i != phase[j])
1422	  add_stack_var_conflict (i, j);
1423    }
1424
1425  XDELETEVEC (phase);
1426}
1427
1428/* Create a decl for the guard at the top of the stack frame.  */
1429
1430static void
1431create_stack_guard (void)
1432{
1433  tree guard = build_decl (DECL_SOURCE_LOCATION (current_function_decl),
1434			   VAR_DECL, NULL, ptr_type_node);
1435  TREE_THIS_VOLATILE (guard) = 1;
1436  TREE_USED (guard) = 1;
1437  expand_one_stack_var (guard);
1438  crtl->stack_protect_guard = guard;
1439}
1440
1441/* Prepare for expanding variables.  */
1442static void
1443init_vars_expansion (void)
1444{
1445  tree t;
1446  unsigned ix;
1447  /* Set TREE_USED on all variables in the local_decls.  */
1448  FOR_EACH_LOCAL_DECL (cfun, ix, t)
1449    TREE_USED (t) = 1;
1450
1451  /* Clear TREE_USED on all variables associated with a block scope.  */
1452  clear_tree_used (DECL_INITIAL (current_function_decl));
1453
1454  /* Initialize local stack smashing state.  */
1455  has_protected_decls = false;
1456  has_short_buffer = false;
1457}
1458
1459/* Free up stack variable graph data.  */
1460static void
1461fini_vars_expansion (void)
1462{
1463  size_t i, n = stack_vars_num;
1464  for (i = 0; i < n; i++)
1465    BITMAP_FREE (stack_vars[i].conflicts);
1466  XDELETEVEC (stack_vars);
1467  XDELETEVEC (stack_vars_sorted);
1468  stack_vars = NULL;
1469  stack_vars_alloc = stack_vars_num = 0;
1470  pointer_map_destroy (decl_to_stack_part);
1471  decl_to_stack_part = NULL;
1472}
1473
1474/* Make a fair guess for the size of the stack frame of the function
1475   in NODE.  This doesn't have to be exact, the result is only used in
1476   the inline heuristics.  So we don't want to run the full stack var
1477   packing algorithm (which is quadratic in the number of stack vars).
1478   Instead, we calculate the total size of all stack vars.  This turns
1479   out to be a pretty fair estimate -- packing of stack vars doesn't
1480   happen very often.  */
1481
1482HOST_WIDE_INT
1483estimated_stack_frame_size (struct cgraph_node *node)
1484{
1485  HOST_WIDE_INT size = 0;
1486  size_t i;
1487  tree var;
1488  tree old_cur_fun_decl = current_function_decl;
1489  referenced_var_iterator rvi;
1490  struct function *fn = DECL_STRUCT_FUNCTION (node->decl);
1491
1492  current_function_decl = node->decl;
1493  push_cfun (fn);
1494
1495  gcc_checking_assert (gimple_referenced_vars (fn));
1496  FOR_EACH_REFERENCED_VAR (fn, var, rvi)
1497    size += expand_one_var (var, true, false);
1498
1499  if (stack_vars_num > 0)
1500    {
1501      /* Fake sorting the stack vars for account_stack_vars ().  */
1502      stack_vars_sorted = XNEWVEC (size_t, stack_vars_num);
1503      for (i = 0; i < stack_vars_num; ++i)
1504	stack_vars_sorted[i] = i;
1505      size += account_stack_vars ();
1506      fini_vars_expansion ();
1507    }
1508  pop_cfun ();
1509  current_function_decl = old_cur_fun_decl;
1510  return size;
1511}
1512
1513/* Expand all variables used in the function.  */
1514
1515static void
1516expand_used_vars (void)
1517{
1518  tree var, outer_block = DECL_INITIAL (current_function_decl);
1519  VEC(tree,heap) *maybe_local_decls = NULL;
1520  unsigned i;
1521  unsigned len;
1522
1523  /* Compute the phase of the stack frame for this function.  */
1524  {
1525    int align = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
1526    int off = STARTING_FRAME_OFFSET % align;
1527    frame_phase = off ? align - off : 0;
1528  }
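  /* For instance (purely illustrative numbers): with a preferred stack
     boundary of 128 bits, align is 16; a STARTING_FRAME_OFFSET of 8 then
     gives off == 8 and frame_phase == 8.  */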
1529
1530  init_vars_expansion ();
1531
1532  for (i = 0; i < SA.map->num_partitions; i++)
1533    {
1534      tree var = partition_to_var (SA.map, i);
1535
1536      gcc_assert (is_gimple_reg (var));
1537      if (TREE_CODE (SSA_NAME_VAR (var)) == VAR_DECL)
1538	expand_one_var (var, true, true);
1539      else
1540	{
1541	  /* This is a PARM_DECL or RESULT_DECL.  For those partitions that
1542	     contain the default def (representing the parm or result itself)
1543	     we don't do anything here.  But those which don't contain the
1544	     default def (representing a temporary based on the parm/result)
1545	     we need to allocate space just like for normal VAR_DECLs.  */
1546	  if (!bitmap_bit_p (SA.partition_has_default_def, i))
1547	    {
1548	      expand_one_var (var, true, true);
1549	      gcc_assert (SA.partition_to_pseudo[i]);
1550	    }
1551	}
1552    }
1553
1554  /* At this point all variables on the local_decls with TREE_USED
1555     set are not associated with any block scope.  Lay them out.  */
1556
1557  len = VEC_length (tree, cfun->local_decls);
1558  FOR_EACH_LOCAL_DECL (cfun, i, var)
1559    {
1560      bool expand_now = false;
1561
1562      /* Expanded above already.  */
1563      if (is_gimple_reg (var))
1564	{
1565	  TREE_USED (var) = 0;
1566	  goto next;
1567	}
1568      /* We didn't set a block for static or extern because it's hard
1569	 to tell the difference between a global variable (re)declared
1570	 in a local scope, and one that's really declared there to
1571	 begin with.  And it doesn't really matter much, since we're
1572	 not giving them stack space.  Expand them now.  */
1573      else if (TREE_STATIC (var) || DECL_EXTERNAL (var))
1574	expand_now = true;
1575
1576      /* If the variable is not associated with any block, then it
1577	 was created by the optimizers, and could be live anywhere
1578	 in the function.  */
1579      else if (TREE_USED (var))
1580	expand_now = true;
1581
1582      /* Finally, mark all variables on the list as used.  We'll use
1583	 this in a moment when we expand those associated with scopes.  */
1584      TREE_USED (var) = 1;
1585
1586      if (expand_now)
1587	expand_one_var (var, true, true);
1588
1589    next:
1590      if (DECL_ARTIFICIAL (var) && !DECL_IGNORED_P (var))
1591	{
1592	  rtx rtl = DECL_RTL_IF_SET (var);
1593
1594	  /* Keep artificial non-ignored vars in cfun->local_decls
1595	     chain until instantiate_decls.  */
1596	  if (rtl && (MEM_P (rtl) || GET_CODE (rtl) == CONCAT))
1597	    add_local_decl (cfun, var);
1598	  else if (rtl == NULL_RTX)
1599	    /* If rtl isn't set yet, which can happen e.g. with
1600	       -fstack-protector, retry before returning from this
1601	       function.  */
1602	    VEC_safe_push (tree, heap, maybe_local_decls, var);
1603	}
1604    }
1605
1606  /* We duplicated some of the decls in CFUN->LOCAL_DECLS.
1607
1608     +-----------------+-----------------+
1609     | ...processed... | ...duplicates...|
1610     +-----------------+-----------------+
1611                       ^
1612		       +-- LEN points here.
1613
1614     We just want the duplicates, as those are the artificial
1615     non-ignored vars that we want to keep until instantiate_decls.
1616     Move them down and truncate the array.  */
1617  if (!VEC_empty (tree, cfun->local_decls))
1618    VEC_block_remove (tree, cfun->local_decls, 0, len);
1619
1620  /* At this point, all variables within the block tree with TREE_USED
1621     set are actually used by the optimized function.  Lay them out.  */
1622  expand_used_vars_for_block (outer_block, true);
1623
1624  if (stack_vars_num > 0)
1625    {
1626      add_scope_conflicts ();
1627      /* Due to the way alias sets work, no variables with non-conflicting
1628	 alias sets may be assigned the same address.  Add conflicts to
1629	 reflect this.  */
1630      add_alias_set_conflicts ();
1631
1632      /* If stack protection is enabled, we don't share space between
1633	 vulnerable data and non-vulnerable data.  */
1634      if (flag_stack_protect)
1635	add_stack_protection_conflicts ();
1636
1637      /* Now that we have collected all stack variables, and have computed a
1638	 minimal interference graph, attempt to save some stack space.  */
1639      partition_stack_vars ();
1640      if (dump_file)
1641	dump_stack_var_partition ();
1642    }
1643
1644  /* There are several conditions under which we should create a
1645     stack guard: protect-all, alloca used, protected decls present.  */
1646  if (flag_stack_protect == 2
1647      || (flag_stack_protect
1648	  && (cfun->calls_alloca || has_protected_decls)))
1649    create_stack_guard ();
1650
1651  /* Assign rtl to each variable based on these partitions.  */
1652  if (stack_vars_num > 0)
1653    {
1654      /* Reorder decls to be protected by iterating over the variables
1655	 array multiple times, and allocating out of each phase in turn.  */
1656      /* ??? We could probably integrate this into the qsort we did
1657	 earlier, such that we naturally see these variables first,
1658	 and thus naturally allocate things in the right order.  */
1659      if (has_protected_decls)
1660	{
1661	  /* Phase 1 contains only character arrays.  */
1662	  expand_stack_vars (stack_protect_decl_phase_1);
1663
1664	  /* Phase 2 contains other kinds of arrays.  */
1665	  if (flag_stack_protect == 2)
1666	    expand_stack_vars (stack_protect_decl_phase_2);
1667	}
1668
1669      expand_stack_vars (NULL);
1670
1671      fini_vars_expansion ();
1672    }
1673
1674  /* If there were any artificial non-ignored vars without rtl
1675     found earlier, see if deferred stack allocation hasn't assigned
1676     rtl to them.  */
1677  FOR_EACH_VEC_ELT_REVERSE (tree, maybe_local_decls, i, var)
1678    {
1679      rtx rtl = DECL_RTL_IF_SET (var);
1680
1681      /* Keep artificial non-ignored vars in cfun->local_decls
1682	 chain until instantiate_decls.  */
1683      if (rtl && (MEM_P (rtl) || GET_CODE (rtl) == CONCAT))
1684	add_local_decl (cfun, var);
1685    }
1686  VEC_free (tree, heap, maybe_local_decls);
1687
1688  /* If the target requires that FRAME_OFFSET be aligned, do it.  */
1689  if (STACK_ALIGNMENT_NEEDED)
1690    {
1691      HOST_WIDE_INT align = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
1692      if (!FRAME_GROWS_DOWNWARD)
1693	frame_offset += align - 1;
1694      frame_offset &= -align;
1695    }
1696}
1697
1698
1699/* If we need to produce a detailed dump, print the tree representation
1700   for STMT to the dump file.  SINCE is the last RTX after which the RTL
1701   generated for STMT should have been appended.  */
1702
1703static void
1704maybe_dump_rtl_for_gimple_stmt (gimple stmt, rtx since)
1705{
1706  if (dump_file && (dump_flags & TDF_DETAILS))
1707    {
1708      fprintf (dump_file, "\n;; ");
1709      print_gimple_stmt (dump_file, stmt, 0,
1710			 TDF_SLIM | (dump_flags & TDF_LINENO));
1711      fprintf (dump_file, "\n");
1712
1713      print_rtl (dump_file, since ? NEXT_INSN (since) : since);
1714    }
1715}
1716
1717/* Maps the blocks that do not contain tree labels to rtx labels.  */
1718
1719static struct pointer_map_t *lab_rtx_for_bb;
1720
1721/* Returns the label_rtx expression for a label starting basic block BB.  */
1722
1723static rtx
1724label_rtx_for_bb (basic_block bb ATTRIBUTE_UNUSED)
1725{
1726  gimple_stmt_iterator gsi;
1727  tree lab;
1728  gimple lab_stmt;
1729  void **elt;
1730
1731  if (bb->flags & BB_RTL)
1732    return block_label (bb);
1733
1734  elt = pointer_map_contains (lab_rtx_for_bb, bb);
1735  if (elt)
1736    return (rtx) *elt;
1737
1738  /* Find the tree label if it is present.  */
1739
1740  for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
1741    {
1742      lab_stmt = gsi_stmt (gsi);
1743      if (gimple_code (lab_stmt) != GIMPLE_LABEL)
1744	break;
1745
1746      lab = gimple_label_label (lab_stmt);
1747      if (DECL_NONLOCAL (lab))
1748	break;
1749
1750      return label_rtx (lab);
1751    }
1752
1753  elt = pointer_map_insert (lab_rtx_for_bb, bb);
1754  *elt = gen_label_rtx ();
1755  return (rtx) *elt;
1756}
1757
1758
1759/* A subroutine of expand_gimple_cond.  Given E, a fallthrough edge
1760   of a basic block where we just expanded the conditional at the end,
1761   possibly clean up the CFG and instruction sequence.  LAST is the
1762   last instruction before the just emitted jump sequence.  */
1763
1764static void
1765maybe_cleanup_end_of_block (edge e, rtx last)
1766{
1767  /* Special case: when jumpif decides that the condition is
1768     trivial it emits an unconditional jump (and the necessary
1769     barrier).  But we still have two edges, the fallthru one is
1770     wrong.  purge_dead_edges would clean this up later.  Unfortunately
1771     we have to insert insns (and split edges) before
1772     find_many_sub_basic_blocks and hence before purge_dead_edges.
1773     But splitting edges might create new blocks which depend on the
1774     fact that if there are two edges there's no barrier.  So the
1775     barrier would get lost and verify_flow_info would ICE.  Instead
1776     of auditing all edge splitters to care for the barrier (which
1777     normally isn't there in a cleaned CFG), fix it here.  */
1778  if (BARRIER_P (get_last_insn ()))
1779    {
1780      rtx insn;
1781      remove_edge (e);
1782      /* Now we have a single successor block.  If we have insns to
1783	 insert on the remaining edge, we will potentially insert
1784	 them at the end of this block (if the dest block isn't feasible)
1785	 in order to avoid splitting the edge.  This insertion will take
1786	 place in front of the last jump.  But we might have emitted
1787	 multiple jumps (conditional and one unconditional) to the
1788	 same destination.  Inserting in front of the last one then
1789	 is a problem.  See PR 40021.  We fix this by deleting all
1790	 jumps except the last unconditional one.  */
1791      insn = PREV_INSN (get_last_insn ());
1792      /* Make sure we have an unconditional jump.  Otherwise we're
1793	 confused.  */
1794      gcc_assert (JUMP_P (insn) && !any_condjump_p (insn));
1795      for (insn = PREV_INSN (insn); insn != last;)
1796	{
1797	  insn = PREV_INSN (insn);
1798	  if (JUMP_P (NEXT_INSN (insn)))
1799	    {
1800	      if (!any_condjump_p (NEXT_INSN (insn)))
1801		{
1802		  gcc_assert (BARRIER_P (NEXT_INSN (NEXT_INSN (insn))));
1803		  delete_insn (NEXT_INSN (NEXT_INSN (insn)));
1804		}
1805	      delete_insn (NEXT_INSN (insn));
1806	    }
1807	}
1808    }
1809}
1810
1811/* A subroutine of expand_gimple_basic_block.  Expand one GIMPLE_COND.
1812   Returns a new basic block if we've terminated the current basic
1813   block and created a new one.  */
1814
1815static basic_block
1816expand_gimple_cond (basic_block bb, gimple stmt)
1817{
1818  basic_block new_bb, dest;
1819  edge new_edge;
1820  edge true_edge;
1821  edge false_edge;
1822  rtx last2, last;
1823  enum tree_code code;
1824  tree op0, op1;
1825
1826  code = gimple_cond_code (stmt);
1827  op0 = gimple_cond_lhs (stmt);
1828  op1 = gimple_cond_rhs (stmt);
1829  /* We're sometimes presented with such code:
1830       D.123_1 = x < y;
1831       if (D.123_1 != 0)
1832         ...
1833     This would expand to two comparisons which then later might
1834     be cleaned up by combine.  But some pattern matchers like if-conversion
1835     work better when there's only one compare, so make up for this
1836     here as a special exception if TER would have made the same change.  */
1837  if (gimple_cond_single_var_p (stmt)
1838      && SA.values
1839      && TREE_CODE (op0) == SSA_NAME
1840      && bitmap_bit_p (SA.values, SSA_NAME_VERSION (op0)))
1841    {
1842      gimple second = SSA_NAME_DEF_STMT (op0);
1843      if (gimple_code (second) == GIMPLE_ASSIGN)
1844	{
1845	  enum tree_code code2 = gimple_assign_rhs_code (second);
1846	  if (TREE_CODE_CLASS (code2) == tcc_comparison)
1847	    {
1848	      code = code2;
1849	      op0 = gimple_assign_rhs1 (second);
1850	      op1 = gimple_assign_rhs2 (second);
1851	    }
1852	  /* If jumps are cheap, turn some more codes into
1853	     jumpy sequences.  */
1854	  else if (BRANCH_COST (optimize_insn_for_speed_p (), false) < 4)
1855	    {
1856	      if ((code2 == BIT_AND_EXPR
1857		   && TYPE_PRECISION (TREE_TYPE (op0)) == 1
1858		   && TREE_CODE (gimple_assign_rhs2 (second)) != INTEGER_CST)
1859		  || code2 == TRUTH_AND_EXPR)
1860		{
1861		  code = TRUTH_ANDIF_EXPR;
1862		  op0 = gimple_assign_rhs1 (second);
1863		  op1 = gimple_assign_rhs2 (second);
1864		}
1865	      else if (code2 == BIT_IOR_EXPR || code2 == TRUTH_OR_EXPR)
1866		{
1867		  code = TRUTH_ORIF_EXPR;
1868		  op0 = gimple_assign_rhs1 (second);
1869		  op1 = gimple_assign_rhs2 (second);
1870		}
1871	    }
1872	}
1873    }
1874
1875  last2 = last = get_last_insn ();
1876
1877  extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
1878  set_curr_insn_source_location (gimple_location (stmt));
1879  set_curr_insn_block (gimple_block (stmt));
1880
1881  /* These flags have no purpose in RTL land.  */
1882  true_edge->flags &= ~EDGE_TRUE_VALUE;
1883  false_edge->flags &= ~EDGE_FALSE_VALUE;
1884
1885  /* We can either have a pure conditional jump with one fallthru edge or a
1886     two-way jump that needs to be decomposed into two basic blocks.  */
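  /* As a rough illustration (the actual RTL is target-dependent): for
       if (a_1 < b_2) goto <L1>; else goto <L2>;
     where <L2> is the next block, jumpif_1 emits a comparison and a
     conditional jump along the lines of
       (set (pc) (if_then_else (lt (reg a) (reg b)) (label_ref L1) (pc)))
     and the false edge simply becomes the fallthrough.  Only when
     neither destination follows immediately do we also emit an
     unconditional jump and split off a new block for it below.  */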
1887  if (false_edge->dest == bb->next_bb)
1888    {
1889      jumpif_1 (code, op0, op1, label_rtx_for_bb (true_edge->dest),
1890		true_edge->probability);
1891      maybe_dump_rtl_for_gimple_stmt (stmt, last);
1892      if (true_edge->goto_locus)
1893	{
1894	  set_curr_insn_source_location (true_edge->goto_locus);
1895	  set_curr_insn_block (true_edge->goto_block);
1896	  true_edge->goto_locus = curr_insn_locator ();
1897	}
1898      true_edge->goto_block = NULL;
1899      false_edge->flags |= EDGE_FALLTHRU;
1900      maybe_cleanup_end_of_block (false_edge, last);
1901      return NULL;
1902    }
1903  if (true_edge->dest == bb->next_bb)
1904    {
1905      jumpifnot_1 (code, op0, op1, label_rtx_for_bb (false_edge->dest),
1906		   false_edge->probability);
1907      maybe_dump_rtl_for_gimple_stmt (stmt, last);
1908      if (false_edge->goto_locus)
1909	{
1910	  set_curr_insn_source_location (false_edge->goto_locus);
1911	  set_curr_insn_block (false_edge->goto_block);
1912	  false_edge->goto_locus = curr_insn_locator ();
1913	}
1914      false_edge->goto_block = NULL;
1915      true_edge->flags |= EDGE_FALLTHRU;
1916      maybe_cleanup_end_of_block (true_edge, last);
1917      return NULL;
1918    }
1919
1920  jumpif_1 (code, op0, op1, label_rtx_for_bb (true_edge->dest),
1921	    true_edge->probability);
1922  last = get_last_insn ();
1923  if (false_edge->goto_locus)
1924    {
1925      set_curr_insn_source_location (false_edge->goto_locus);
1926      set_curr_insn_block (false_edge->goto_block);
1927      false_edge->goto_locus = curr_insn_locator ();
1928    }
1929  false_edge->goto_block = NULL;
1930  emit_jump (label_rtx_for_bb (false_edge->dest));
1931
1932  BB_END (bb) = last;
1933  if (BARRIER_P (BB_END (bb)))
1934    BB_END (bb) = PREV_INSN (BB_END (bb));
1935  update_bb_for_insn (bb);
1936
1937  new_bb = create_basic_block (NEXT_INSN (last), get_last_insn (), bb);
1938  dest = false_edge->dest;
1939  redirect_edge_succ (false_edge, new_bb);
1940  false_edge->flags |= EDGE_FALLTHRU;
1941  new_bb->count = false_edge->count;
1942  new_bb->frequency = EDGE_FREQUENCY (false_edge);
1943  new_edge = make_edge (new_bb, dest, 0);
1944  new_edge->probability = REG_BR_PROB_BASE;
1945  new_edge->count = new_bb->count;
1946  if (BARRIER_P (BB_END (new_bb)))
1947    BB_END (new_bb) = PREV_INSN (BB_END (new_bb));
1948  update_bb_for_insn (new_bb);
1949
1950  maybe_dump_rtl_for_gimple_stmt (stmt, last2);
1951
1952  if (true_edge->goto_locus)
1953    {
1954      set_curr_insn_source_location (true_edge->goto_locus);
1955      set_curr_insn_block (true_edge->goto_block);
1956      true_edge->goto_locus = curr_insn_locator ();
1957    }
1958  true_edge->goto_block = NULL;
1959
1960  return new_bb;
1961}
1962
1963/* Mark all calls that can have a transaction restart.  */
1964
1965static void
1966mark_transaction_restart_calls (gimple stmt)
1967{
1968  struct tm_restart_node dummy;
1969  void **slot;
1970
1971  if (!cfun->gimple_df->tm_restart)
1972    return;
1973
1974  dummy.stmt = stmt;
1975  slot = htab_find_slot (cfun->gimple_df->tm_restart, &dummy, NO_INSERT);
1976  if (slot)
1977    {
1978      struct tm_restart_node *n = (struct tm_restart_node *) *slot;
1979      tree list = n->label_or_list;
1980      rtx insn;
1981
1982      for (insn = next_real_insn (get_last_insn ());
1983	   !CALL_P (insn);
1984	   insn = next_real_insn (insn))
1985	continue;
1986
1987      if (TREE_CODE (list) == LABEL_DECL)
1988	add_reg_note (insn, REG_TM, label_rtx (list));
1989      else
1990	for (; list ; list = TREE_CHAIN (list))
1991	  add_reg_note (insn, REG_TM, label_rtx (TREE_VALUE (list)));
1992    }
1993}
1994
1995/* A subroutine of expand_gimple_stmt_1, expanding one GIMPLE_CALL
1996   statement STMT.  */
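/* For example (illustrative), a GIMPLE call such as
     x = foo (a_1, 17);
   is rebuilt below as a GENERIC CALL_EXPR with the same function,
   arguments and flags, and then handed to expand_assignment, or to
   expand_expr_real_1 with a const0_rtx target when there is no lhs.  */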
1997
1998static void
1999expand_call_stmt (gimple stmt)
2000{
2001  tree exp, decl, lhs;
2002  bool builtin_p;
2003  size_t i;
2004
2005  if (gimple_call_internal_p (stmt))
2006    {
2007      expand_internal_call (stmt);
2008      return;
2009    }
2010
2011  exp = build_vl_exp (CALL_EXPR, gimple_call_num_args (stmt) + 3);
2012
2013  CALL_EXPR_FN (exp) = gimple_call_fn (stmt);
2014  decl = gimple_call_fndecl (stmt);
2015  builtin_p = decl && DECL_BUILT_IN (decl);
2016
2017  /* If this is not a builtin function, the function type through which the
2018     call is made may be different from the type of the function.  */
2019  if (!builtin_p)
2020    CALL_EXPR_FN (exp)
2021      = fold_convert (build_pointer_type (gimple_call_fntype (stmt)),
2022		      CALL_EXPR_FN (exp));
2023
2024  TREE_TYPE (exp) = gimple_call_return_type (stmt);
2025  CALL_EXPR_STATIC_CHAIN (exp) = gimple_call_chain (stmt);
2026
2027  for (i = 0; i < gimple_call_num_args (stmt); i++)
2028    {
2029      tree arg = gimple_call_arg (stmt, i);
2030      gimple def;
2031      /* TER substitutes addresses into arguments of builtin functions so we
2032	 have a chance to infer more correct alignment information.  See PR39954.  */
2033      if (builtin_p
2034	  && TREE_CODE (arg) == SSA_NAME
2035	  && (def = get_gimple_for_ssa_name (arg))
2036	  && gimple_assign_rhs_code (def) == ADDR_EXPR)
2037	arg = gimple_assign_rhs1 (def);
2038      CALL_EXPR_ARG (exp, i) = arg;
2039    }
2040
2041  if (gimple_has_side_effects (stmt))
2042    TREE_SIDE_EFFECTS (exp) = 1;
2043
2044  if (gimple_call_nothrow_p (stmt))
2045    TREE_NOTHROW (exp) = 1;
2046
2047  CALL_EXPR_TAILCALL (exp) = gimple_call_tail_p (stmt);
2048  CALL_EXPR_RETURN_SLOT_OPT (exp) = gimple_call_return_slot_opt_p (stmt);
2049  if (decl
2050      && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL
2051      && (DECL_FUNCTION_CODE (decl) == BUILT_IN_ALLOCA
2052	  || DECL_FUNCTION_CODE (decl) == BUILT_IN_ALLOCA_WITH_ALIGN))
2053    CALL_ALLOCA_FOR_VAR_P (exp) = gimple_call_alloca_for_var_p (stmt);
2054  else
2055    CALL_FROM_THUNK_P (exp) = gimple_call_from_thunk_p (stmt);
2056  CALL_EXPR_VA_ARG_PACK (exp) = gimple_call_va_arg_pack_p (stmt);
2057  SET_EXPR_LOCATION (exp, gimple_location (stmt));
2058  TREE_BLOCK (exp) = gimple_block (stmt);
2059
2060  /* Ensure RTL is created for debug args.  */
2061  if (decl && DECL_HAS_DEBUG_ARGS_P (decl))
2062    {
2063      VEC(tree, gc) **debug_args = decl_debug_args_lookup (decl);
2064      unsigned int ix;
2065      tree dtemp;
2066
2067      if (debug_args)
2068	for (ix = 1; VEC_iterate (tree, *debug_args, ix, dtemp); ix += 2)
2069	  {
2070	    gcc_assert (TREE_CODE (dtemp) == DEBUG_EXPR_DECL);
2071	    expand_debug_expr (dtemp);
2072	  }
2073    }
2074
2075  lhs = gimple_call_lhs (stmt);
2076  if (lhs)
2077    expand_assignment (lhs, exp, false);
2078  else
2079    expand_expr_real_1 (exp, const0_rtx, VOIDmode, EXPAND_NORMAL, NULL);
2080
2081  mark_transaction_restart_calls (stmt);
2082}
2083
2084/* A subroutine of expand_gimple_stmt, expanding one gimple statement
2085   STMT that doesn't require special handling for outgoing edges.  That
2086   is, no tailcalls and no GIMPLE_COND.  */
2087
2088static void
2089expand_gimple_stmt_1 (gimple stmt)
2090{
2091  tree op0;
2092
2093  set_curr_insn_source_location (gimple_location (stmt));
2094  set_curr_insn_block (gimple_block (stmt));
2095
2096  switch (gimple_code (stmt))
2097    {
2098    case GIMPLE_GOTO:
2099      op0 = gimple_goto_dest (stmt);
2100      if (TREE_CODE (op0) == LABEL_DECL)
2101	expand_goto (op0);
2102      else
2103	expand_computed_goto (op0);
2104      break;
2105    case GIMPLE_LABEL:
2106      expand_label (gimple_label_label (stmt));
2107      break;
2108    case GIMPLE_NOP:
2109    case GIMPLE_PREDICT:
2110      break;
2111    case GIMPLE_SWITCH:
2112      expand_case (stmt);
2113      break;
2114    case GIMPLE_ASM:
2115      expand_asm_stmt (stmt);
2116      break;
2117    case GIMPLE_CALL:
2118      expand_call_stmt (stmt);
2119      break;
2120
2121    case GIMPLE_RETURN:
2122      op0 = gimple_return_retval (stmt);
2123
2124      if (op0 && op0 != error_mark_node)
2125	{
2126	  tree result = DECL_RESULT (current_function_decl);
2127
2128	  /* If we are not returning the current function's RESULT_DECL,
2129	     build an assignment to it.  */
2130	  if (op0 != result)
2131	    {
2132	      /* I believe that a function's RESULT_DECL is unique.  */
2133	      gcc_assert (TREE_CODE (op0) != RESULT_DECL);
2134
2135	      /* ??? We'd like to use simply expand_assignment here,
2136	         but this fails if the value is of BLKmode but the return
2137		 decl is a register.  expand_return has special handling
2138		 for this combination, which eventually should move
2139		 to common code.  See comments there.  Until then, let's
2140		 build a modify expression :-/  */
2141	      op0 = build2 (MODIFY_EXPR, TREE_TYPE (result),
2142			    result, op0);
2143	    }
2144	}
2145      if (!op0)
2146	expand_null_return ();
2147      else
2148	expand_return (op0);
2149      break;
2150
2151    case GIMPLE_ASSIGN:
2152      {
2153	tree lhs = gimple_assign_lhs (stmt);
2154
2155	/* Tree expand used to fiddle with |= and &= of two bitfield
2156	   COMPONENT_REFs here.  This can't happen with gimple; the LHS
2157	   of binary assigns must be a gimple reg.  */
2158
2159	if (TREE_CODE (lhs) != SSA_NAME
2160	    || get_gimple_rhs_class (gimple_expr_code (stmt))
2161	       == GIMPLE_SINGLE_RHS)
2162	  {
2163	    tree rhs = gimple_assign_rhs1 (stmt);
2164	    gcc_assert (get_gimple_rhs_class (gimple_expr_code (stmt))
2165			== GIMPLE_SINGLE_RHS);
2166	    if (gimple_has_location (stmt) && CAN_HAVE_LOCATION_P (rhs))
2167	      SET_EXPR_LOCATION (rhs, gimple_location (stmt));
2168	    if (TREE_CLOBBER_P (rhs))
2169	      /* This is a clobber marking the point where this LHS
2170		 goes out of scope.  */
2171	      ;
2172	    else
2173	      expand_assignment (lhs, rhs,
2174				 gimple_assign_nontemporal_move_p (stmt));
2175	  }
2176	else
2177	  {
2178	    rtx target, temp;
2179	    bool nontemporal = gimple_assign_nontemporal_move_p (stmt);
2180	    struct separate_ops ops;
2181	    bool promoted = false;
2182
2183	    target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
2184	    if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
2185	      promoted = true;
2186
2187	    ops.code = gimple_assign_rhs_code (stmt);
2188	    ops.type = TREE_TYPE (lhs);
2189	    switch (get_gimple_rhs_class (gimple_expr_code (stmt)))
2190	      {
2191		case GIMPLE_TERNARY_RHS:
2192		  ops.op2 = gimple_assign_rhs3 (stmt);
2193		  /* Fallthru */
2194		case GIMPLE_BINARY_RHS:
2195		  ops.op1 = gimple_assign_rhs2 (stmt);
2196		  /* Fallthru */
2197		case GIMPLE_UNARY_RHS:
2198		  ops.op0 = gimple_assign_rhs1 (stmt);
2199		  break;
2200		default:
2201		  gcc_unreachable ();
2202	      }
2203	    ops.location = gimple_location (stmt);
2204
2205	    /* If we want to use a nontemporal store, force the value to
2206	       a register first.  If we store into a promoted register,
2207	       don't directly expand to target.  */
2208	    temp = nontemporal || promoted ? NULL_RTX : target;
2209	    temp = expand_expr_real_2 (&ops, temp, GET_MODE (target),
2210				       EXPAND_NORMAL);
2211
2212	    if (temp == target)
2213	      ;
2214	    else if (promoted)
2215	      {
2216		int unsignedp = SUBREG_PROMOTED_UNSIGNED_P (target);
2217		/* If TEMP is a VOIDmode constant, use convert_modes to make
2218		   sure that we properly convert it.  */
2219		if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
2220		  {
2221		    temp = convert_modes (GET_MODE (target),
2222					  TYPE_MODE (ops.type),
2223					  temp, unsignedp);
2224		    temp = convert_modes (GET_MODE (SUBREG_REG (target)),
2225					  GET_MODE (target), temp, unsignedp);
2226		  }
2227
2228		convert_move (SUBREG_REG (target), temp, unsignedp);
2229	      }
2230	    else if (nontemporal && emit_storent_insn (target, temp))
2231	      ;
2232	    else
2233	      {
2234		temp = force_operand (temp, target);
2235		if (temp != target)
2236		  emit_move_insn (target, temp);
2237	      }
2238	  }
2239      }
2240      break;
2241
2242    default:
2243      gcc_unreachable ();
2244    }
2245}
2246
2247/* Expand one gimple statement STMT and return the last RTL instruction
2248   before any of the newly generated ones.
2249
2250   In addition to generating the necessary RTL instructions this also
2251   sets REG_EH_REGION notes if necessary and sets the current source
2252   location for diagnostics.  */
2253
2254static rtx
2255expand_gimple_stmt (gimple stmt)
2256{
2257  location_t saved_location = input_location;
2258  rtx last = get_last_insn ();
2259  int lp_nr;
2260
2261  gcc_assert (cfun);
2262
2263  /* We need to save and restore the current source location so that errors
2264     discovered during expansion are emitted with the right location.  But
2265     it would be better if the diagnostic routines used the source location
2266     embedded in the tree nodes rather than globals.  */
2267  if (gimple_has_location (stmt))
2268    input_location = gimple_location (stmt);
2269
2270  expand_gimple_stmt_1 (stmt);
2271
2272  /* Free any temporaries used to evaluate this statement.  */
2273  free_temp_slots ();
2274
2275  input_location = saved_location;
2276
2277  /* Mark all insns that may trap.  */
2278  lp_nr = lookup_stmt_eh_lp (stmt);
2279  if (lp_nr)
2280    {
2281      rtx insn;
2282      for (insn = next_real_insn (last); insn;
2283	   insn = next_real_insn (insn))
2284	{
2285	  if (! find_reg_note (insn, REG_EH_REGION, NULL_RTX)
2286	      /* If we want exceptions for non-call insns, any
2287		 may_trap_p instruction may throw.  */
2288	      && GET_CODE (PATTERN (insn)) != CLOBBER
2289	      && GET_CODE (PATTERN (insn)) != USE
2290	      && insn_could_throw_p (insn))
2291	    make_reg_eh_region_note (insn, 0, lp_nr);
2292	}
2293    }
2294
2295  return last;
2296}
2297
2298/* A subroutine of expand_gimple_basic_block.  Expand one GIMPLE_CALL
2299   that has CALL_EXPR_TAILCALL set.  Returns non-null if we actually
2300   generated a tail call (something that might be denied by the ABI
2301   rules governing the call; see calls.c).
2302
2303   Sets CAN_FALLTHRU if we generated a *conditional* tail call, and
2304   can still reach the rest of BB.  The case here is __builtin_sqrt,
2305   where the NaN result goes through the external function (with a
2306   tailcall) and the normal result happens via a sqrt instruction.  */
2307
2308static basic_block
2309expand_gimple_tailcall (basic_block bb, gimple stmt, bool *can_fallthru)
2310{
2311  rtx last2, last;
2312  edge e;
2313  edge_iterator ei;
2314  int probability;
2315  gcov_type count;
2316
2317  last2 = last = expand_gimple_stmt (stmt);
2318
2319  for (last = NEXT_INSN (last); last; last = NEXT_INSN (last))
2320    if (CALL_P (last) && SIBLING_CALL_P (last))
2321      goto found;
2322
2323  maybe_dump_rtl_for_gimple_stmt (stmt, last2);
2324
2325  *can_fallthru = true;
2326  return NULL;
2327
2328 found:
2329  /* ??? Wouldn't it be better to just reset any pending stack adjust?
2330     Any instructions emitted here are about to be deleted.  */
2331  do_pending_stack_adjust ();
2332
2333  /* Remove any non-eh, non-abnormal edges that don't go to exit.  */
2334  /* ??? I.e. the fallthrough edge.  HOWEVER!  If there were to be
2335     EH or abnormal edges, we shouldn't have created a tail call in
2336     the first place.  So it seems to me we should just be removing
2337     all edges here, or redirecting the existing fallthru edge to
2338     the exit block.  */
2339
2340  probability = 0;
2341  count = 0;
2342
2343  for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
2344    {
2345      if (!(e->flags & (EDGE_ABNORMAL | EDGE_EH)))
2346	{
2347	  if (e->dest != EXIT_BLOCK_PTR)
2348	    {
2349	      e->dest->count -= e->count;
2350	      e->dest->frequency -= EDGE_FREQUENCY (e);
2351	      if (e->dest->count < 0)
2352		e->dest->count = 0;
2353	      if (e->dest->frequency < 0)
2354		e->dest->frequency = 0;
2355	    }
2356	  count += e->count;
2357	  probability += e->probability;
2358	  remove_edge (e);
2359	}
2360      else
2361	ei_next (&ei);
2362    }
2363
2364  /* This is somewhat ugly: the call_expr expander often emits instructions
2365     after the sibcall (to perform the function return).  These confuse the
2366     find_many_sub_basic_blocks code, so we need to get rid of them.  */
2367  last = NEXT_INSN (last);
2368  gcc_assert (BARRIER_P (last));
2369
2370  *can_fallthru = false;
2371  while (NEXT_INSN (last))
2372    {
2373      /* For instance an sqrt builtin expander expands an if with a
2374	 sibcall in the then arm and a label for the else arm.  */
2375      if (LABEL_P (NEXT_INSN (last)))
2376	{
2377	  *can_fallthru = true;
2378	  break;
2379	}
2380      delete_insn (NEXT_INSN (last));
2381    }
2382
2383  e = make_edge (bb, EXIT_BLOCK_PTR, EDGE_ABNORMAL | EDGE_SIBCALL);
2384  e->probability += probability;
2385  e->count += count;
2386  BB_END (bb) = last;
2387  update_bb_for_insn (bb);
2388
2389  if (NEXT_INSN (last))
2390    {
2391      bb = create_basic_block (NEXT_INSN (last), get_last_insn (), bb);
2392
2393      last = BB_END (bb);
2394      if (BARRIER_P (last))
2395	BB_END (bb) = PREV_INSN (last);
2396    }
2397
2398  maybe_dump_rtl_for_gimple_stmt (stmt, last2);
2399
2400  return bb;
2401}
2402
2403/* Return the difference between the floor and the truncated result of
2404   a signed division by OP1 with remainder MOD.  */
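/* Illustrative check of the formula below: for -7 / 2 the truncated
   quotient is -3 with remainder -1; MOD != 0 and OP1 / MOD == 2 / -1 < 0,
   so the adjustment is -1 and -3 + -1 == -4 == floor (-7 / 2).  For
   7 / 2 the remainder is 1, OP1 / MOD > 0, and the adjustment is 0.  */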
2405static rtx
2406floor_sdiv_adjust (enum machine_mode mode, rtx mod, rtx op1)
2407{
2408  /* (mod != 0 ? (op1 / mod < 0 ? -1 : 0) : 0) */
2409  return gen_rtx_IF_THEN_ELSE
2410    (mode, gen_rtx_NE (BImode, mod, const0_rtx),
2411     gen_rtx_IF_THEN_ELSE
2412     (mode, gen_rtx_LT (BImode,
2413			gen_rtx_DIV (mode, op1, mod),
2414			const0_rtx),
2415      constm1_rtx, const0_rtx),
2416     const0_rtx);
2417}
2418
2419/* Return the difference between the ceil and the truncated result of
2420   a signed division by OP1 with remainder MOD.  */
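/* Illustrative check: for 7 / 2 the truncated quotient is 3 with
   remainder 1; OP1 / MOD == 2 > 0, so the adjustment is 1 and
   3 + 1 == 4 == ceil (7 / 2).  For -7 / 2 the remainder is -1,
   OP1 / MOD < 0, and no adjustment is needed since the truncated
   quotient -3 already equals the ceiling.  */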
2421static rtx
2422ceil_sdiv_adjust (enum machine_mode mode, rtx mod, rtx op1)
2423{
2424  /* (mod != 0 ? (op1 / mod > 0 ? 1 : 0) : 0) */
2425  return gen_rtx_IF_THEN_ELSE
2426    (mode, gen_rtx_NE (BImode, mod, const0_rtx),
2427     gen_rtx_IF_THEN_ELSE
2428     (mode, gen_rtx_GT (BImode,
2429			gen_rtx_DIV (mode, op1, mod),
2430			const0_rtx),
2431      const1_rtx, const0_rtx),
2432     const0_rtx);
2433}
2434
2435/* Return the difference between the ceil and the truncated result of
2436   an unsigned division by OP1 with remainder MOD.  */
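/* Illustrative check: for the unsigned division 7 / 2 the remainder is
   nonzero, so the adjustment is 1 and 3 + 1 == 4 == ceil (7 / 2); for an
   exact division the adjustment is 0.  */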
2437static rtx
2438ceil_udiv_adjust (enum machine_mode mode, rtx mod, rtx op1 ATTRIBUTE_UNUSED)
2439{
2440  /* (mod != 0 ? 1 : 0) */
2441  return gen_rtx_IF_THEN_ELSE
2442    (mode, gen_rtx_NE (BImode, mod, const0_rtx),
2443     const1_rtx, const0_rtx);
2444}
2445
2446/* Return the difference between the rounded and the truncated result
2447   of a signed division by OP1 with remainder MOD.  Halfway cases are
2448   rounded away from zero, rather than to the nearest even number.  */
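/* Illustrative check: for 7 / 2, abs (MOD) == 1 equals
   abs (OP1) - abs (MOD) == 1, so the adjustment applies; OP1 / MOD > 0
   gives +1 and 3 + 1 == 4, i.e. 3.5 rounded away from zero.  For 7 / 3
   the remainder 1 is less than 3 - 1 == 2, so the truncated quotient 2
   is already the rounded result.  */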
2449static rtx
2450round_sdiv_adjust (enum machine_mode mode, rtx mod, rtx op1)
2451{
2452  /* (abs (mod) >= abs (op1) - abs (mod)
2453      ? (op1 / mod > 0 ? 1 : -1)
2454      : 0) */
2455  return gen_rtx_IF_THEN_ELSE
2456    (mode, gen_rtx_GE (BImode, gen_rtx_ABS (mode, mod),
2457		       gen_rtx_MINUS (mode,
2458				      gen_rtx_ABS (mode, op1),
2459				      gen_rtx_ABS (mode, mod))),
2460     gen_rtx_IF_THEN_ELSE
2461     (mode, gen_rtx_GT (BImode,
2462			gen_rtx_DIV (mode, op1, mod),
2463			const0_rtx),
2464      const1_rtx, constm1_rtx),
2465     const0_rtx);
2466}
2467
2468/* Return the difference between the rounded and the truncated result
2469   of an unsigned division by OP1 with remainder MOD.  Halfway cases
2470   are rounded away from zero, rather than to the nearest even
2471   number.  */
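/* Illustrative check: for the unsigned division 7 / 2, MOD == 1 equals
   OP1 - MOD == 1, so the adjustment is 1 and 3 + 1 == 4; for 7 / 3,
   MOD == 1 is less than 3 - 1 == 2 and no adjustment is made.  */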
2472static rtx
2473round_udiv_adjust (enum machine_mode mode, rtx mod, rtx op1)
2474{
2475  /* (mod >= op1 - mod ? 1 : 0) */
2476  return gen_rtx_IF_THEN_ELSE
2477    (mode, gen_rtx_GE (BImode, mod,
2478		       gen_rtx_MINUS (mode, op1, mod)),
2479     const1_rtx, const0_rtx);
2480}
2481
2482/* Convert X to MODE, which must be Pmode or ptr_mode, without emitting
2483   any rtl.  */
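/* For instance (illustrative; the exact form depends on the target's
   POINTERS_EXTEND_UNSIGNED setting), widening an SImode address X to a
   wider Pmode on a target that zero-extends pointers just wraps it as
   (zero_extend:DI X) instead of emitting an extension insn, while
   narrowing takes the lowpart subreg.  */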
2484
2485static rtx
2486convert_debug_memory_address (enum machine_mode mode, rtx x,
2487			      addr_space_t as)
2488{
2489  enum machine_mode xmode = GET_MODE (x);
2490
2491#ifndef POINTERS_EXTEND_UNSIGNED
2492  gcc_assert (mode == Pmode
2493	      || mode == targetm.addr_space.address_mode (as));
2494  gcc_assert (xmode == mode || xmode == VOIDmode);
2495#else
2496  rtx temp;
2497
2498  gcc_assert (targetm.addr_space.valid_pointer_mode (mode, as));
2499
2500  if (GET_MODE (x) == mode || GET_MODE (x) == VOIDmode)
2501    return x;
2502
2503  if (GET_MODE_PRECISION (mode) < GET_MODE_PRECISION (xmode))
2504    x = simplify_gen_subreg (mode, x, xmode,
2505			     subreg_lowpart_offset
2506			     (mode, xmode));
2507  else if (POINTERS_EXTEND_UNSIGNED > 0)
2508    x = gen_rtx_ZERO_EXTEND (mode, x);
2509  else if (!POINTERS_EXTEND_UNSIGNED)
2510    x = gen_rtx_SIGN_EXTEND (mode, x);
2511  else
2512    {
2513      switch (GET_CODE (x))
2514	{
2515	case SUBREG:
2516	  if ((SUBREG_PROMOTED_VAR_P (x)
2517	       || (REG_P (SUBREG_REG (x)) && REG_POINTER (SUBREG_REG (x)))
2518	       || (GET_CODE (SUBREG_REG (x)) == PLUS
2519		   && REG_P (XEXP (SUBREG_REG (x), 0))
2520		   && REG_POINTER (XEXP (SUBREG_REG (x), 0))
2521		   && CONST_INT_P (XEXP (SUBREG_REG (x), 1))))
2522	      && GET_MODE (SUBREG_REG (x)) == mode)
2523	    return SUBREG_REG (x);
2524	  break;
2525	case LABEL_REF:
2526	  temp = gen_rtx_LABEL_REF (mode, XEXP (x, 0));
2527	  LABEL_REF_NONLOCAL_P (temp) = LABEL_REF_NONLOCAL_P (x);
2528	  return temp;
2529	case SYMBOL_REF:
2530	  temp = shallow_copy_rtx (x);
2531	  PUT_MODE (temp, mode);
2532	  return temp;
2533	case CONST:
2534	  temp = convert_debug_memory_address (mode, XEXP (x, 0), as);
2535	  if (temp)
2536	    temp = gen_rtx_CONST (mode, temp);
2537	  return temp;
2538	case PLUS:
2539	case MINUS:
2540	  if (CONST_INT_P (XEXP (x, 1)))
2541	    {
2542	      temp = convert_debug_memory_address (mode, XEXP (x, 0), as);
2543	      if (temp)
2544		return gen_rtx_fmt_ee (GET_CODE (x), mode, temp, XEXP (x, 1));
2545	    }
2546	  break;
2547	default:
2548	  break;
2549	}
2550      /* Don't know how to express ptr_extend as an operation in debug info.  */
2551      return NULL;
2552    }
2553#endif /* POINTERS_EXTEND_UNSIGNED */
2554
2555  return x;
2556}
2557
2558/* Return an RTX equivalent to the value of the parameter DECL.  */
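/* For instance (illustrative), for a parameter whose DECL_INCOMING_RTL
   is a hard register, the result is an ENTRY_VALUE rtx wrapping that
   register, i.e. the value the register held on entry to the function
   rather than whatever it holds at the current point.  */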
2559
2560static rtx
2561expand_debug_parm_decl (tree decl)
2562{
2563  rtx incoming = DECL_INCOMING_RTL (decl);
2564
2565  if (incoming
2566      && GET_MODE (incoming) != BLKmode
2567      && ((REG_P (incoming) && HARD_REGISTER_P (incoming))
2568	  || (MEM_P (incoming)
2569	      && REG_P (XEXP (incoming, 0))
2570	      && HARD_REGISTER_P (XEXP (incoming, 0)))))
2571    {
2572      rtx rtl = gen_rtx_ENTRY_VALUE (GET_MODE (incoming));
2573
2574#ifdef HAVE_window_save
2575      /* DECL_INCOMING_RTL uses the INCOMING_REGNO of parameter registers.
2576	 If the target machine has an explicit window save instruction, the
2577	 actual entry value is the corresponding OUTGOING_REGNO instead.  */
2578      if (REG_P (incoming)
2579	  && OUTGOING_REGNO (REGNO (incoming)) != REGNO (incoming))
2580	incoming
2581	  = gen_rtx_REG_offset (incoming, GET_MODE (incoming),
2582				OUTGOING_REGNO (REGNO (incoming)), 0);
2583      else if (MEM_P (incoming))
2584	{
2585	  rtx reg = XEXP (incoming, 0);
2586	  if (OUTGOING_REGNO (REGNO (reg)) != REGNO (reg))
2587	    {
2588	      reg = gen_raw_REG (GET_MODE (reg), OUTGOING_REGNO (REGNO (reg)));
2589	      incoming = replace_equiv_address_nv (incoming, reg);
2590	    }
2591	  else
2592	    incoming = copy_rtx (incoming);
2593	}
2594#endif
2595
2596      ENTRY_VALUE_EXP (rtl) = incoming;
2597      return rtl;
2598    }
2599
2600  if (incoming
2601      && GET_MODE (incoming) != BLKmode
2602      && !TREE_ADDRESSABLE (decl)
2603      && MEM_P (incoming)
2604      && (XEXP (incoming, 0) == virtual_incoming_args_rtx
2605	  || (GET_CODE (XEXP (incoming, 0)) == PLUS
2606	      && XEXP (XEXP (incoming, 0), 0) == virtual_incoming_args_rtx
2607	      && CONST_INT_P (XEXP (XEXP (incoming, 0), 1)))))
2608    return copy_rtx (incoming);
2609
2610  return NULL_RTX;
2611}
2612
2613/* Return an RTX equivalent to the value of the tree expression EXP.  */
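/* For example (illustrative), for a debug-only use of a_1 + 1 where a_1
   ended up in pseudo (reg:SI 60), this returns the un-emitted rtx
   (plus:SI (reg:SI 60) (const_int 1)) built via the simplify_gen_*
   routines; if some operand cannot be represented, NULL is returned and
   the value is treated as optimized away.  */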
2614
2615static rtx
2616expand_debug_expr (tree exp)
2617{
2618  rtx op0 = NULL_RTX, op1 = NULL_RTX, op2 = NULL_RTX;
2619  enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
2620  enum machine_mode inner_mode = VOIDmode;
2621  int unsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));
2622  addr_space_t as;
2623
2624  switch (TREE_CODE_CLASS (TREE_CODE (exp)))
2625    {
2626    case tcc_expression:
2627      switch (TREE_CODE (exp))
2628	{
2629	case COND_EXPR:
2630	case DOT_PROD_EXPR:
2631	case WIDEN_MULT_PLUS_EXPR:
2632	case WIDEN_MULT_MINUS_EXPR:
2633	case FMA_EXPR:
2634	  goto ternary;
2635
2636	case TRUTH_ANDIF_EXPR:
2637	case TRUTH_ORIF_EXPR:
2638	case TRUTH_AND_EXPR:
2639	case TRUTH_OR_EXPR:
2640	case TRUTH_XOR_EXPR:
2641	  goto binary;
2642
2643	case TRUTH_NOT_EXPR:
2644	  goto unary;
2645
2646	default:
2647	  break;
2648	}
2649      break;
2650
2651    ternary:
2652      op2 = expand_debug_expr (TREE_OPERAND (exp, 2));
2653      if (!op2)
2654	return NULL_RTX;
2655      /* Fall through.  */
2656
2657    binary:
2658    case tcc_binary:
2659    case tcc_comparison:
2660      op1 = expand_debug_expr (TREE_OPERAND (exp, 1));
2661      if (!op1)
2662	return NULL_RTX;
2663      /* Fall through.  */
2664
2665    unary:
2666    case tcc_unary:
2667      inner_mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
2668      op0 = expand_debug_expr (TREE_OPERAND (exp, 0));
2669      if (!op0)
2670	return NULL_RTX;
2671      break;
2672
2673    case tcc_type:
2674    case tcc_statement:
2675      gcc_unreachable ();
2676
2677    case tcc_constant:
2678    case tcc_exceptional:
2679    case tcc_declaration:
2680    case tcc_reference:
2681    case tcc_vl_exp:
2682      break;
2683    }
2684
2685  switch (TREE_CODE (exp))
2686    {
2687    case STRING_CST:
2688      if (!lookup_constant_def (exp))
2689	{
2690	  if (strlen (TREE_STRING_POINTER (exp)) + 1
2691	      != (size_t) TREE_STRING_LENGTH (exp))
2692	    return NULL_RTX;
2693	  op0 = gen_rtx_CONST_STRING (Pmode, TREE_STRING_POINTER (exp));
2694	  op0 = gen_rtx_MEM (BLKmode, op0);
2695	  set_mem_attributes (op0, exp, 0);
2696	  return op0;
2697	}
2698      /* Fall through...  */
2699
2700    case INTEGER_CST:
2701    case REAL_CST:
2702    case FIXED_CST:
2703      op0 = expand_expr (exp, NULL_RTX, mode, EXPAND_INITIALIZER);
2704      return op0;
2705
2706    case COMPLEX_CST:
2707      gcc_assert (COMPLEX_MODE_P (mode));
2708      op0 = expand_debug_expr (TREE_REALPART (exp));
2709      op1 = expand_debug_expr (TREE_IMAGPART (exp));
2710      return gen_rtx_CONCAT (mode, op0, op1);
2711
2712    case DEBUG_EXPR_DECL:
2713      op0 = DECL_RTL_IF_SET (exp);
2714
2715      if (op0)
2716	return op0;
2717
2718      op0 = gen_rtx_DEBUG_EXPR (mode);
2719      DEBUG_EXPR_TREE_DECL (op0) = exp;
2720      SET_DECL_RTL (exp, op0);
2721
2722      return op0;
2723
2724    case VAR_DECL:
2725    case PARM_DECL:
2726    case FUNCTION_DECL:
2727    case LABEL_DECL:
2728    case CONST_DECL:
2729    case RESULT_DECL:
2730      op0 = DECL_RTL_IF_SET (exp);
2731
2732      /* This decl was probably optimized away.  */
2733      if (!op0)
2734	{
2735	  if (TREE_CODE (exp) != VAR_DECL
2736	      || DECL_EXTERNAL (exp)
2737	      || !TREE_STATIC (exp)
2738	      || !DECL_NAME (exp)
2739	      || DECL_HARD_REGISTER (exp)
2740	      || DECL_IN_CONSTANT_POOL (exp)
2741	      || mode == VOIDmode)
2742	    return NULL;
2743
2744	  op0 = make_decl_rtl_for_debug (exp);
2745	  if (!MEM_P (op0)
2746	      || GET_CODE (XEXP (op0, 0)) != SYMBOL_REF
2747	      || SYMBOL_REF_DECL (XEXP (op0, 0)) != exp)
2748	    return NULL;
2749	}
2750      else
2751	op0 = copy_rtx (op0);
2752
2753      if (GET_MODE (op0) == BLKmode
2754	  /* If op0 is not BLKmode but MODE is, adjust_mode
2755	     below would ICE.  While it is likely a FE bug,
2756	     try to be robust here.  See PR43166.  */
2757	  || mode == BLKmode
2758	  || (mode == VOIDmode && GET_MODE (op0) != VOIDmode))
2759	{
2760	  gcc_assert (MEM_P (op0));
2761	  op0 = adjust_address_nv (op0, mode, 0);
2762	  return op0;
2763	}
2764
2765      /* Fall through.  */
2766
2767    adjust_mode:
2768    case PAREN_EXPR:
2769    case NOP_EXPR:
2770    case CONVERT_EXPR:
2771      {
2772	inner_mode = GET_MODE (op0);
2773
2774	if (mode == inner_mode)
2775	  return op0;
2776
2777	if (inner_mode == VOIDmode)
2778	  {
2779	    if (TREE_CODE (exp) == SSA_NAME)
2780	      inner_mode = TYPE_MODE (TREE_TYPE (exp));
2781	    else
2782	      inner_mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
2783	    if (mode == inner_mode)
2784	      return op0;
2785	  }
2786
2787	if (FLOAT_MODE_P (mode) && FLOAT_MODE_P (inner_mode))
2788	  {
2789	    if (GET_MODE_BITSIZE (mode) == GET_MODE_BITSIZE (inner_mode))
2790	      op0 = simplify_gen_subreg (mode, op0, inner_mode, 0);
2791	    else if (GET_MODE_BITSIZE (mode) < GET_MODE_BITSIZE (inner_mode))
2792	      op0 = simplify_gen_unary (FLOAT_TRUNCATE, mode, op0, inner_mode);
2793	    else
2794	      op0 = simplify_gen_unary (FLOAT_EXTEND, mode, op0, inner_mode);
2795	  }
2796	else if (FLOAT_MODE_P (mode))
2797	  {
2798	    gcc_assert (TREE_CODE (exp) != SSA_NAME);
2799	    if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))))
2800	      op0 = simplify_gen_unary (UNSIGNED_FLOAT, mode, op0, inner_mode);
2801	    else
2802	      op0 = simplify_gen_unary (FLOAT, mode, op0, inner_mode);
2803	  }
2804	else if (FLOAT_MODE_P (inner_mode))
2805	  {
2806	    if (unsignedp)
2807	      op0 = simplify_gen_unary (UNSIGNED_FIX, mode, op0, inner_mode);
2808	    else
2809	      op0 = simplify_gen_unary (FIX, mode, op0, inner_mode);
2810	  }
2811	else if (CONSTANT_P (op0)
2812		 || GET_MODE_PRECISION (mode) <= GET_MODE_PRECISION (inner_mode))
2813	  op0 = simplify_gen_subreg (mode, op0, inner_mode,
2814				     subreg_lowpart_offset (mode,
2815							    inner_mode));
2816	else if (TREE_CODE_CLASS (TREE_CODE (exp)) == tcc_unary
2817		 ? TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)))
2818		 : unsignedp)
2819	  op0 = simplify_gen_unary (ZERO_EXTEND, mode, op0, inner_mode);
2820	else
2821	  op0 = simplify_gen_unary (SIGN_EXTEND, mode, op0, inner_mode);
2822
2823	return op0;
2824      }
2825
2826    case MEM_REF:
2827      if (!is_gimple_mem_ref_addr (TREE_OPERAND (exp, 0)))
2828	{
2829	  tree newexp = fold_binary (MEM_REF, TREE_TYPE (exp),
2830				     TREE_OPERAND (exp, 0),
2831				     TREE_OPERAND (exp, 1));
2832	  if (newexp)
2833	    return expand_debug_expr (newexp);
2834	}
2835      /* FALLTHROUGH */
2836    case INDIRECT_REF:
2837      op0 = expand_debug_expr (TREE_OPERAND (exp, 0));
2838      if (!op0)
2839	return NULL;
2840
2841      if (TREE_CODE (exp) == MEM_REF)
2842	{
2843	  if (GET_CODE (op0) == DEBUG_IMPLICIT_PTR
2844	      || (GET_CODE (op0) == PLUS
2845		  && GET_CODE (XEXP (op0, 0)) == DEBUG_IMPLICIT_PTR))
2846	    /* (mem (debug_implicit_ptr)) might confuse aliasing.
2847	       Instead just use get_inner_reference.  */
2848	    goto component_ref;
2849
2850	  op1 = expand_debug_expr (TREE_OPERAND (exp, 1));
2851	  if (!op1 || !CONST_INT_P (op1))
2852	    return NULL;
2853
2854	  op0 = plus_constant (op0, INTVAL (op1));
2855	}
2856
2857      if (POINTER_TYPE_P (TREE_TYPE (exp)))
2858	as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (exp)));
2859      else
2860	as = ADDR_SPACE_GENERIC;
2861
2862      op0 = convert_debug_memory_address (targetm.addr_space.address_mode (as),
2863					  op0, as);
2864      if (op0 == NULL_RTX)
2865	return NULL;
2866
2867      op0 = gen_rtx_MEM (mode, op0);
2868      set_mem_attributes (op0, exp, 0);
2869      if (TREE_CODE (exp) == MEM_REF
2870	  && !is_gimple_mem_ref_addr (TREE_OPERAND (exp, 0)))
2871	set_mem_expr (op0, NULL_TREE);
2872      set_mem_addr_space (op0, as);
2873
2874      return op0;
2875
2876    case TARGET_MEM_REF:
2877      if (TREE_CODE (TMR_BASE (exp)) == ADDR_EXPR
2878	  && !DECL_RTL_SET_P (TREE_OPERAND (TMR_BASE (exp), 0)))
2879	return NULL;
2880
2881      op0 = expand_debug_expr
2882	    (tree_mem_ref_addr (build_pointer_type (TREE_TYPE (exp)), exp));
2883      if (!op0)
2884	return NULL;
2885
2886      if (POINTER_TYPE_P (TREE_TYPE (exp)))
2887	as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (exp)));
2888      else
2889	as = ADDR_SPACE_GENERIC;
2890
2891      op0 = convert_debug_memory_address (targetm.addr_space.address_mode (as),
2892					  op0, as);
2893      if (op0 == NULL_RTX)
2894	return NULL;
2895
2896      op0 = gen_rtx_MEM (mode, op0);
2897
2898      set_mem_attributes (op0, exp, 0);
2899      set_mem_addr_space (op0, as);
2900
2901      return op0;
2902
2903    component_ref:
2904    case ARRAY_REF:
2905    case ARRAY_RANGE_REF:
2906    case COMPONENT_REF:
2907    case BIT_FIELD_REF:
2908    case REALPART_EXPR:
2909    case IMAGPART_EXPR:
2910    case VIEW_CONVERT_EXPR:
2911      {
2912	enum machine_mode mode1;
2913	HOST_WIDE_INT bitsize, bitpos;
2914	tree offset;
2915	int volatilep = 0;
2916	tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
2917					&mode1, &unsignedp, &volatilep, false);
2918	rtx orig_op0;
2919
2920	if (bitsize == 0)
2921	  return NULL;
2922
2923	orig_op0 = op0 = expand_debug_expr (tem);
2924
2925	if (!op0)
2926	  return NULL;
2927
2928	if (offset)
2929	  {
2930	    enum machine_mode addrmode, offmode;
2931
2932	    if (!MEM_P (op0))
2933	      return NULL;
2934
2935	    op0 = XEXP (op0, 0);
2936	    addrmode = GET_MODE (op0);
2937	    if (addrmode == VOIDmode)
2938	      addrmode = Pmode;
2939
2940	    op1 = expand_debug_expr (offset);
2941	    if (!op1)
2942	      return NULL;
2943
2944	    offmode = GET_MODE (op1);
2945	    if (offmode == VOIDmode)
2946	      offmode = TYPE_MODE (TREE_TYPE (offset));
2947
2948	    if (addrmode != offmode)
2949	      op1 = simplify_gen_subreg (addrmode, op1, offmode,
2950					 subreg_lowpart_offset (addrmode,
2951								offmode));
2952
2953	    /* Don't use offset_address here; we don't need a
2954	       recognizable address, and we don't want to generate
2955	       code.  */
2956	    op0 = gen_rtx_MEM (mode, simplify_gen_binary (PLUS, addrmode,
2957							  op0, op1));
2958	  }
2959
2960	if (MEM_P (op0))
2961	  {
2962	    if (mode1 == VOIDmode)
2963	      /* Bitfield.  */
2964	      mode1 = smallest_mode_for_size (bitsize, MODE_INT);
2965	    if (bitpos >= BITS_PER_UNIT)
2966	      {
2967		op0 = adjust_address_nv (op0, mode1, bitpos / BITS_PER_UNIT);
2968		bitpos %= BITS_PER_UNIT;
2969	      }
2970	    else if (bitpos < 0)
2971	      {
2972		HOST_WIDE_INT units
2973		  = (-bitpos + BITS_PER_UNIT - 1) / BITS_PER_UNIT;
2974		op0 = adjust_address_nv (op0, mode1, units);
2975		bitpos += units * BITS_PER_UNIT;
2976	      }
2977	    else if (bitpos == 0 && bitsize == GET_MODE_BITSIZE (mode))
2978	      op0 = adjust_address_nv (op0, mode, 0);
2979	    else if (GET_MODE (op0) != mode1)
2980	      op0 = adjust_address_nv (op0, mode1, 0);
2981	    else
2982	      op0 = copy_rtx (op0);
2983	    if (op0 == orig_op0)
2984	      op0 = shallow_copy_rtx (op0);
2985	    set_mem_attributes (op0, exp, 0);
2986	  }
2987
2988	if (bitpos == 0 && mode == GET_MODE (op0))
2989	  return op0;
2990
2991	if (bitpos < 0)
2992	  return NULL;
2993
2994	if (GET_MODE (op0) == BLKmode)
2995	  return NULL;
2996
2997	if ((bitpos % BITS_PER_UNIT) == 0
2998	    && bitsize == GET_MODE_BITSIZE (mode1))
2999	  {
3000	    enum machine_mode opmode = GET_MODE (op0);
3001
3002	    if (opmode == VOIDmode)
3003	      opmode = TYPE_MODE (TREE_TYPE (tem));
3004
3005	    /* This condition may hold if we're expanding the address
3006	       right past the end of an array that turned out not to
3007	       be addressable (i.e., the address was only computed in
3008	       debug stmts).  The gen_subreg below would rightfully
3009	       crash, and the address doesn't really exist, so just
3010	       drop it.  */
3011	    if (bitpos >= GET_MODE_BITSIZE (opmode))
3012	      return NULL;
3013
3014	    if ((bitpos % GET_MODE_BITSIZE (mode)) == 0)
3015	      return simplify_gen_subreg (mode, op0, opmode,
3016					  bitpos / BITS_PER_UNIT);
3017	  }
3018
3019	return simplify_gen_ternary (SCALAR_INT_MODE_P (GET_MODE (op0))
3020				     && TYPE_UNSIGNED (TREE_TYPE (exp))
3021				     ? SIGN_EXTRACT
3022				     : ZERO_EXTRACT, mode,
3023				     GET_MODE (op0) != VOIDmode
3024				     ? GET_MODE (op0)
3025				     : TYPE_MODE (TREE_TYPE (tem)),
3026				     op0, GEN_INT (bitsize), GEN_INT (bitpos));
3027      }
3028
3029    case ABS_EXPR:
3030      return simplify_gen_unary (ABS, mode, op0, mode);
3031
3032    case NEGATE_EXPR:
3033      return simplify_gen_unary (NEG, mode, op0, mode);
3034
3035    case BIT_NOT_EXPR:
3036      return simplify_gen_unary (NOT, mode, op0, mode);
3037
3038    case FLOAT_EXPR:
3039      return simplify_gen_unary (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp,
3040									 0)))
3041				 ? UNSIGNED_FLOAT : FLOAT, mode, op0,
3042				 inner_mode);
3043
3044    case FIX_TRUNC_EXPR:
3045      return simplify_gen_unary (unsignedp ? UNSIGNED_FIX : FIX, mode, op0,
3046				 inner_mode);
3047
3048    case POINTER_PLUS_EXPR:
3049      /* For the rare target where pointers are not the same size as
3050	 size_t, we need to check for mis-matched modes and correct
3051	 the addend.  */
3052      if (op0 && op1
3053	  && GET_MODE (op0) != VOIDmode && GET_MODE (op1) != VOIDmode
3054	  && GET_MODE (op0) != GET_MODE (op1))
3055	{
3056	  if (GET_MODE_BITSIZE (GET_MODE (op0)) < GET_MODE_BITSIZE (GET_MODE (op1)))
3057	    op1 = simplify_gen_unary (TRUNCATE, GET_MODE (op0), op1,
3058				      GET_MODE (op1));
3059	  else
3060	    /* We always sign-extend, regardless of the signedness of
3061	       the operand, because the operand is always unsigned
3062	       here even if the original C expression is signed.  */
3063	    op1 = simplify_gen_unary (SIGN_EXTEND, GET_MODE (op0), op1,
3064				      GET_MODE (op1));
3065	}
3066      /* Fall through.  */
3067    case PLUS_EXPR:
3068      return simplify_gen_binary (PLUS, mode, op0, op1);
3069
3070    case MINUS_EXPR:
3071      return simplify_gen_binary (MINUS, mode, op0, op1);
3072
3073    case MULT_EXPR:
3074      return simplify_gen_binary (MULT, mode, op0, op1);
3075
3076    case RDIV_EXPR:
3077    case TRUNC_DIV_EXPR:
3078    case EXACT_DIV_EXPR:
3079      if (unsignedp)
3080	return simplify_gen_binary (UDIV, mode, op0, op1);
3081      else
3082	return simplify_gen_binary (DIV, mode, op0, op1);
3083
3084    case TRUNC_MOD_EXPR:
3085      return simplify_gen_binary (unsignedp ? UMOD : MOD, mode, op0, op1);
3086
3087    case FLOOR_DIV_EXPR:
3088      if (unsignedp)
3089	return simplify_gen_binary (UDIV, mode, op0, op1);
3090      else
3091	{
3092	  rtx div = simplify_gen_binary (DIV, mode, op0, op1);
3093	  rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
3094	  rtx adj = floor_sdiv_adjust (mode, mod, op1);
3095	  return simplify_gen_binary (PLUS, mode, div, adj);
3096	}
3097
3098    case FLOOR_MOD_EXPR:
3099      if (unsignedp)
3100	return simplify_gen_binary (UMOD, mode, op0, op1);
3101      else
3102	{
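	  /* floor_mod (a, b) == trunc_mod (a, b) - adj * b, where ADJ is
	     the floor/truncate quotient difference from floor_sdiv_adjust;
	     e.g. (illustrative) -7 floor_mod 2 == -1 - (-1 * 2) == 1.  */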
3103	  rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
3104	  rtx adj = floor_sdiv_adjust (mode, mod, op1);
3105	  adj = simplify_gen_unary (NEG, mode,
3106				    simplify_gen_binary (MULT, mode, adj, op1),
3107				    mode);
3108	  return simplify_gen_binary (PLUS, mode, mod, adj);
3109	}
3110
3111    case CEIL_DIV_EXPR:
3112      if (unsignedp)
3113	{
3114	  rtx div = simplify_gen_binary (UDIV, mode, op0, op1);
3115	  rtx mod = simplify_gen_binary (UMOD, mode, op0, op1);
3116	  rtx adj = ceil_udiv_adjust (mode, mod, op1);
3117	  return simplify_gen_binary (PLUS, mode, div, adj);
3118	}
3119      else
3120	{
3121	  rtx div = simplify_gen_binary (DIV, mode, op0, op1);
3122	  rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
3123	  rtx adj = ceil_sdiv_adjust (mode, mod, op1);
3124	  return simplify_gen_binary (PLUS, mode, div, adj);
3125	}
3126
3127    case CEIL_MOD_EXPR:
3128      if (unsignedp)
3129	{
3130	  rtx mod = simplify_gen_binary (UMOD, mode, op0, op1);
3131	  rtx adj = ceil_udiv_adjust (mode, mod, op1);
3132	  adj = simplify_gen_unary (NEG, mode,
3133				    simplify_gen_binary (MULT, mode, adj, op1),
3134				    mode);
3135	  return simplify_gen_binary (PLUS, mode, mod, adj);
3136	}
3137      else
3138	{
3139	  rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
3140	  rtx adj = ceil_sdiv_adjust (mode, mod, op1);
3141	  adj = simplify_gen_unary (NEG, mode,
3142				    simplify_gen_binary (MULT, mode, adj, op1),
3143				    mode);
3144	  return simplify_gen_binary (PLUS, mode, mod, adj);
3145	}
3146
3147    case ROUND_DIV_EXPR:
3148      if (unsignedp)
3149	{
3150	  rtx div = simplify_gen_binary (UDIV, mode, op0, op1);
3151	  rtx mod = simplify_gen_binary (UMOD, mode, op0, op1);
3152	  rtx adj = round_udiv_adjust (mode, mod, op1);
3153	  return simplify_gen_binary (PLUS, mode, div, adj);
3154	}
3155      else
3156	{
3157	  rtx div = simplify_gen_binary (DIV, mode, op0, op1);
3158	  rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
3159	  rtx adj = round_sdiv_adjust (mode, mod, op1);
3160	  return simplify_gen_binary (PLUS, mode, div, adj);
3161	}
3162
3163    case ROUND_MOD_EXPR:
3164      if (unsignedp)
3165	{
3166	  rtx mod = simplify_gen_binary (UMOD, mode, op0, op1);
3167	  rtx adj = round_udiv_adjust (mode, mod, op1);
3168	  adj = simplify_gen_unary (NEG, mode,
3169				    simplify_gen_binary (MULT, mode, adj, op1),
3170				    mode);
3171	  return simplify_gen_binary (PLUS, mode, mod, adj);
3172	}
3173      else
3174	{
3175	  rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
3176	  rtx adj = round_sdiv_adjust (mode, mod, op1);
3177	  adj = simplify_gen_unary (NEG, mode,
3178				    simplify_gen_binary (MULT, mode, adj, op1),
3179				    mode);
3180	  return simplify_gen_binary (PLUS, mode, mod, adj);
3181	}
3182
3183    case LSHIFT_EXPR:
3184      return simplify_gen_binary (ASHIFT, mode, op0, op1);
3185
3186    case RSHIFT_EXPR:
3187      if (unsignedp)
3188	return simplify_gen_binary (LSHIFTRT, mode, op0, op1);
3189      else
3190	return simplify_gen_binary (ASHIFTRT, mode, op0, op1);
3191
3192    case LROTATE_EXPR:
3193      return simplify_gen_binary (ROTATE, mode, op0, op1);
3194
3195    case RROTATE_EXPR:
3196      return simplify_gen_binary (ROTATERT, mode, op0, op1);
3197
3198    case MIN_EXPR:
3199      return simplify_gen_binary (unsignedp ? UMIN : SMIN, mode, op0, op1);
3200
3201    case MAX_EXPR:
3202      return simplify_gen_binary (unsignedp ? UMAX : SMAX, mode, op0, op1);
3203
3204    case BIT_AND_EXPR:
3205    case TRUTH_AND_EXPR:
3206      return simplify_gen_binary (AND, mode, op0, op1);
3207
3208    case BIT_IOR_EXPR:
3209    case TRUTH_OR_EXPR:
3210      return simplify_gen_binary (IOR, mode, op0, op1);
3211
3212    case BIT_XOR_EXPR:
3213    case TRUTH_XOR_EXPR:
3214      return simplify_gen_binary (XOR, mode, op0, op1);
3215
3216    case TRUTH_ANDIF_EXPR:
3217      return gen_rtx_IF_THEN_ELSE (mode, op0, op1, const0_rtx);
3218
3219    case TRUTH_ORIF_EXPR:
3220      return gen_rtx_IF_THEN_ELSE (mode, op0, const_true_rtx, op1);
3221
3222    case TRUTH_NOT_EXPR:
3223      return simplify_gen_relational (EQ, mode, inner_mode, op0, const0_rtx);
3224
3225    case LT_EXPR:
3226      return simplify_gen_relational (unsignedp ? LTU : LT, mode, inner_mode,
3227				      op0, op1);
3228
3229    case LE_EXPR:
3230      return simplify_gen_relational (unsignedp ? LEU : LE, mode, inner_mode,
3231				      op0, op1);
3232
3233    case GT_EXPR:
3234      return simplify_gen_relational (unsignedp ? GTU : GT, mode, inner_mode,
3235				      op0, op1);
3236
3237    case GE_EXPR:
3238      return simplify_gen_relational (unsignedp ? GEU : GE, mode, inner_mode,
3239				      op0, op1);
3240
3241    case EQ_EXPR:
3242      return simplify_gen_relational (EQ, mode, inner_mode, op0, op1);
3243
3244    case NE_EXPR:
3245      return simplify_gen_relational (NE, mode, inner_mode, op0, op1);
3246
3247    case UNORDERED_EXPR:
3248      return simplify_gen_relational (UNORDERED, mode, inner_mode, op0, op1);
3249
3250    case ORDERED_EXPR:
3251      return simplify_gen_relational (ORDERED, mode, inner_mode, op0, op1);
3252
3253    case UNLT_EXPR:
3254      return simplify_gen_relational (UNLT, mode, inner_mode, op0, op1);
3255
3256    case UNLE_EXPR:
3257      return simplify_gen_relational (UNLE, mode, inner_mode, op0, op1);
3258
3259    case UNGT_EXPR:
3260      return simplify_gen_relational (UNGT, mode, inner_mode, op0, op1);
3261
3262    case UNGE_EXPR:
3263      return simplify_gen_relational (UNGE, mode, inner_mode, op0, op1);
3264
3265    case UNEQ_EXPR:
3266      return simplify_gen_relational (UNEQ, mode, inner_mode, op0, op1);
3267
3268    case LTGT_EXPR:
3269      return simplify_gen_relational (LTGT, mode, inner_mode, op0, op1);
3270
3271    case COND_EXPR:
3272      return gen_rtx_IF_THEN_ELSE (mode, op0, op1, op2);
3273
3274    case COMPLEX_EXPR:
3275      gcc_assert (COMPLEX_MODE_P (mode));
3276      if (GET_MODE (op0) == VOIDmode)
3277	op0 = gen_rtx_CONST (GET_MODE_INNER (mode), op0);
3278      if (GET_MODE (op1) == VOIDmode)
3279	op1 = gen_rtx_CONST (GET_MODE_INNER (mode), op1);
3280      return gen_rtx_CONCAT (mode, op0, op1);
3281
3282    case CONJ_EXPR:
3283      if (GET_CODE (op0) == CONCAT)
3284	return gen_rtx_CONCAT (mode, XEXP (op0, 0),
3285			       simplify_gen_unary (NEG, GET_MODE_INNER (mode),
3286						   XEXP (op0, 1),
3287						   GET_MODE_INNER (mode)));
3288      else
3289	{
3290	  enum machine_mode imode = GET_MODE_INNER (mode);
3291	  rtx re, im;
3292
3293	  if (MEM_P (op0))
3294	    {
3295	      re = adjust_address_nv (op0, imode, 0);
3296	      im = adjust_address_nv (op0, imode, GET_MODE_SIZE (imode));
3297	    }
3298	  else
3299	    {
3300	      enum machine_mode ifmode = int_mode_for_mode (mode);
3301	      enum machine_mode ihmode = int_mode_for_mode (imode);
3302	      rtx halfsize;
3303	      if (ifmode == BLKmode || ihmode == BLKmode)
3304		return NULL;
3305	      halfsize = GEN_INT (GET_MODE_BITSIZE (ihmode));
3306	      re = op0;
3307	      if (mode != ifmode)
3308		re = gen_rtx_SUBREG (ifmode, re, 0);
3309	      re = gen_rtx_ZERO_EXTRACT (ihmode, re, halfsize, const0_rtx);
3310	      if (imode != ihmode)
3311		re = gen_rtx_SUBREG (imode, re, 0);
3312	      im = copy_rtx (op0);
3313	      if (mode != ifmode)
3314		im = gen_rtx_SUBREG (ifmode, im, 0);
3315	      im = gen_rtx_ZERO_EXTRACT (ihmode, im, halfsize, halfsize);
3316	      if (imode != ihmode)
3317		im = gen_rtx_SUBREG (imode, im, 0);
3318	    }
3319	  im = gen_rtx_NEG (imode, im);
3320	  return gen_rtx_CONCAT (mode, re, im);
3321	}
3322
3323    case ADDR_EXPR:
3324      op0 = expand_debug_expr (TREE_OPERAND (exp, 0));
3325      if (!op0 || !MEM_P (op0))
3326	{
3327	  if ((TREE_CODE (TREE_OPERAND (exp, 0)) == VAR_DECL
3328	       || TREE_CODE (TREE_OPERAND (exp, 0)) == PARM_DECL
3329	       || TREE_CODE (TREE_OPERAND (exp, 0)) == RESULT_DECL)
3330	      && (!TREE_ADDRESSABLE (TREE_OPERAND (exp, 0))
3331		  || target_for_debug_bind (TREE_OPERAND (exp, 0))))
3332	    return gen_rtx_DEBUG_IMPLICIT_PTR (mode, TREE_OPERAND (exp, 0));
3333
3334	  if (handled_component_p (TREE_OPERAND (exp, 0)))
3335	    {
3336	      HOST_WIDE_INT bitoffset, bitsize, maxsize;
3337	      tree decl
3338		= get_ref_base_and_extent (TREE_OPERAND (exp, 0),
3339					   &bitoffset, &bitsize, &maxsize);
3340	      if ((TREE_CODE (decl) == VAR_DECL
3341		   || TREE_CODE (decl) == PARM_DECL
3342		   || TREE_CODE (decl) == RESULT_DECL)
3343		  && (!TREE_ADDRESSABLE (decl)
3344		      || target_for_debug_bind (decl))
3345		  && (bitoffset % BITS_PER_UNIT) == 0
3346		  && bitsize > 0
3347		  && bitsize == maxsize)
3348		return plus_constant (gen_rtx_DEBUG_IMPLICIT_PTR (mode, decl),
3349				      bitoffset / BITS_PER_UNIT);
3350	    }
3351
3352	  return NULL;
3353	}
3354
3355      as = TYPE_ADDR_SPACE (TREE_TYPE (exp));
3356      op0 = convert_debug_memory_address (mode, XEXP (op0, 0), as);
3357
3358      return op0;
3359
3360    case VECTOR_CST:
3361      exp = build_constructor_from_list (TREE_TYPE (exp),
3362					 TREE_VECTOR_CST_ELTS (exp));
3363      /* Fall through.  */
3364
3365    case CONSTRUCTOR:
3366      if (TREE_CLOBBER_P (exp))
3367	return NULL;
3368      else if (TREE_CODE (TREE_TYPE (exp)) == VECTOR_TYPE)
3369	{
3370	  unsigned i;
3371	  tree val;
3372
3373	  op0 = gen_rtx_CONCATN
3374	    (mode, rtvec_alloc (TYPE_VECTOR_SUBPARTS (TREE_TYPE (exp))));
3375
3376	  FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), i, val)
3377	    {
3378	      op1 = expand_debug_expr (val);
3379	      if (!op1)
3380		return NULL;
3381	      XVECEXP (op0, 0, i) = op1;
3382	    }
3383
3384	  if (i < TYPE_VECTOR_SUBPARTS (TREE_TYPE (exp)))
3385	    {
3386	      op1 = expand_debug_expr
3387		(build_zero_cst (TREE_TYPE (TREE_TYPE (exp))));
3388
3389	      if (!op1)
3390		return NULL;
3391
3392	      for (; i < TYPE_VECTOR_SUBPARTS (TREE_TYPE (exp)); i++)
3393		XVECEXP (op0, 0, i) = op1;
3394	    }
3395
3396	  return op0;
3397	}
3398      else
3399	goto flag_unsupported;
3400
3401    case CALL_EXPR:
3402      /* ??? Maybe handle some builtins?  */
3403      return NULL;
3404
3405    case SSA_NAME:
3406      {
3407	gimple g = get_gimple_for_ssa_name (exp);
3408	if (g)
3409	  {
3410	    op0 = expand_debug_expr (gimple_assign_rhs_to_tree (g));
3411	    if (!op0)
3412	      return NULL;
3413	  }
3414	else
3415	  {
3416	    int part = var_to_partition (SA.map, exp);
3417
3418	    if (part == NO_PARTITION)
3419	      {
3420		/* If this is a reference to the incoming value of a
3421		   parameter that is never used in the code, or whose
3422		   incoming value is never used in the code, use the
3423		   PARM_DECL's DECL_RTL if set.  */
3424		if (SSA_NAME_IS_DEFAULT_DEF (exp)
3425		    && TREE_CODE (SSA_NAME_VAR (exp)) == PARM_DECL)
3426		  {
3427		    op0 = expand_debug_parm_decl (SSA_NAME_VAR (exp));
3428		    if (op0)
3429		      goto adjust_mode;
3430		    op0 = expand_debug_expr (SSA_NAME_VAR (exp));
3431		    if (op0)
3432		      goto adjust_mode;
3433		  }
3434		return NULL;
3435	      }
3436
3437	    gcc_assert (part >= 0 && (unsigned)part < SA.map->num_partitions);
3438
3439	    op0 = copy_rtx (SA.partition_to_pseudo[part]);
3440	  }
3441	goto adjust_mode;
3442      }
3443
3444    case ERROR_MARK:
3445      return NULL;
3446
    /* Vector operations.  For most of these tree codes there is no
       corresponding RTL code.  */
3448    case REALIGN_LOAD_EXPR:
3449    case REDUC_MAX_EXPR:
3450    case REDUC_MIN_EXPR:
3451    case REDUC_PLUS_EXPR:
3452    case VEC_COND_EXPR:
3453    case VEC_LSHIFT_EXPR:
3454    case VEC_PACK_FIX_TRUNC_EXPR:
3455    case VEC_PACK_SAT_EXPR:
3456    case VEC_PACK_TRUNC_EXPR:
3457    case VEC_RSHIFT_EXPR:
3458    case VEC_UNPACK_FLOAT_HI_EXPR:
3459    case VEC_UNPACK_FLOAT_LO_EXPR:
3460    case VEC_UNPACK_HI_EXPR:
3461    case VEC_UNPACK_LO_EXPR:
3462    case VEC_WIDEN_MULT_HI_EXPR:
3463    case VEC_WIDEN_MULT_LO_EXPR:
3464    case VEC_WIDEN_LSHIFT_HI_EXPR:
3465    case VEC_WIDEN_LSHIFT_LO_EXPR:
3466    case VEC_PERM_EXPR:
3467      return NULL;
3468
    /* Misc codes.  */
3470    case ADDR_SPACE_CONVERT_EXPR:
3471    case FIXED_CONVERT_EXPR:
3472    case OBJ_TYPE_REF:
3473    case WITH_SIZE_EXPR:
3474      return NULL;
3475
3476    case DOT_PROD_EXPR:
3477      if (SCALAR_INT_MODE_P (GET_MODE (op0))
3478	  && SCALAR_INT_MODE_P (mode))
3479	{
3480	  op0
3481	    = simplify_gen_unary (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp,
3482									  0)))
3483				  ? ZERO_EXTEND : SIGN_EXTEND, mode, op0,
3484				  inner_mode);
3485	  op1
3486	    = simplify_gen_unary (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp,
3487									  1)))
3488				  ? ZERO_EXTEND : SIGN_EXTEND, mode, op1,
3489				  inner_mode);
3490	  op0 = simplify_gen_binary (MULT, mode, op0, op1);
3491	  return simplify_gen_binary (PLUS, mode, op0, op2);
3492	}
3493      return NULL;
3494
3495    case WIDEN_MULT_EXPR:
3496    case WIDEN_MULT_PLUS_EXPR:
3497    case WIDEN_MULT_MINUS_EXPR:
3498      if (SCALAR_INT_MODE_P (GET_MODE (op0))
3499	  && SCALAR_INT_MODE_P (mode))
3500	{
3501	  inner_mode = GET_MODE (op0);
3502	  if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))))
3503	    op0 = simplify_gen_unary (ZERO_EXTEND, mode, op0, inner_mode);
3504	  else
3505	    op0 = simplify_gen_unary (SIGN_EXTEND, mode, op0, inner_mode);
3506	  if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1))))
3507	    op1 = simplify_gen_unary (ZERO_EXTEND, mode, op1, inner_mode);
3508	  else
3509	    op1 = simplify_gen_unary (SIGN_EXTEND, mode, op1, inner_mode);
3510	  op0 = simplify_gen_binary (MULT, mode, op0, op1);
3511	  if (TREE_CODE (exp) == WIDEN_MULT_EXPR)
3512	    return op0;
3513	  else if (TREE_CODE (exp) == WIDEN_MULT_PLUS_EXPR)
3514	    return simplify_gen_binary (PLUS, mode, op0, op2);
3515	  else
3516	    return simplify_gen_binary (MINUS, mode, op2, op0);
3517	}
3518      return NULL;
3519
3520    case WIDEN_SUM_EXPR:
3521    case WIDEN_LSHIFT_EXPR:
3522      if (SCALAR_INT_MODE_P (GET_MODE (op0))
3523	  && SCALAR_INT_MODE_P (mode))
3524	{
3525	  op0
3526	    = simplify_gen_unary (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp,
3527									  0)))
3528				  ? ZERO_EXTEND : SIGN_EXTEND, mode, op0,
3529				  inner_mode);
3530	  return simplify_gen_binary (TREE_CODE (exp) == WIDEN_LSHIFT_EXPR
3531				      ? ASHIFT : PLUS, mode, op0, op1);
3532	}
3533      return NULL;
3534
3535    case FMA_EXPR:
3536      return simplify_gen_ternary (FMA, mode, inner_mode, op0, op1, op2);
3537
3538    default:
3539    flag_unsupported:
3540#ifdef ENABLE_CHECKING
3541      debug_tree (exp);
3542      gcc_unreachable ();
3543#else
3544      return NULL;
3545#endif
3546    }
3547}
3548
3549/* Return an RTX equivalent to the source bind value of the tree expression
3550   EXP.  */
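/* For illustration (a hypothetical case): if a PARM_DECL's incoming
   value was completely optimized away, so that it has neither DECL_RTL
   nor DECL_INCOMING_RTL, the source bind may still expand to a
   DEBUG_PARAMETER_REF for the parameter's abstract origin, letting the
   debugger report the value the caller passed in.  */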
3551
3552static rtx
3553expand_debug_source_expr (tree exp)
3554{
3555  rtx op0 = NULL_RTX;
3556  enum machine_mode mode = VOIDmode, inner_mode;
3557
3558  switch (TREE_CODE (exp))
3559    {
3560    case PARM_DECL:
3561      {
3562	mode = DECL_MODE (exp);
3563	op0 = expand_debug_parm_decl (exp);
3564	if (op0)
3565	   break;
	/* See whether this is an argument that has been completely
	   optimized out.  */
3568	if (!DECL_RTL_SET_P (exp)
3569	    && !DECL_INCOMING_RTL (exp)
3570	    && DECL_ABSTRACT_ORIGIN (current_function_decl))
3571	  {
3572	    tree aexp = exp;
3573	    if (DECL_ABSTRACT_ORIGIN (exp))
3574	      aexp = DECL_ABSTRACT_ORIGIN (exp);
3575	    if (DECL_CONTEXT (aexp)
3576		== DECL_ABSTRACT_ORIGIN (current_function_decl))
3577	      {
3578		VEC(tree, gc) **debug_args;
3579		unsigned int ix;
3580		tree ddecl;
3581#ifdef ENABLE_CHECKING
3582		tree parm;
3583		for (parm = DECL_ARGUMENTS (current_function_decl);
3584		     parm; parm = DECL_CHAIN (parm))
3585		  gcc_assert (parm != exp
3586			      && DECL_ABSTRACT_ORIGIN (parm) != aexp);
3587#endif
3588		debug_args = decl_debug_args_lookup (current_function_decl);
3589		if (debug_args != NULL)
3590		  {
3591		    for (ix = 0; VEC_iterate (tree, *debug_args, ix, ddecl);
3592			 ix += 2)
3593		      if (ddecl == aexp)
3594			return gen_rtx_DEBUG_PARAMETER_REF (mode, aexp);
3595		  }
3596	      }
3597	  }
3598	break;
3599      }
3600    default:
3601      break;
3602    }
3603
3604  if (op0 == NULL_RTX)
3605    return NULL_RTX;
3606
3607  inner_mode = GET_MODE (op0);
3608  if (mode == inner_mode)
3609    return op0;
3610
3611  if (FLOAT_MODE_P (mode) && FLOAT_MODE_P (inner_mode))
3612    {
3613      if (GET_MODE_BITSIZE (mode) == GET_MODE_BITSIZE (inner_mode))
3614	op0 = simplify_gen_subreg (mode, op0, inner_mode, 0);
3615      else if (GET_MODE_BITSIZE (mode) < GET_MODE_BITSIZE (inner_mode))
3616	op0 = simplify_gen_unary (FLOAT_TRUNCATE, mode, op0, inner_mode);
3617      else
3618	op0 = simplify_gen_unary (FLOAT_EXTEND, mode, op0, inner_mode);
3619    }
3620  else if (FLOAT_MODE_P (mode))
3621    gcc_unreachable ();
3622  else if (FLOAT_MODE_P (inner_mode))
3623    {
3624      if (TYPE_UNSIGNED (TREE_TYPE (exp)))
3625	op0 = simplify_gen_unary (UNSIGNED_FIX, mode, op0, inner_mode);
3626      else
3627	op0 = simplify_gen_unary (FIX, mode, op0, inner_mode);
3628    }
3629  else if (CONSTANT_P (op0)
3630	   || GET_MODE_BITSIZE (mode) <= GET_MODE_BITSIZE (inner_mode))
3631    op0 = simplify_gen_subreg (mode, op0, inner_mode,
3632			       subreg_lowpart_offset (mode, inner_mode));
3633  else if (TYPE_UNSIGNED (TREE_TYPE (exp)))
3634    op0 = simplify_gen_unary (ZERO_EXTEND, mode, op0, inner_mode);
3635  else
3636    op0 = simplify_gen_unary (SIGN_EXTEND, mode, op0, inner_mode);
3637
3638  return op0;
3639}
3640
/* Ensure INSN_VAR_LOCATION_LOC (insn) doesn't have unbounded complexity.
3642   Allow 4 levels of rtl nesting for most rtl codes, and if we see anything
3643   deeper than that, create DEBUG_EXPRs and emit DEBUG_INSNs before INSN.  */
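/* Illustrative sketch (hypothetical, not RTL taken from a real dump):
   if the location is an arithmetic expression whose operands are
   themselves nested more than four levels deep, the sub-rtx reached at
   depth 4 (unless it is a non-MEM object such as a REG or a constant,
   or a CLOBBER) is pulled out into a fresh DEBUG_EXPR, and a DEBUG_INSN
   binding that DEBUG_EXPR to the extracted sub-expression is emitted
   just before INSN.  */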
3644
3645static void
3646avoid_complex_debug_insns (rtx insn, rtx *exp_p, int depth)
3647{
3648  rtx exp = *exp_p;
3649  const char *format_ptr;
3650  int i, j;
3651
3652  if (exp == NULL_RTX)
3653    return;
3654
3655  if ((OBJECT_P (exp) && !MEM_P (exp)) || GET_CODE (exp) == CLOBBER)
3656    return;
3657
3658  if (depth == 4)
3659    {
3660      /* Create DEBUG_EXPR (and DEBUG_EXPR_DECL).  */
3661      rtx dval = make_debug_expr_from_rtl (exp);
3662
3663      /* Emit a debug bind insn before INSN.  */
3664      rtx bind = gen_rtx_VAR_LOCATION (GET_MODE (exp),
3665				       DEBUG_EXPR_TREE_DECL (dval), exp,
3666				       VAR_INIT_STATUS_INITIALIZED);
3667
3668      emit_debug_insn_before (bind, insn);
3669      *exp_p = dval;
3670      return;
3671    }
3672
3673  format_ptr = GET_RTX_FORMAT (GET_CODE (exp));
3674  for (i = 0; i < GET_RTX_LENGTH (GET_CODE (exp)); i++)
3675    switch (*format_ptr++)
3676      {
3677      case 'e':
3678	avoid_complex_debug_insns (insn, &XEXP (exp, i), depth + 1);
3679	break;
3680
3681      case 'E':
3682      case 'V':
3683	for (j = 0; j < XVECLEN (exp, i); j++)
3684	  avoid_complex_debug_insns (insn, &XVECEXP (exp, i, j), depth + 1);
3685	break;
3686
3687      default:
3688	break;
3689      }
3690}
3691
3692/* Expand the _LOCs in debug insns.  We run this after expanding all
3693   regular insns, so that any variables referenced in the function
3694   will have their DECL_RTLs set.  */
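/* For example (hypothetically), a debug bind whose value is the tree
   expression 'x_1 + 7' is turned here into a VAR_LOCATION whose
   location is the corresponding RTL, roughly (plus (reg X) (const_int 7));
   if the value cannot be expanded at all, the location becomes the
   result of gen_rtx_UNKNOWN_VAR_LOC (), roughly meaning the value is
   simply unavailable at that point.  */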
3695
3696static void
3697expand_debug_locations (void)
3698{
3699  rtx insn;
3700  rtx last = get_last_insn ();
3701  int save_strict_alias = flag_strict_aliasing;
3702
  /* Creating new alias sets while setting up memory attributes causes
     -fcompare-debug failures, even though it doesn't bring about any
     codegen changes.  */
3706  flag_strict_aliasing = 0;
3707
3708  for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
3709    if (DEBUG_INSN_P (insn))
3710      {
3711	tree value = (tree)INSN_VAR_LOCATION_LOC (insn);
3712	rtx val, prev_insn, insn2;
3713	enum machine_mode mode;
3714
3715	if (value == NULL_TREE)
3716	  val = NULL_RTX;
3717	else
3718	  {
3719	    if (INSN_VAR_LOCATION_STATUS (insn)
3720		== VAR_INIT_STATUS_UNINITIALIZED)
3721	      val = expand_debug_source_expr (value);
3722	    else
3723	      val = expand_debug_expr (value);
3724	    gcc_assert (last == get_last_insn ());
3725	  }
3726
3727	if (!val)
3728	  val = gen_rtx_UNKNOWN_VAR_LOC ();
3729	else
3730	  {
3731	    mode = GET_MODE (INSN_VAR_LOCATION (insn));
3732
3733	    gcc_assert (mode == GET_MODE (val)
3734			|| (GET_MODE (val) == VOIDmode
3735			    && (CONST_INT_P (val)
3736				|| GET_CODE (val) == CONST_FIXED
3737				|| GET_CODE (val) == CONST_DOUBLE
3738				|| GET_CODE (val) == LABEL_REF)));
3739	  }
3740
3741	INSN_VAR_LOCATION_LOC (insn) = val;
3742	prev_insn = PREV_INSN (insn);
3743	for (insn2 = insn; insn2 != prev_insn; insn2 = PREV_INSN (insn2))
3744	  avoid_complex_debug_insns (insn2, &INSN_VAR_LOCATION_LOC (insn2), 0);
3745      }
3746
3747  flag_strict_aliasing = save_strict_alias;
3748}
3749
3750/* Expand basic block BB from GIMPLE trees to RTL.  */
3751
3752static basic_block
3753expand_gimple_basic_block (basic_block bb)
3754{
3755  gimple_stmt_iterator gsi;
3756  gimple_seq stmts;
3757  gimple stmt = NULL;
3758  rtx note, last;
3759  edge e;
3760  edge_iterator ei;
3761  void **elt;
3762
3763  if (dump_file)
3764    fprintf (dump_file, "\n;; Generating RTL for gimple basic block %d\n",
3765	     bb->index);
3766
3767  /* Note that since we are now transitioning from GIMPLE to RTL, we
3768     cannot use the gsi_*_bb() routines because they expect the basic
3769     block to be in GIMPLE, instead of RTL.  Therefore, we need to
3770     access the BB sequence directly.  */
3771  stmts = bb_seq (bb);
3772  bb->il.gimple = NULL;
3773  rtl_profile_for_bb (bb);
3774  init_rtl_bb_info (bb);
3775  bb->flags |= BB_RTL;
3776
  /* Remove the RETURN_EXPR if we may fall through to the exit
     instead.  */
3779  gsi = gsi_last (stmts);
3780  if (!gsi_end_p (gsi)
3781      && gimple_code (gsi_stmt (gsi)) == GIMPLE_RETURN)
3782    {
3783      gimple ret_stmt = gsi_stmt (gsi);
3784
3785      gcc_assert (single_succ_p (bb));
3786      gcc_assert (single_succ (bb) == EXIT_BLOCK_PTR);
3787
3788      if (bb->next_bb == EXIT_BLOCK_PTR
3789	  && !gimple_return_retval (ret_stmt))
3790	{
3791	  gsi_remove (&gsi, false);
3792	  single_succ_edge (bb)->flags |= EDGE_FALLTHRU;
3793	}
3794    }
3795
3796  gsi = gsi_start (stmts);
3797  if (!gsi_end_p (gsi))
3798    {
3799      stmt = gsi_stmt (gsi);
3800      if (gimple_code (stmt) != GIMPLE_LABEL)
3801	stmt = NULL;
3802    }
3803
3804  elt = pointer_map_contains (lab_rtx_for_bb, bb);
3805
3806  if (stmt || elt)
3807    {
3808      last = get_last_insn ();
3809
3810      if (stmt)
3811	{
3812	  expand_gimple_stmt (stmt);
3813	  gsi_next (&gsi);
3814	}
3815
3816      if (elt)
3817	emit_label ((rtx) *elt);
3818
      /* Java emits line number notes at the top of labels.
	 ??? Make this go away once line number notes are obsoleted.  */
3821      BB_HEAD (bb) = NEXT_INSN (last);
3822      if (NOTE_P (BB_HEAD (bb)))
3823	BB_HEAD (bb) = NEXT_INSN (BB_HEAD (bb));
3824      note = emit_note_after (NOTE_INSN_BASIC_BLOCK, BB_HEAD (bb));
3825
3826      maybe_dump_rtl_for_gimple_stmt (stmt, last);
3827    }
3828  else
3829    note = BB_HEAD (bb) = emit_note (NOTE_INSN_BASIC_BLOCK);
3830
3831  NOTE_BASIC_BLOCK (note) = bb;
3832
3833  for (; !gsi_end_p (gsi); gsi_next (&gsi))
3834    {
3835      basic_block new_bb;
3836
3837      stmt = gsi_stmt (gsi);
3838
3839      /* If this statement is a non-debug one, and we generate debug
3840	 insns, then this one might be the last real use of a TERed
3841	 SSA_NAME, but where there are still some debug uses further
3842	 down.  Expanding the current SSA name in such further debug
3843	 uses by their RHS might lead to wrong debug info, as coalescing
3844	 might make the operands of such RHS be placed into the same
3845	 pseudo as something else.  Like so:
3846	   a_1 = a_0 + 1;   // Assume a_1 is TERed and a_0 is dead
3847	   use(a_1);
3848	   a_2 = ...
3849           #DEBUG ... => a_1
3850	 As a_0 and a_2 don't overlap in lifetime, assume they are coalesced.
	 If we were now to expand a_1 by its RHS (a_0 + 1) in the debug use,
3852	 the write to a_2 would actually have clobbered the place which
3853	 formerly held a_0.
3854
3855	 So, instead of that, we recognize the situation, and generate
3856	 debug temporaries at the last real use of TERed SSA names:
3857	   a_1 = a_0 + 1;
3858           #DEBUG #D1 => a_1
3859	   use(a_1);
3860	   a_2 = ...
3861           #DEBUG ... => #D1
3862	 */
3863      if (MAY_HAVE_DEBUG_INSNS
3864	  && SA.values
3865	  && !is_gimple_debug (stmt))
3866	{
3867	  ssa_op_iter iter;
3868	  tree op;
3869	  gimple def;
3870
3871	  location_t sloc = get_curr_insn_source_location ();
3872	  tree sblock = get_curr_insn_block ();
3873
3874	  /* Look for SSA names that have their last use here (TERed
3875	     names always have only one real use).  */
3876	  FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_USE)
3877	    if ((def = get_gimple_for_ssa_name (op)))
3878	      {
3879		imm_use_iterator imm_iter;
3880		use_operand_p use_p;
3881		bool have_debug_uses = false;
3882
3883		FOR_EACH_IMM_USE_FAST (use_p, imm_iter, op)
3884		  {
3885		    if (gimple_debug_bind_p (USE_STMT (use_p)))
3886		      {
3887			have_debug_uses = true;
3888			break;
3889		      }
3890		  }
3891
3892		if (have_debug_uses)
3893		  {
		    /* OP is a TERed SSA name, with DEF its defining
3895		       statement, and where OP is used in further debug
3896		       instructions.  Generate a debug temporary, and
3897		       replace all uses of OP in debug insns with that
3898		       temporary.  */
3899		    gimple debugstmt;
3900		    tree value = gimple_assign_rhs_to_tree (def);
3901		    tree vexpr = make_node (DEBUG_EXPR_DECL);
3902		    rtx val;
3903		    enum machine_mode mode;
3904
3905		    set_curr_insn_source_location (gimple_location (def));
3906		    set_curr_insn_block (gimple_block (def));
3907
3908		    DECL_ARTIFICIAL (vexpr) = 1;
3909		    TREE_TYPE (vexpr) = TREE_TYPE (value);
3910		    if (DECL_P (value))
3911		      mode = DECL_MODE (value);
3912		    else
3913		      mode = TYPE_MODE (TREE_TYPE (value));
3914		    DECL_MODE (vexpr) = mode;
3915
3916		    val = gen_rtx_VAR_LOCATION
3917			(mode, vexpr, (rtx)value, VAR_INIT_STATUS_INITIALIZED);
3918
3919		    emit_debug_insn (val);
3920
3921		    FOR_EACH_IMM_USE_STMT (debugstmt, imm_iter, op)
3922		      {
3923			if (!gimple_debug_bind_p (debugstmt))
3924			  continue;
3925
3926			FOR_EACH_IMM_USE_ON_STMT (use_p, imm_iter)
3927			  SET_USE (use_p, vexpr);
3928
3929			update_stmt (debugstmt);
3930		      }
3931		  }
3932	      }
3933	  set_curr_insn_source_location (sloc);
3934	  set_curr_insn_block (sblock);
3935	}
3936
3937      currently_expanding_gimple_stmt = stmt;
3938
3939      /* Expand this statement, then evaluate the resulting RTL and
3940	 fixup the CFG accordingly.  */
3941      if (gimple_code (stmt) == GIMPLE_COND)
3942	{
3943	  new_bb = expand_gimple_cond (bb, stmt);
3944	  if (new_bb)
3945	    return new_bb;
3946	}
3947      else if (gimple_debug_bind_p (stmt))
3948	{
3949	  location_t sloc = get_curr_insn_source_location ();
3950	  tree sblock = get_curr_insn_block ();
3951	  gimple_stmt_iterator nsi = gsi;
3952
3953	  for (;;)
3954	    {
3955	      tree var = gimple_debug_bind_get_var (stmt);
3956	      tree value;
3957	      rtx val;
3958	      enum machine_mode mode;
3959
3960	      if (TREE_CODE (var) != DEBUG_EXPR_DECL
3961		  && TREE_CODE (var) != LABEL_DECL
3962		  && !target_for_debug_bind (var))
3963		goto delink_debug_stmt;
3964
3965	      if (gimple_debug_bind_has_value_p (stmt))
3966		value = gimple_debug_bind_get_value (stmt);
3967	      else
3968		value = NULL_TREE;
3969
3970	      last = get_last_insn ();
3971
3972	      set_curr_insn_source_location (gimple_location (stmt));
3973	      set_curr_insn_block (gimple_block (stmt));
3974
3975	      if (DECL_P (var))
3976		mode = DECL_MODE (var);
3977	      else
3978		mode = TYPE_MODE (TREE_TYPE (var));
3979
3980	      val = gen_rtx_VAR_LOCATION
3981		(mode, var, (rtx)value, VAR_INIT_STATUS_INITIALIZED);
3982
3983	      emit_debug_insn (val);
3984
3985	      if (dump_file && (dump_flags & TDF_DETAILS))
3986		{
3987		  /* We can't dump the insn with a TREE where an RTX
3988		     is expected.  */
3989		  PAT_VAR_LOCATION_LOC (val) = const0_rtx;
3990		  maybe_dump_rtl_for_gimple_stmt (stmt, last);
3991		  PAT_VAR_LOCATION_LOC (val) = (rtx)value;
3992		}
3993
3994	    delink_debug_stmt:
	      /* In order not to generate too many debug temporaries,
		 we delink all uses of debug statements we already expanded.
		 Therefore, debug statements between the definition and the
		 real use of TERed SSA names will continue to use the SSA
		 name, and will not be replaced with debug temps.  */
4000	      delink_stmt_imm_use (stmt);
4001
4002	      gsi = nsi;
4003	      gsi_next (&nsi);
4004	      if (gsi_end_p (nsi))
4005		break;
4006	      stmt = gsi_stmt (nsi);
4007	      if (!gimple_debug_bind_p (stmt))
4008		break;
4009	    }
4010
4011	  set_curr_insn_source_location (sloc);
4012	  set_curr_insn_block (sblock);
4013	}
4014      else if (gimple_debug_source_bind_p (stmt))
4015	{
4016	  location_t sloc = get_curr_insn_source_location ();
4017	  tree sblock = get_curr_insn_block ();
4018	  tree var = gimple_debug_source_bind_get_var (stmt);
4019	  tree value = gimple_debug_source_bind_get_value (stmt);
4020	  rtx val;
4021	  enum machine_mode mode;
4022
4023	  last = get_last_insn ();
4024
4025	  set_curr_insn_source_location (gimple_location (stmt));
4026	  set_curr_insn_block (gimple_block (stmt));
4027
4028	  mode = DECL_MODE (var);
4029
4030	  val = gen_rtx_VAR_LOCATION (mode, var, (rtx)value,
4031				      VAR_INIT_STATUS_UNINITIALIZED);
4032
4033	  emit_debug_insn (val);
4034
4035	  if (dump_file && (dump_flags & TDF_DETAILS))
4036	    {
4037	      /* We can't dump the insn with a TREE where an RTX
4038		 is expected.  */
4039	      PAT_VAR_LOCATION_LOC (val) = const0_rtx;
4040	      maybe_dump_rtl_for_gimple_stmt (stmt, last);
4041	      PAT_VAR_LOCATION_LOC (val) = (rtx)value;
4042	    }
4043
4044	  set_curr_insn_source_location (sloc);
4045	  set_curr_insn_block (sblock);
4046	}
4047      else
4048	{
4049	  if (is_gimple_call (stmt) && gimple_call_tail_p (stmt))
4050	    {
4051	      bool can_fallthru;
4052	      new_bb = expand_gimple_tailcall (bb, stmt, &can_fallthru);
4053	      if (new_bb)
4054		{
4055		  if (can_fallthru)
4056		    bb = new_bb;
4057		  else
4058		    return new_bb;
4059		}
4060	    }
4061	  else
4062	    {
4063	      def_operand_p def_p;
4064	      def_p = SINGLE_SSA_DEF_OPERAND (stmt, SSA_OP_DEF);
4065
4066	      if (def_p != NULL)
4067		{
4068		  /* Ignore this stmt if it is in the list of
4069		     replaceable expressions.  */
4070		  if (SA.values
4071		      && bitmap_bit_p (SA.values,
4072				       SSA_NAME_VERSION (DEF_FROM_PTR (def_p))))
4073		    continue;
4074		}
4075	      last = expand_gimple_stmt (stmt);
4076	      maybe_dump_rtl_for_gimple_stmt (stmt, last);
4077	    }
4078	}
4079    }
4080
4081  currently_expanding_gimple_stmt = NULL;
4082
4083  /* Expand implicit goto and convert goto_locus.  */
4084  FOR_EACH_EDGE (e, ei, bb->succs)
4085    {
4086      if (e->goto_locus && e->goto_block)
4087	{
4088	  set_curr_insn_source_location (e->goto_locus);
4089	  set_curr_insn_block (e->goto_block);
4090	  e->goto_locus = curr_insn_locator ();
4091	}
4092      e->goto_block = NULL;
4093      if ((e->flags & EDGE_FALLTHRU) && e->dest != bb->next_bb)
4094	{
4095	  emit_jump (label_rtx_for_bb (e->dest));
4096	  e->flags &= ~EDGE_FALLTHRU;
4097	}
4098    }
4099
  /* Expanded RTL can create a jump as the last instruction of the block.
     This might later be assumed to be a jump to the successor and break
     edge insertion.  We need to insert a dummy move to prevent this.
     See PR41440.  */
4103  if (single_succ_p (bb)
4104      && (single_succ_edge (bb)->flags & EDGE_FALLTHRU)
4105      && (last = get_last_insn ())
4106      && JUMP_P (last))
4107    {
4108      rtx dummy = gen_reg_rtx (SImode);
4109      emit_insn_after_noloc (gen_move_insn (dummy, dummy), last, NULL);
4110    }
4111
4112  do_pending_stack_adjust ();
4113
4114  /* Find the block tail.  The last insn in the block is the insn
4115     before a barrier and/or table jump insn.  */
4116  last = get_last_insn ();
4117  if (BARRIER_P (last))
4118    last = PREV_INSN (last);
4119  if (JUMP_TABLE_DATA_P (last))
4120    last = PREV_INSN (PREV_INSN (last));
4121  BB_END (bb) = last;
4122
4123  update_bb_for_insn (bb);
4124
4125  return bb;
4126}
4127
4128
4129/* Create a basic block for initialization code.  */
4130
4131static basic_block
4132construct_init_block (void)
4133{
4134  basic_block init_block, first_block;
4135  edge e = NULL;
4136  int flags;
4137
4138  /* Multiple entry points not supported yet.  */
4139  gcc_assert (EDGE_COUNT (ENTRY_BLOCK_PTR->succs) == 1);
4140  init_rtl_bb_info (ENTRY_BLOCK_PTR);
4141  init_rtl_bb_info (EXIT_BLOCK_PTR);
4142  ENTRY_BLOCK_PTR->flags |= BB_RTL;
4143  EXIT_BLOCK_PTR->flags |= BB_RTL;
4144
4145  e = EDGE_SUCC (ENTRY_BLOCK_PTR, 0);
4146
  /* When the entry edge points to the first basic block, we don't need
     a jump; otherwise we have to jump to the proper target.  */
4149  if (e && e->dest != ENTRY_BLOCK_PTR->next_bb)
4150    {
4151      tree label = gimple_block_label (e->dest);
4152
4153      emit_jump (label_rtx (label));
4154      flags = 0;
4155    }
4156  else
4157    flags = EDGE_FALLTHRU;
4158
4159  init_block = create_basic_block (NEXT_INSN (get_insns ()),
4160				   get_last_insn (),
4161				   ENTRY_BLOCK_PTR);
4162  init_block->frequency = ENTRY_BLOCK_PTR->frequency;
4163  init_block->count = ENTRY_BLOCK_PTR->count;
4164  if (e)
4165    {
4166      first_block = e->dest;
4167      redirect_edge_succ (e, init_block);
4168      e = make_edge (init_block, first_block, flags);
4169    }
4170  else
4171    e = make_edge (init_block, EXIT_BLOCK_PTR, EDGE_FALLTHRU);
4172  e->probability = REG_BR_PROB_BASE;
4173  e->count = ENTRY_BLOCK_PTR->count;
4174
4175  update_bb_for_insn (init_block);
4176  return init_block;
4177}
4178
4179/* For each lexical block, set BLOCK_NUMBER to the depth at which it is
4180   found in the block tree.  */
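/* For example, the outermost block (DECL_INITIAL of the function) gets
   level 0, its immediate subblocks level 1, their subblocks level 2,
   and so on; sibling blocks reached through BLOCK_CHAIN share the same
   level.  */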
4181
4182static void
4183set_block_levels (tree block, int level)
4184{
4185  while (block)
4186    {
4187      BLOCK_NUMBER (block) = level;
4188      set_block_levels (BLOCK_SUBBLOCKS (block), level + 1);
4189      block = BLOCK_CHAIN (block);
4190    }
4191}
4192
4193/* Create a block containing landing pads and similar stuff.  */
4194
4195static void
4196construct_exit_block (void)
4197{
4198  rtx head = get_last_insn ();
4199  rtx end;
4200  basic_block exit_block;
4201  edge e, e2;
4202  unsigned ix;
4203  edge_iterator ei;
4204  rtx orig_end = BB_END (EXIT_BLOCK_PTR->prev_bb);
4205
4206  rtl_profile_for_bb (EXIT_BLOCK_PTR);
4207
4208  /* Make sure the locus is set to the end of the function, so that
4209     epilogue line numbers and warnings are set properly.  */
4210  if (cfun->function_end_locus != UNKNOWN_LOCATION)
4211    input_location = cfun->function_end_locus;
4212
4213  /* The following insns belong to the top scope.  */
4214  set_curr_insn_block (DECL_INITIAL (current_function_decl));
4215
4216  /* Generate rtl for function exit.  */
4217  expand_function_end ();
4218
4219  end = get_last_insn ();
4220  if (head == end)
4221    return;
  /* While emitting the function end we could have moved the end of the
     last basic block.  */
4224  BB_END (EXIT_BLOCK_PTR->prev_bb) = orig_end;
4225  while (NEXT_INSN (head) && NOTE_P (NEXT_INSN (head)))
4226    head = NEXT_INSN (head);
4227  exit_block = create_basic_block (NEXT_INSN (head), end,
4228				   EXIT_BLOCK_PTR->prev_bb);
4229  exit_block->frequency = EXIT_BLOCK_PTR->frequency;
4230  exit_block->count = EXIT_BLOCK_PTR->count;
4231
4232  ix = 0;
4233  while (ix < EDGE_COUNT (EXIT_BLOCK_PTR->preds))
4234    {
4235      e = EDGE_PRED (EXIT_BLOCK_PTR, ix);
4236      if (!(e->flags & EDGE_ABNORMAL))
4237	redirect_edge_succ (e, exit_block);
4238      else
4239	ix++;
4240    }
4241
4242  e = make_edge (exit_block, EXIT_BLOCK_PTR, EDGE_FALLTHRU);
4243  e->probability = REG_BR_PROB_BASE;
4244  e->count = EXIT_BLOCK_PTR->count;
4245  FOR_EACH_EDGE (e2, ei, EXIT_BLOCK_PTR->preds)
4246    if (e2 != e)
4247      {
4248	e->count -= e2->count;
4249	exit_block->count -= e2->count;
4250	exit_block->frequency -= EDGE_FREQUENCY (e2);
4251      }
4252  if (e->count < 0)
4253    e->count = 0;
4254  if (exit_block->count < 0)
4255    exit_block->count = 0;
4256  if (exit_block->frequency < 0)
4257    exit_block->frequency = 0;
4258  update_bb_for_insn (exit_block);
4259}
4260
4261/* Helper function for discover_nonconstant_array_refs.
4262   Look for ARRAY_REF nodes with non-constant indexes and mark them
4263   addressable.  */
4264
4265static tree
4266discover_nonconstant_array_refs_r (tree * tp, int *walk_subtrees,
4267				   void *data ATTRIBUTE_UNUSED)
4268{
4269  tree t = *tp;
4270
4271  if (IS_TYPE_OR_DECL_P (t))
4272    *walk_subtrees = 0;
4273  else if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
4274    {
4275      while (((TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
4276	      && is_gimple_min_invariant (TREE_OPERAND (t, 1))
4277	      && (!TREE_OPERAND (t, 2)
4278		  || is_gimple_min_invariant (TREE_OPERAND (t, 2))))
4279	     || (TREE_CODE (t) == COMPONENT_REF
4280		 && (!TREE_OPERAND (t,2)
4281		     || is_gimple_min_invariant (TREE_OPERAND (t, 2))))
4282	     || TREE_CODE (t) == BIT_FIELD_REF
4283	     || TREE_CODE (t) == REALPART_EXPR
4284	     || TREE_CODE (t) == IMAGPART_EXPR
4285	     || TREE_CODE (t) == VIEW_CONVERT_EXPR
4286	     || CONVERT_EXPR_P (t))
4287	t = TREE_OPERAND (t, 0);
4288
4289      if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
4290	{
4291	  t = get_base_address (t);
4292	  if (t && DECL_P (t)
4293              && DECL_MODE (t) != BLKmode)
4294	    TREE_ADDRESSABLE (t) = 1;
4295	}
4296
4297      *walk_subtrees = 0;
4298    }
4299
4300  return NULL_TREE;
4301}
4302
/* RTL expansion is not able to compile array references with variable
   offsets for arrays stored in a single register.  Discover such
   expressions and mark the variables as addressable to avoid this
   scenario.  */
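/* A hypothetical example: for

     ... = v[i];

   where 'v' is small enough that it would otherwise be kept in a single
   register and 'i' is not a compile-time constant, 'v' is marked
   TREE_ADDRESSABLE here so that it gets a stack slot and the
   variable-index access can be expanded as a memory reference.  */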
4307
4308static void
4309discover_nonconstant_array_refs (void)
4310{
4311  basic_block bb;
4312  gimple_stmt_iterator gsi;
4313
4314  FOR_EACH_BB (bb)
4315    for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
4316      {
4317	gimple stmt = gsi_stmt (gsi);
4318	if (!is_gimple_debug (stmt))
4319	  walk_gimple_op (stmt, discover_nonconstant_array_refs_r, NULL);
4320      }
4321}
4322
/* This function sets crtl->args.internal_arg_pointer to a virtual
   register if DRAP is needed.  The local register allocator will replace
   virtual_incoming_args_rtx with the virtual register.  */
4326
4327static void
4328expand_stack_alignment (void)
4329{
4330  rtx drap_rtx;
4331  unsigned int preferred_stack_boundary;
4332
4333  if (! SUPPORTS_STACK_ALIGNMENT)
4334    return;
4335
4336  if (cfun->calls_alloca
4337      || cfun->has_nonlocal_label
4338      || crtl->has_nonlocal_goto)
4339    crtl->need_drap = true;
4340
4341  /* Call update_stack_boundary here again to update incoming stack
4342     boundary.  It may set incoming stack alignment to a different
4343     value after RTL expansion.  TARGET_FUNCTION_OK_FOR_SIBCALL may
4344     use the minimum incoming stack alignment to check if it is OK
4345     to perform sibcall optimization since sibcall optimization will
4346     only align the outgoing stack to incoming stack boundary.  */
4347  if (targetm.calls.update_stack_boundary)
4348    targetm.calls.update_stack_boundary ();
4349
  /* The incoming stack frame has to be aligned to at least
     parm_stack_boundary.  */
4352  gcc_assert (crtl->parm_stack_boundary <= INCOMING_STACK_BOUNDARY);
4353
4354  /* Update crtl->stack_alignment_estimated and use it later to align
4355     stack.  We check PREFERRED_STACK_BOUNDARY if there may be non-call
4356     exceptions since callgraph doesn't collect incoming stack alignment
4357     in this case.  */
4358  if (cfun->can_throw_non_call_exceptions
4359      && PREFERRED_STACK_BOUNDARY > crtl->preferred_stack_boundary)
4360    preferred_stack_boundary = PREFERRED_STACK_BOUNDARY;
4361  else
4362    preferred_stack_boundary = crtl->preferred_stack_boundary;
4363  if (preferred_stack_boundary > crtl->stack_alignment_estimated)
4364    crtl->stack_alignment_estimated = preferred_stack_boundary;
4365  if (preferred_stack_boundary > crtl->stack_alignment_needed)
4366    crtl->stack_alignment_needed = preferred_stack_boundary;
4367
4368  gcc_assert (crtl->stack_alignment_needed
4369	      <= crtl->stack_alignment_estimated);
4370
4371  crtl->stack_realign_needed
4372    = INCOMING_STACK_BOUNDARY < crtl->stack_alignment_estimated;
4373  crtl->stack_realign_tried = crtl->stack_realign_needed;
4374
4375  crtl->stack_realign_processed = true;
4376
4377  /* Target has to redefine TARGET_GET_DRAP_RTX to support stack
4378     alignment.  */
4379  gcc_assert (targetm.calls.get_drap_rtx != NULL);
4380  drap_rtx = targetm.calls.get_drap_rtx ();
4381
4382  /* stack_realign_drap and drap_rtx must match.  */
4383  gcc_assert ((stack_realign_drap != 0) == (drap_rtx != NULL));
4384
4385  /* Do nothing if NULL is returned, which means DRAP is not needed.  */
4386  if (NULL != drap_rtx)
4387    {
4388      crtl->args.internal_arg_pointer = drap_rtx;
4389
4390      /* Call fixup_tail_calls to clean up REG_EQUIV note if DRAP is
4391         needed. */
4392      fixup_tail_calls ();
4393    }
4394}
4395
4396/* Translate the intermediate representation contained in the CFG
4397   from GIMPLE trees to RTL.
4398
4399   We do conversion per basic block and preserve/update the tree CFG.
4400   This implies we have to do some magic as the CFG can simultaneously
4401   consist of basic blocks containing RTL and GIMPLE trees.  This can
4402   confuse the CFG hooks, so be careful to not manipulate CFG during
4403   the expansion.  */
4404
4405static unsigned int
4406gimple_expand_cfg (void)
4407{
4408  basic_block bb, init_block;
4409  sbitmap blocks;
4410  edge_iterator ei;
4411  edge e;
4412  rtx var_seq;
4413  unsigned i;
4414
4415  timevar_push (TV_OUT_OF_SSA);
4416  rewrite_out_of_ssa (&SA);
4417  timevar_pop (TV_OUT_OF_SSA);
4418  SA.partition_to_pseudo = (rtx *)xcalloc (SA.map->num_partitions,
4419					   sizeof (rtx));
4420
4421  /* Some backends want to know that we are expanding to RTL.  */
4422  currently_expanding_to_rtl = 1;
4423
4424  rtl_profile_for_bb (ENTRY_BLOCK_PTR);
4425
4426  insn_locators_alloc ();
4427  if (!DECL_IS_BUILTIN (current_function_decl))
4428    {
4429      /* Eventually, all FEs should explicitly set function_start_locus.  */
4430      if (cfun->function_start_locus == UNKNOWN_LOCATION)
4431       set_curr_insn_source_location
4432         (DECL_SOURCE_LOCATION (current_function_decl));
4433      else
4434       set_curr_insn_source_location (cfun->function_start_locus);
4435    }
4436  else
4437    set_curr_insn_source_location (UNKNOWN_LOCATION);
4438  set_curr_insn_block (DECL_INITIAL (current_function_decl));
4439  prologue_locator = curr_insn_locator ();
4440
4441#ifdef INSN_SCHEDULING
4442  init_sched_attrs ();
4443#endif
4444
  /* Make sure the first insn is a note, even if we don't want line numbers.
     This guarantees that the first insn will never be deleted.
     Also, final expects a note to appear there.  */
4448  emit_note (NOTE_INSN_DELETED);
4449
4450  /* Mark arrays indexed with non-constant indices with TREE_ADDRESSABLE.  */
4451  discover_nonconstant_array_refs ();
4452
4453  targetm.expand_to_rtl_hook ();
4454  crtl->stack_alignment_needed = STACK_BOUNDARY;
4455  crtl->max_used_stack_slot_alignment = STACK_BOUNDARY;
4456  crtl->stack_alignment_estimated = 0;
4457  crtl->preferred_stack_boundary = STACK_BOUNDARY;
4458  cfun->cfg->max_jumptable_ents = 0;
4459
  /* Resolve the function section.  Some targets, like ARM EABI, rely on
     knowledge of the function section at expansion time to predict the
     distance of calls.  */
4462  resolve_unique_section (current_function_decl, 0, flag_function_sections);
4463
4464  /* Expand the variables recorded during gimple lowering.  */
4465  timevar_push (TV_VAR_EXPAND);
4466  start_sequence ();
4467
4468  expand_used_vars ();
4469
4470  var_seq = get_insns ();
4471  end_sequence ();
4472  timevar_pop (TV_VAR_EXPAND);
4473
4474  /* Honor stack protection warnings.  */
4475  if (warn_stack_protect)
4476    {
4477      if (cfun->calls_alloca)
4478	warning (OPT_Wstack_protector,
4479		 "stack protector not protecting local variables: "
4480                 "variable length buffer");
4481      if (has_short_buffer && !crtl->stack_protect_guard)
4482	warning (OPT_Wstack_protector,
4483		 "stack protector not protecting function: "
4484                 "all local arrays are less than %d bytes long",
4485		 (int) PARAM_VALUE (PARAM_SSP_BUFFER_SIZE));
4486    }
4487
4488  /* Set up parameters and prepare for return, for the function.  */
4489  expand_function_start (current_function_decl);
4490
4491  /* If we emitted any instructions for setting up the variables,
4492     emit them before the FUNCTION_START note.  */
4493  if (var_seq)
4494    {
4495      emit_insn_before (var_seq, parm_birth_insn);
4496
4497      /* In expand_function_end we'll insert the alloca save/restore
	 before parm_birth_insn.  We've just inserted an alloca call.
4499	 Adjust the pointer to match.  */
4500      parm_birth_insn = var_seq;
4501    }
4502
4503  /* Now that we also have the parameter RTXs, copy them over to our
4504     partitions.  */
4505  for (i = 0; i < SA.map->num_partitions; i++)
4506    {
4507      tree var = SSA_NAME_VAR (partition_to_var (SA.map, i));
4508
4509      if (TREE_CODE (var) != VAR_DECL
4510	  && !SA.partition_to_pseudo[i])
4511	SA.partition_to_pseudo[i] = DECL_RTL_IF_SET (var);
4512      gcc_assert (SA.partition_to_pseudo[i]);
4513
4514      /* If this decl was marked as living in multiple places, reset
4515         this now to NULL.  */
4516      if (DECL_RTL_IF_SET (var) == pc_rtx)
4517	SET_DECL_RTL (var, NULL);
4518
      /* Some RTL parts really want to look at DECL_RTL(x) when x
	 was a decl marked in REG_ATTR or MEM_ATTR.  We could use
	 SET_DECL_RTL here to make this available, but that would mean
	 selecting one of the potentially many RTLs for one DECL.  Instead
	 of doing that we simply reset the MEM_EXPR of the RTL in question,
	 so that nobody can get at it and hence nobody can call DECL_RTL
	 on it.  */
4525      if (!DECL_RTL_SET_P (var))
4526	{
4527	  if (MEM_P (SA.partition_to_pseudo[i]))
4528	    set_mem_expr (SA.partition_to_pseudo[i], NULL);
4529	}
4530    }
4531
4532  /* If we have a class containing differently aligned pointers
4533     we need to merge those into the corresponding RTL pointer
4534     alignment.  */
4535  for (i = 1; i < num_ssa_names; i++)
4536    {
4537      tree name = ssa_name (i);
4538      int part;
4539      rtx r;
4540
4541      if (!name
4542	  || !POINTER_TYPE_P (TREE_TYPE (name))
	  /* We might have generated new SSA names in
	     update_alias_info_with_stack_vars.  They will have a NULL
	     defining statement, and won't be part of the partitioning,
	     so ignore those.  */
4547	  || !SSA_NAME_DEF_STMT (name))
4548	continue;
4549      part = var_to_partition (SA.map, name);
4550      if (part == NO_PARTITION)
4551	continue;
4552      r = SA.partition_to_pseudo[part];
4553      if (REG_P (r))
4554	mark_reg_pointer (r, get_pointer_alignment (name));
4555    }
4556
4557  /* If this function is `main', emit a call to `__main'
4558     to run global initializers, etc.  */
4559  if (DECL_NAME (current_function_decl)
4560      && MAIN_NAME_P (DECL_NAME (current_function_decl))
4561      && DECL_FILE_SCOPE_P (current_function_decl))
4562    expand_main_function ();
4563
4564  /* Initialize the stack_protect_guard field.  This must happen after the
4565     call to __main (if any) so that the external decl is initialized.  */
4566  if (crtl->stack_protect_guard)
4567    stack_protect_prologue ();
4568
4569  expand_phi_nodes (&SA);
4570
4571  /* Register rtl specific functions for cfg.  */
4572  rtl_register_cfg_hooks ();
4573
4574  init_block = construct_init_block ();
4575
4576  /* Clear EDGE_EXECUTABLE on the entry edge(s).  It is cleaned from the
4577     remaining edges later.  */
4578  FOR_EACH_EDGE (e, ei, ENTRY_BLOCK_PTR->succs)
4579    e->flags &= ~EDGE_EXECUTABLE;
4580
4581  lab_rtx_for_bb = pointer_map_create ();
4582  FOR_BB_BETWEEN (bb, init_block->next_bb, EXIT_BLOCK_PTR, next_bb)
4583    bb = expand_gimple_basic_block (bb);
4584
4585  if (MAY_HAVE_DEBUG_INSNS)
4586    expand_debug_locations ();
4587
4588  execute_free_datastructures ();
4589  timevar_push (TV_OUT_OF_SSA);
4590  finish_out_of_ssa (&SA);
4591  timevar_pop (TV_OUT_OF_SSA);
4592
4593  timevar_push (TV_POST_EXPAND);
4594  /* We are no longer in SSA form.  */
4595  cfun->gimple_df->in_ssa_p = false;
4596
  /* Expansion is used by optimization passes too; set maybe_hot_insn_p
     conservatively to true until they are all profile aware.  */
4599  pointer_map_destroy (lab_rtx_for_bb);
4600  free_histograms ();
4601
4602  construct_exit_block ();
4603  set_curr_insn_block (DECL_INITIAL (current_function_decl));
4604  insn_locators_finalize ();
4605
4606  /* Zap the tree EH table.  */
4607  set_eh_throw_stmt_table (cfun, NULL);
4608
  /* We need JUMP_LABEL to be set in order to redirect jumps, and hence
     to split edges, which edge insertions might do.  */
4611  rebuild_jump_labels (get_insns ());
4612
4613  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR, next_bb)
4614    {
4615      edge e;
4616      edge_iterator ei;
4617      for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
4618	{
4619	  if (e->insns.r)
4620	    {
4621	      rebuild_jump_labels_chain (e->insns.r);
4622	      /* Avoid putting insns before parm_birth_insn.  */
4623	      if (e->src == ENTRY_BLOCK_PTR
4624		  && single_succ_p (ENTRY_BLOCK_PTR)
4625		  && parm_birth_insn)
4626		{
4627		  rtx insns = e->insns.r;
4628		  e->insns.r = NULL_RTX;
4629		  emit_insn_after_noloc (insns, parm_birth_insn, e->dest);
4630		}
4631	      else
4632		commit_one_edge_insertion (e);
4633	    }
4634	  else
4635	    ei_next (&ei);
4636	}
4637    }
4638
4639  /* We're done expanding trees to RTL.  */
4640  currently_expanding_to_rtl = 0;
4641
4642  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR->next_bb, EXIT_BLOCK_PTR, next_bb)
4643    {
4644      edge e;
4645      edge_iterator ei;
4646      for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
4647	{
4648	  /* Clear EDGE_EXECUTABLE.  This flag is never used in the backend.  */
4649	  e->flags &= ~EDGE_EXECUTABLE;
4650
4651	  /* At the moment not all abnormal edges match the RTL
4652	     representation.  It is safe to remove them here as
4653	     find_many_sub_basic_blocks will rediscover them.
4654	     In the future we should get this fixed properly.  */
4655	  if ((e->flags & EDGE_ABNORMAL)
4656	      && !(e->flags & EDGE_SIBCALL))
4657	    remove_edge (e);
4658	  else
4659	    ei_next (&ei);
4660	}
4661    }
4662
4663  blocks = sbitmap_alloc (last_basic_block);
4664  sbitmap_ones (blocks);
4665  find_many_sub_basic_blocks (blocks);
4666  sbitmap_free (blocks);
4667  purge_all_dead_edges ();
4668
4669  compact_blocks ();
4670
4671  expand_stack_alignment ();
4672
4673#ifdef ENABLE_CHECKING
4674  verify_flow_info ();
4675#endif
4676
4677  /* There's no need to defer outputting this function any more; we
4678     know we want to output it.  */
4679  DECL_DEFER_OUTPUT (current_function_decl) = 0;
4680
4681  /* Now that we're done expanding trees to RTL, we shouldn't have any
4682     more CONCATs anywhere.  */
4683  generating_concat_p = 0;
4684
4685  if (dump_file)
4686    {
4687      fprintf (dump_file,
4688	       "\n\n;;\n;; Full RTL generated for this function:\n;;\n");
4689      /* And the pass manager will dump RTL for us.  */
4690    }
4691
4692  /* If we're emitting a nested function, make sure its parent gets
4693     emitted as well.  Doing otherwise confuses debug info.  */
4694  {
4695    tree parent;
4696    for (parent = DECL_CONTEXT (current_function_decl);
4697	 parent != NULL_TREE;
4698	 parent = get_containing_scope (parent))
4699      if (TREE_CODE (parent) == FUNCTION_DECL)
4700	TREE_SYMBOL_REFERENCED (DECL_ASSEMBLER_NAME (parent)) = 1;
4701  }
4702
  /* We are now committed to emitting code for this function.  Do any
     preparation, such as emitting abstract debug info for the inline
     function before it gets mangled by optimization.  */
4706  if (cgraph_function_possibly_inlined_p (current_function_decl))
4707    (*debug_hooks->outlining_inline_function) (current_function_decl);
4708
4709  TREE_ASM_WRITTEN (current_function_decl) = 1;
4710
4711  /* After expanding, the return labels are no longer needed. */
4712  return_label = NULL;
4713  naked_return_label = NULL;
4714
4715  /* After expanding, the tm_restart map is no longer needed.  */
4716  if (cfun->gimple_df->tm_restart)
4717    {
4718      htab_delete (cfun->gimple_df->tm_restart);
4719      cfun->gimple_df->tm_restart = NULL;
4720    }
4721
4722  /* Tag the blocks with a depth number so that change_scope can find
4723     the common parent easily.  */
4724  set_block_levels (DECL_INITIAL (cfun->decl), 0);
4725  default_rtl_profile ();
4726  timevar_pop (TV_POST_EXPAND);
4727  return 0;
4728}
4729
4730struct rtl_opt_pass pass_expand =
4731{
4732 {
4733  RTL_PASS,
4734  "expand",				/* name */
4735  NULL,                                 /* gate */
4736  gimple_expand_cfg,			/* execute */
4737  NULL,                                 /* sub */
4738  NULL,                                 /* next */
4739  0,                                    /* static_pass_number */
4740  TV_EXPAND,				/* tv_id */
4741  PROP_ssa | PROP_gimple_leh | PROP_cfg
4742    | PROP_gimple_lcx,			/* properties_required */
4743  PROP_rtl,                             /* properties_provided */
4744  PROP_ssa | PROP_trees,		/* properties_destroyed */
4745  TODO_verify_ssa | TODO_verify_flow
4746    | TODO_verify_stmts,		/* todo_flags_start */
4747  TODO_ggc_collect			/* todo_flags_finish */
4748 }
4749};
4750