source: trunk/third/gcc/integrate.c @ 11288

Revision 11288, 109.6 KB checked in by ghudson, 26 years ago
This commit was generated by cvs2svn to compensate for changes in r11287, which included commits to RCS files with non-trunk default branches.
/* Procedure integration for GNU CC.
   Copyright (C) 1988, 91, 93-97, 1998 Free Software Foundation, Inc.
   Contributed by Michael Tiemann (tiemann@cygnus.com)

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 59 Temple Place - Suite 330,
Boston, MA 02111-1307, USA.  */


#include "config.h"
#include <stdio.h>
#include "rtl.h"
#include "tree.h"
#include "regs.h"
#include "flags.h"
#include "insn-config.h"
#include "insn-flags.h"
#include "expr.h"
#include "output.h"
#include "recog.h"
#include "integrate.h"
#include "real.h"
#include "except.h"
#include "function.h"
#include "bytecode.h"

#include "obstack.h"
#define obstack_chunk_alloc     xmalloc
#define obstack_chunk_free      free

extern struct obstack *function_maybepermanent_obstack;

extern tree pushdecl ();
extern tree poplevel ();
/* Similar, but round to the next highest integer that meets the
   alignment.  */
#define CEIL_ROUND(VALUE,ALIGN) (((VALUE) + (ALIGN) - 1) & ~((ALIGN) - 1))
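
/* Editor's example (not part of the original source): CEIL_ROUND rounds
   VALUE up to the next multiple of ALIGN, assuming ALIGN is a power of
   two; e.g. CEIL_ROUND (13, 8) == 16 and CEIL_ROUND (16, 8) == 16.  */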

/* Default max number of insns a function can have and still be inline.
   This is overridden on RISC machines.  */
#ifndef INTEGRATE_THRESHOLD
#define INTEGRATE_THRESHOLD(DECL) \
  (8 * (8 + list_length (DECL_ARGUMENTS (DECL))))
#endif
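
/* Editor's note: with the default INTEGRATE_THRESHOLD above, a function
   with three parameters may have at most 8 * (8 + 3) = 88 insns before
   function_cannot_inline_p, below, rejects it as too large.  */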

static rtx initialize_for_inline PROTO((tree, int, int, int, int));
static void finish_inline       PROTO((tree, rtx));
static void adjust_copied_decl_tree PROTO((tree));
static tree copy_decl_list      PROTO((tree));
static tree copy_decl_tree      PROTO((tree));
static void copy_decl_rtls      PROTO((tree));
static void save_constants      PROTO((rtx *));
static void note_modified_parmregs PROTO((rtx, rtx));
static rtx copy_for_inline      PROTO((rtx));
static void integrate_parm_decls PROTO((tree, struct inline_remap *, rtvec));
static void integrate_decl_tree PROTO((tree, int, struct inline_remap *));
static void save_constants_in_decl_trees PROTO((tree));
static void subst_constants     PROTO((rtx *, rtx, struct inline_remap *));
static void restore_constants   PROTO((rtx *));
static void set_block_origin_self PROTO((tree));
static void set_decl_origin_self PROTO((tree));
static void set_block_abstract_flags PROTO((tree, int));

void set_decl_abstract_flags    PROTO((tree, int));

/* Returns the Ith entry in the label_map contained in MAP.  If the
   Ith entry has not yet been set, return a fresh label.  This function
   performs a lazy initialization of label_map, thereby avoiding huge memory
   explosions when the label_map gets very large.  */

rtx
get_label_from_map (map, i)
     struct inline_remap *map;
     int i;
{
  rtx x = map->label_map[i];

  if (x == NULL_RTX)
    x = map->label_map[i] = gen_label_rtx ();

  return x;
}
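
/* Editor's sketch (not part of the original source): callers are expected
   to index the map by label number rather than reading label_map directly,
   e.g.

     rtx lab = get_label_from_map (map, CODE_LABEL_NUMBER (orig_label));

   so map entries are created lazily, only for labels actually reached.  */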

/* Zero if the current function (whose FUNCTION_DECL is FNDECL)
   is safe and reasonable to integrate into other functions.
   Nonzero means value is a warning message with a single %s
   for the function's name.  */

char *
function_cannot_inline_p (fndecl)
     register tree fndecl;
{
  register rtx insn;
  tree last = tree_last (TYPE_ARG_TYPES (TREE_TYPE (fndecl)));
  int max_insns = INTEGRATE_THRESHOLD (fndecl);
  register int ninsns = 0;
  register tree parms;
  rtx result;

  /* No inlines with varargs.  */
  if ((last && TREE_VALUE (last) != void_type_node)
      || current_function_varargs)
    return "varargs function cannot be inline";

  if (current_function_calls_alloca)
    return "function using alloca cannot be inline";

  if (current_function_contains_functions)
    return "function with nested functions cannot be inline";

  /* If it's not even close, don't even look.  */
  if (!DECL_INLINE (fndecl) && get_max_uid () > 3 * max_insns)
    return "function too large to be inline";

#if 0
  /* Don't inline functions which do not specify a function prototype and
     have BLKmode argument or take the address of a parameter.  */
  for (parms = DECL_ARGUMENTS (fndecl); parms; parms = TREE_CHAIN (parms))
    {
      if (TYPE_MODE (TREE_TYPE (parms)) == BLKmode)
        TREE_ADDRESSABLE (parms) = 1;
      if (last == NULL_TREE && TREE_ADDRESSABLE (parms))
        return "no prototype, and parameter address used; cannot be inline";
    }
#endif

  /* We can't inline functions that return structures
     the old-fashioned PCC way, copying into a static block.  */
  if (current_function_returns_pcc_struct)
    return "inline functions not supported for this return value type";

  /* We can't inline functions that return BLKmode structures in registers.  */
  if (TYPE_MODE (TREE_TYPE (TREE_TYPE (fndecl))) == BLKmode
      && ! aggregate_value_p (TREE_TYPE (TREE_TYPE (fndecl))))
    return "inline functions not supported for this return value type";

  /* We can't inline functions that return structures of varying size.  */
  if (int_size_in_bytes (TREE_TYPE (TREE_TYPE (fndecl))) < 0)
    return "function with varying-size return value cannot be inline";

  /* Cannot inline a function with a varying size argument or one that
     receives a transparent union.  */
  for (parms = DECL_ARGUMENTS (fndecl); parms; parms = TREE_CHAIN (parms))
    {
      if (int_size_in_bytes (TREE_TYPE (parms)) < 0)
        return "function with varying-size parameter cannot be inline";
      else if (TYPE_TRANSPARENT_UNION (TREE_TYPE (parms)))
        return "function with transparent union parameter cannot be inline";
    }

  if (!DECL_INLINE (fndecl) && get_max_uid () > max_insns)
    {
      for (ninsns = 0, insn = get_first_nonparm_insn ();
           insn && ninsns < max_insns;
           insn = NEXT_INSN (insn))
        if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
          ninsns++;

      if (ninsns >= max_insns)
        return "function too large to be inline";
    }

  /* We cannot inline this function if forced_labels is non-zero.  This
     implies that a label in this function was used as an initializer.
     Because labels can not be duplicated, all labels in the function
     will be renamed when it is inlined.  However, there is no way to find
     and fix all variables initialized with addresses of labels in this
     function, hence inlining is impossible.  */

  if (forced_labels)
    return "function with label addresses used in initializers cannot inline";

  /* We cannot inline a nested function that jumps to a nonlocal label.  */
  if (current_function_has_nonlocal_goto)
    return "function with nonlocal goto cannot be inline";

  /* This is a hack, until the inliner is taught about eh regions at
     the start of the function.  */
  for (insn = get_insns ();
       insn
         && ! (GET_CODE (insn) == NOTE
               && NOTE_LINE_NUMBER (insn) == NOTE_INSN_FUNCTION_BEG);
       insn = NEXT_INSN (insn))
    {
      if (insn && GET_CODE (insn) == NOTE
          && NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_BEG)
        return "function with complex parameters cannot be inline";
    }

  /* We can't inline functions that return a PARALLEL rtx.  */
  result = DECL_RTL (DECL_RESULT (fndecl));
  if (result && GET_CODE (result) == PARALLEL)
    return "inline functions not supported for this return value type";

  return 0;
}
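
/* Editor's sketch (not part of the original source): the returned string
   is either zero (safe to inline) or a warning format.  A caller is
   expected to do roughly

     char *msg = function_cannot_inline_p (fndecl);

   and, if MSG is nonzero, report it with the function's name and skip
   saving the insns for inline expansion.  */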

/* Variables used within save_for_inline.  */

/* Mapping from old pseudo-register to new pseudo-registers.
   The first element of this map is reg_map[FIRST_PSEUDO_REGISTER].
   It is allocated in `save_for_inline' and `expand_inline_function',
   and deallocated on exit from each of those routines.  */
static rtx *reg_map;

/* Mapping from old code-labels to new code-labels.
   The first element of this map is label_map[min_labelno].
   It is allocated in `save_for_inline' and `expand_inline_function',
   and deallocated on exit from each of those routines.  */
static rtx *label_map;

/* Mapping from old insn uid's to copied insns.
   It is allocated in `save_for_inline' and `expand_inline_function',
   and deallocated on exit from each of those routines.  */
static rtx *insn_map;

/* Map pseudo reg number into the PARM_DECL for the parm living in the reg.
   Zero for a reg that isn't a parm's home.
   Only reg numbers less than max_parm_reg are mapped here.  */
static tree *parmdecl_map;

/* Keep track of first pseudo-register beyond those that are parms.  */
extern int max_parm_reg;
extern rtx *parm_reg_stack_loc;

/* When an insn is being copied by copy_for_inline,
   this is nonzero if we have copied an ASM_OPERANDS.
   In that case, it is the original input-operand vector.  */
static rtvec orig_asm_operands_vector;

/* When an insn is being copied by copy_for_inline,
   this is nonzero if we have copied an ASM_OPERANDS.
   In that case, it is the copied input-operand vector.  */
static rtvec copy_asm_operands_vector;

/* Likewise, this is the copied constraints vector.  */
static rtvec copy_asm_constraints_vector;

/* In save_for_inline, nonzero if past the parm-initialization insns.  */
static int in_nonparm_insns;

/* Subroutine for `save_for_inline{copying,nocopy}'.  Performs initialization
   needed to save FNDECL's insns and info for future inline expansion.  */

static rtx
initialize_for_inline (fndecl, min_labelno, max_labelno, max_reg, copy)
     tree fndecl;
     int min_labelno;
     int max_labelno;
     int max_reg;
     int copy;
{
  int function_flags, i;
  rtvec arg_vector;
  tree parms;

  /* Compute the values of any flags we must restore when inlining this.  */

  function_flags
    = (current_function_calls_alloca * FUNCTION_FLAGS_CALLS_ALLOCA
       + current_function_calls_setjmp * FUNCTION_FLAGS_CALLS_SETJMP
       + current_function_calls_longjmp * FUNCTION_FLAGS_CALLS_LONGJMP
       + current_function_returns_struct * FUNCTION_FLAGS_RETURNS_STRUCT
       + current_function_returns_pcc_struct * FUNCTION_FLAGS_RETURNS_PCC_STRUCT
       + current_function_needs_context * FUNCTION_FLAGS_NEEDS_CONTEXT
       + current_function_has_nonlocal_label * FUNCTION_FLAGS_HAS_NONLOCAL_LABEL
       + current_function_returns_pointer * FUNCTION_FLAGS_RETURNS_POINTER
       + current_function_uses_const_pool * FUNCTION_FLAGS_USES_CONST_POOL
       + current_function_uses_pic_offset_table * FUNCTION_FLAGS_USES_PIC_OFFSET_TABLE);

  /* Clear out PARMDECL_MAP.  It was allocated in the caller's frame.  */
  bzero ((char *) parmdecl_map, max_parm_reg * sizeof (tree));
  arg_vector = rtvec_alloc (list_length (DECL_ARGUMENTS (fndecl)));

  for (parms = DECL_ARGUMENTS (fndecl), i = 0;
       parms;
       parms = TREE_CHAIN (parms), i++)
    {
      rtx p = DECL_RTL (parms);
      int copied_incoming = 0;

      /* If we have (mem (addressof (mem ...))), use the inner MEM since
         otherwise the copy_rtx call below will not unshare the MEM since
         it shares ADDRESSOF.  */
      if (GET_CODE (p) == MEM && GET_CODE (XEXP (p, 0)) == ADDRESSOF
          && GET_CODE (XEXP (XEXP (p, 0), 0)) == MEM)
        p = XEXP (XEXP (p, 0), 0);

      if (GET_CODE (p) == MEM && copy)
        {
          /* Copy the rtl so that modifications of the addresses
             later in compilation won't affect this arg_vector.
             Virtual register instantiation can screw the address
             of the rtl.  */
          rtx new = copy_rtx (p);

          /* Don't leave the old copy anywhere in this decl.  */
          if (DECL_RTL (parms) == DECL_INCOMING_RTL (parms)
              || (GET_CODE (DECL_RTL (parms)) == MEM
                  && GET_CODE (DECL_INCOMING_RTL (parms)) == MEM
                  && (XEXP (DECL_RTL (parms), 0)
                      == XEXP (DECL_INCOMING_RTL (parms), 0))))
            DECL_INCOMING_RTL (parms) = new, copied_incoming = 1;

          DECL_RTL (parms) = new;
        }

      RTVEC_ELT (arg_vector, i) = p;

      if (GET_CODE (p) == REG)
        parmdecl_map[REGNO (p)] = parms;
      else if (GET_CODE (p) == CONCAT)
        {
          rtx preal = gen_realpart (GET_MODE (XEXP (p, 0)), p);
          rtx pimag = gen_imagpart (GET_MODE (preal), p);

          if (GET_CODE (preal) == REG)
            parmdecl_map[REGNO (preal)] = parms;
          if (GET_CODE (pimag) == REG)
            parmdecl_map[REGNO (pimag)] = parms;
        }

      /* This flag is cleared later
         if the function ever modifies the value of the parm.  */
      TREE_READONLY (parms) = 1;

      /* Copy DECL_INCOMING_RTL if not done already.  This can
         happen if DECL_RTL is a reg.  */
      if (copy && ! copied_incoming)
        {
          p = DECL_INCOMING_RTL (parms);

          /* If we have (mem (addressof (mem ...))), use the inner MEM since
             otherwise the copy_rtx call below will not unshare the MEM since
             it shares ADDRESSOF.  */
          if (GET_CODE (p) == MEM && GET_CODE (XEXP (p, 0)) == ADDRESSOF
              && GET_CODE (XEXP (XEXP (p, 0), 0)) == MEM)
            p = XEXP (XEXP (p, 0), 0);

          if (GET_CODE (p) == MEM)
            DECL_INCOMING_RTL (parms) = copy_rtx (p);
        }
    }

  /* Assume we start out in the insns that set up the parameters.  */
  in_nonparm_insns = 0;

  /* The list of DECL_SAVED_INSNS starts off with a header which
     contains the following information:

     the first insn of the function (not including the insns that copy
     parameters into registers).
     the first parameter insn of the function,
     the first label used by that function,
     the last label used by that function,
     the highest register number used for parameters,
     the total number of registers used,
     the size of the incoming stack area for parameters,
     the number of bytes popped on return,
     the stack slot list,
     the labels that are forced to exist,
     some flags that are used to restore compiler globals,
     the value of current_function_outgoing_args_size,
     the original argument vector,
     the original DECL_INITIAL,
     and pointers to the table of pseudo regs, pointer flags, and alignment. */

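  /* Editor's note: these header fields are read back in
     expand_inline_function, below, through accessor macros such as
     FIRST_LABELNO, LAST_LABELNO, MAX_REGNUM, FUNCTION_FLAGS,
     OUTGOING_ARGS_SIZE and ORIGINAL_ARG_VECTOR.  */
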
  return gen_inline_header_rtx (NULL_RTX, NULL_RTX, min_labelno, max_labelno,
                                max_parm_reg, max_reg,
                                current_function_args_size,
                                current_function_pops_args,
                                stack_slot_list, forced_labels, function_flags,
                                current_function_outgoing_args_size,
                                arg_vector, (rtx) DECL_INITIAL (fndecl),
                                (rtvec) regno_reg_rtx, regno_pointer_flag,
                                regno_pointer_align,
                                (rtvec) parm_reg_stack_loc);
}

/* Subroutine for `save_for_inline{copying,nocopy}'.  Finishes up the
   things that must be done to make FNDECL expandable as an inline function.
   HEAD contains the chain of insns to which FNDECL will expand.  */

static void
finish_inline (fndecl, head)
     tree fndecl;
     rtx head;
{
  FIRST_FUNCTION_INSN (head) = get_first_nonparm_insn ();
  FIRST_PARM_INSN (head) = get_insns ();
  DECL_SAVED_INSNS (fndecl) = head;
  DECL_FRAME_SIZE (fndecl) = get_frame_size ();
}

/* Adjust the BLOCK_END_NOTE pointers in a given copied DECL tree so that
   they all point to the new (copied) rtxs.  */

static void
adjust_copied_decl_tree (block)
     register tree block;
{
  register tree subblock;
  register rtx original_end;

  original_end = BLOCK_END_NOTE (block);
  if (original_end)
    {
      BLOCK_END_NOTE (block) = (rtx) NOTE_SOURCE_FILE (original_end);
      NOTE_SOURCE_FILE (original_end) = 0;
    }

  /* Process all subblocks.  */
  for (subblock = BLOCK_SUBBLOCKS (block);
       subblock;
       subblock = TREE_CHAIN (subblock))
    adjust_copied_decl_tree (subblock);
}
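
/* Editor's note: the swap above is the second half of a handshake with the
   NOTE-copying loop in save_for_inline_copying, below.  When a
   NOTE_INSN_BLOCK_END note is copied, the original note's NOTE_SOURCE_FILE
   field is temporarily pointed at the copy; here that pointer is moved
   into BLOCK_END_NOTE and the borrowed field is cleared.  */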

/* Make the insns and PARM_DECLs of the current function permanent
   and record other information in DECL_SAVED_INSNS to allow inlining
   of this function in subsequent calls.

   This function is called when we are going to immediately compile
   the insns for FNDECL.  The insns in maybepermanent_obstack cannot be
   modified by the compilation process, so we copy all of them to
   new storage and consider the new insns to be the insn chain to be
   compiled.  Our caller (rest_of_compilation) saves the original
   DECL_INITIAL and DECL_ARGUMENTS; here we copy them.  */

/* ??? The nonlocal_label list should be adjusted also.  However, since
   a function that contains a nested function never gets inlined currently,
   the nonlocal_label list will always be empty, so we don't worry about
   it for now.  */

void
save_for_inline_copying (fndecl)
     tree fndecl;
{
  rtx first_insn, last_insn, insn;
  rtx head, copy;
  int max_labelno, min_labelno, i, len;
  int max_reg;
  int max_uid;
  rtx first_nonparm_insn;
  char *new, *new1;
  rtx *new_parm_reg_stack_loc;

  /* Make and emit a return-label if we have not already done so.
     Do this before recording the bounds on label numbers.  */

  if (return_label == 0)
    {
      return_label = gen_label_rtx ();
      emit_label (return_label);
    }

  /* Get some bounds on the labels and registers used.  */

  max_labelno = max_label_num ();
  min_labelno = get_first_label_num ();
  max_reg = max_reg_num ();

  /* Set up PARMDECL_MAP which maps pseudo-reg number to its PARM_DECL.
     Later we set TREE_READONLY to 0 if the parm is modified inside the fn.
     Also set up ARG_VECTOR, which holds the unmodified DECL_RTL values
     for the parms, prior to elimination of virtual registers.
     These values are needed for substituting parms properly.  */

  parmdecl_map = (tree *) alloca (max_parm_reg * sizeof (tree));

  head = initialize_for_inline (fndecl, min_labelno, max_labelno, max_reg, 1);

  if (current_function_uses_const_pool)
    {
      /* Replace any constant pool references with the actual constant.  We
         will put the constants back in the copy made below.  */
      for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
        if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
          {
            save_constants (&PATTERN (insn));
            if (REG_NOTES (insn))
              save_constants (&REG_NOTES (insn));
          }

      /* Also scan all decls, and replace any constant pool references with the
         actual constant.  */
      save_constants_in_decl_trees (DECL_INITIAL (fndecl));

      /* Clear out the constant pool so that we can recreate it with the
         copied constants below.  */
      init_const_rtx_hash_table ();
      clear_const_double_mem ();
    }

  max_uid = INSN_UID (head);

  /* We have now allocated all that needs to be allocated permanently
     on the rtx obstack.  Set our high-water mark, so that we
     can free the rest of this when the time comes.  */

  preserve_data ();

  /* Copy the chain of insns of this function.
     Install the copied chain as the insns of this function,
     for continued compilation;
     the original chain is recorded as the DECL_SAVED_INSNS
     for inlining future calls.  */

  /* If there are insns that copy parms from the stack into pseudo registers,
     those insns are not copied.  `expand_inline_function' must
     emit the correct code to handle such things.  */

  insn = get_insns ();
  if (GET_CODE (insn) != NOTE)
    abort ();
  first_insn = rtx_alloc (NOTE);
  NOTE_SOURCE_FILE (first_insn) = NOTE_SOURCE_FILE (insn);
  NOTE_LINE_NUMBER (first_insn) = NOTE_LINE_NUMBER (insn);
  INSN_UID (first_insn) = INSN_UID (insn);
  PREV_INSN (first_insn) = NULL;
  NEXT_INSN (first_insn) = NULL;
  last_insn = first_insn;

  /* Each pseudo-reg in the old insn chain must have a unique rtx in the copy.
     Make these new rtx's now, and install them in regno_reg_rtx, so they
     will be the official pseudo-reg rtx's for the rest of compilation.  */

  reg_map = (rtx *) savealloc (regno_pointer_flag_length * sizeof (rtx));

  len = sizeof (struct rtx_def) + (GET_RTX_LENGTH (REG) - 1) * sizeof (rtunion);
  for (i = max_reg - 1; i > LAST_VIRTUAL_REGISTER; i--)
    reg_map[i] = (rtx) obstack_copy (function_maybepermanent_obstack,
                                     regno_reg_rtx[i], len);

  regno_reg_rtx = reg_map;

  /* Put copies of all the virtual register rtx into the new regno_reg_rtx.  */
  regno_reg_rtx[VIRTUAL_INCOMING_ARGS_REGNUM] = virtual_incoming_args_rtx;
  regno_reg_rtx[VIRTUAL_STACK_VARS_REGNUM] = virtual_stack_vars_rtx;
  regno_reg_rtx[VIRTUAL_STACK_DYNAMIC_REGNUM] = virtual_stack_dynamic_rtx;
  regno_reg_rtx[VIRTUAL_OUTGOING_ARGS_REGNUM] = virtual_outgoing_args_rtx;

  /* Likewise each label rtx must have a unique rtx as its copy.  */

  /* We used to use alloca here, but the size of what it would try to
     allocate would occasionally cause it to exceed the stack limit and
     cause unpredictable core dumps.  Some examples were > 2Mb in size.  */
  label_map = (rtx *) xmalloc ((max_labelno) * sizeof (rtx));

  for (i = min_labelno; i < max_labelno; i++)
    label_map[i] = gen_label_rtx ();

  /* Likewise for parm_reg_stack_loc.  */
  new_parm_reg_stack_loc = (rtx *) savealloc (max_parm_reg * sizeof (rtx));
  for (i = 0; i < max_parm_reg; i++)
    new_parm_reg_stack_loc[i] = copy_for_inline (parm_reg_stack_loc[i]);

  parm_reg_stack_loc = new_parm_reg_stack_loc;

  /* Record the mapping of old insns to copied insns.  */

  insn_map = (rtx *) alloca (max_uid * sizeof (rtx));
  bzero ((char *) insn_map, max_uid * sizeof (rtx));

  /* Get the insn which signals the end of parameter setup code.  */
  first_nonparm_insn = get_first_nonparm_insn ();

  /* Copy any entries in regno_reg_rtx or DECL_RTLs that reference MEM
     (the former occurs when a variable has its address taken)
     since these may be shared and can be changed by virtual
     register instantiation.  DECL_RTL values for our arguments
     have already been copied by initialize_for_inline.  */
  for (i = LAST_VIRTUAL_REGISTER + 1; i < max_reg; i++)
    if (GET_CODE (regno_reg_rtx[i]) == MEM)
      XEXP (regno_reg_rtx[i], 0)
        = copy_for_inline (XEXP (regno_reg_rtx[i], 0));

  /* Copy the tree of subblocks of the function, and the decls in them.
     We will use the copy for compiling this function, then restore the original
     subblocks and decls for use when inlining this function.

     Several parts of the compiler modify BLOCK trees.  In particular,
     instantiate_virtual_regs will instantiate any virtual regs
     mentioned in the DECL_RTLs of the decls, and loop
     unrolling will replicate any BLOCK trees inside an unrolled loop.

     The modified subblocks or DECL_RTLs would be incorrect for the original rtl
     which we will use for inlining.  The rtl might even contain pseudoregs
     whose space has been freed.  */

  DECL_INITIAL (fndecl) = copy_decl_tree (DECL_INITIAL (fndecl));
  DECL_ARGUMENTS (fndecl) = copy_decl_list (DECL_ARGUMENTS (fndecl));

  /* Now copy each DECL_RTL which is a MEM,
     so it is safe to modify their addresses.  */
  copy_decl_rtls (DECL_INITIAL (fndecl));

  /* The fndecl node acts as its own progenitor, so mark it as such.  */
  DECL_ABSTRACT_ORIGIN (fndecl) = fndecl;

  /* Now copy the chain of insns.  Do this twice: the first time, copy the
     insn itself and its body; the second time, copy the REG_NOTES.  This is
     because a REG_NOTE may have a forward pointer to another insn.  */

  for (insn = NEXT_INSN (insn); insn; insn = NEXT_INSN (insn))
    {
      orig_asm_operands_vector = 0;

      if (insn == first_nonparm_insn)
        in_nonparm_insns = 1;

      switch (GET_CODE (insn))
        {
        case NOTE:
          /* No need to keep these.  */
          if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_DELETED)
            continue;

          copy = rtx_alloc (NOTE);
          NOTE_LINE_NUMBER (copy) = NOTE_LINE_NUMBER (insn);
          if (NOTE_LINE_NUMBER (insn) != NOTE_INSN_BLOCK_END)
            NOTE_SOURCE_FILE (copy) = NOTE_SOURCE_FILE (insn);
          else
            {
              NOTE_SOURCE_FILE (insn) = (char *) copy;
              NOTE_SOURCE_FILE (copy) = 0;
            }
          if (NOTE_LINE_NUMBER (copy) == NOTE_INSN_EH_REGION_BEG
              || NOTE_LINE_NUMBER (copy) == NOTE_INSN_EH_REGION_END)
            {
              /* We have to forward these both to match the new exception
                 region.  */
              NOTE_BLOCK_NUMBER (copy)
                = CODE_LABEL_NUMBER (label_map[NOTE_BLOCK_NUMBER (copy)]);
            }
          RTX_INTEGRATED_P (copy) = RTX_INTEGRATED_P (insn);
          break;

        case INSN:
        case JUMP_INSN:
        case CALL_INSN:
          copy = rtx_alloc (GET_CODE (insn));

          if (GET_CODE (insn) == CALL_INSN)
            CALL_INSN_FUNCTION_USAGE (copy)
              = copy_for_inline (CALL_INSN_FUNCTION_USAGE (insn));

          PATTERN (copy) = copy_for_inline (PATTERN (insn));
          INSN_CODE (copy) = -1;
          LOG_LINKS (copy) = NULL_RTX;
          RTX_INTEGRATED_P (copy) = RTX_INTEGRATED_P (insn);
          break;

        case CODE_LABEL:
          copy = label_map[CODE_LABEL_NUMBER (insn)];
          LABEL_NAME (copy) = LABEL_NAME (insn);
          break;

        case BARRIER:
          copy = rtx_alloc (BARRIER);
          break;

        default:
          abort ();
        }
      INSN_UID (copy) = INSN_UID (insn);
      insn_map[INSN_UID (insn)] = copy;
      NEXT_INSN (last_insn) = copy;
      PREV_INSN (copy) = last_insn;
      last_insn = copy;
    }

  adjust_copied_decl_tree (DECL_INITIAL (fndecl));

  /* Now copy the REG_NOTES.  */
  for (insn = NEXT_INSN (get_insns ()); insn; insn = NEXT_INSN (insn))
    if (GET_RTX_CLASS (GET_CODE (insn)) == 'i'
        && insn_map[INSN_UID (insn)])
      REG_NOTES (insn_map[INSN_UID (insn)])
        = copy_for_inline (REG_NOTES (insn));

  NEXT_INSN (last_insn) = NULL;

  finish_inline (fndecl, head);

  /* Make new versions of the register tables.  */
  new = (char *) savealloc (regno_pointer_flag_length);
  bcopy (regno_pointer_flag, new, regno_pointer_flag_length);
  new1 = (char *) savealloc (regno_pointer_flag_length);
  bcopy (regno_pointer_align, new1, regno_pointer_flag_length);

  regno_pointer_flag = new;
  regno_pointer_align = new1;

  set_new_first_and_last_insn (first_insn, last_insn);

  if (label_map)
    free (label_map);
}

/* Return a copy of a chain of nodes, chained through the TREE_CHAIN field.
   For example, this can copy a list made of TREE_LIST nodes.  While copying,
   for each node copied which doesn't already have its DECL_ABSTRACT_ORIGIN
   set to some non-zero value, set the DECL_ABSTRACT_ORIGIN of the copy to
   point to the corresponding (abstract) original node.  */

static tree
copy_decl_list (list)
     tree list;
{
  tree head;
  register tree prev, next;

  if (list == 0)
    return 0;

  head = prev = copy_node (list);
  if (DECL_ABSTRACT_ORIGIN (head) == NULL_TREE)
    DECL_ABSTRACT_ORIGIN (head) = list;
  next = TREE_CHAIN (list);
  while (next)
    {
      register tree copy;

      copy = copy_node (next);
      if (DECL_ABSTRACT_ORIGIN (copy) == NULL_TREE)
        DECL_ABSTRACT_ORIGIN (copy) = next;
      TREE_CHAIN (prev) = copy;
      prev = copy;
      next = TREE_CHAIN (next);
    }
  return head;
}

/* Make a copy of the entire tree of blocks BLOCK, and return it.  */

static tree
copy_decl_tree (block)
     tree block;
{
  tree t, vars, subblocks;

  vars = copy_decl_list (BLOCK_VARS (block));
  subblocks = 0;

  /* Process all subblocks.  */
  for (t = BLOCK_SUBBLOCKS (block); t; t = TREE_CHAIN (t))
    {
      tree copy = copy_decl_tree (t);
      TREE_CHAIN (copy) = subblocks;
      subblocks = copy;
    }

  t = copy_node (block);
  BLOCK_VARS (t) = vars;
  BLOCK_SUBBLOCKS (t) = nreverse (subblocks);
  /* If the BLOCK being cloned is already marked as having been instantiated
     from something else, then leave that `origin' marking alone.  Otherwise,
     mark the clone as having originated from the BLOCK we are cloning.  */
  if (BLOCK_ABSTRACT_ORIGIN (t) == NULL_TREE)
    BLOCK_ABSTRACT_ORIGIN (t) = block;
  return t;
}

/* Copy DECL_RTLs in all decls in the given BLOCK node.  */

static void
copy_decl_rtls (block)
     tree block;
{
  tree t;

  for (t = BLOCK_VARS (block); t; t = TREE_CHAIN (t))
    if (DECL_RTL (t) && GET_CODE (DECL_RTL (t)) == MEM)
      DECL_RTL (t) = copy_for_inline (DECL_RTL (t));

  /* Process all subblocks.  */
  for (t = BLOCK_SUBBLOCKS (block); t; t = TREE_CHAIN (t))
    copy_decl_rtls (t);
}

/* Make the insns and PARM_DECLs of the current function permanent
   and record other information in DECL_SAVED_INSNS to allow inlining
   of this function in subsequent calls.

   This routine need not copy any insns because we are not going
   to immediately compile the insns in the insn chain.  There
   are two cases when we would compile the insns for FNDECL:
   (1) when FNDECL is expanded inline, and (2) when FNDECL needs to
   be output at the end of other compilation, because somebody took
   its address.  In the first case, the insns of FNDECL are copied
   as it is expanded inline, so FNDECL's saved insns are not
   modified.  In the second case, FNDECL is used for the last time,
   so modifying the rtl is not a problem.

   We don't have to worry about FNDECL being inline expanded by
   other functions which are written at the end of compilation
   because flag_no_inline is turned on when we begin writing
   functions at the end of compilation.  */

void
save_for_inline_nocopy (fndecl)
     tree fndecl;
{
  rtx insn;
  rtx head;
  rtx first_nonparm_insn;

  /* Set up PARMDECL_MAP which maps pseudo-reg number to its PARM_DECL.
     Later we set TREE_READONLY to 0 if the parm is modified inside the fn.
     Also set up ARG_VECTOR, which holds the unmodified DECL_RTL values
     for the parms, prior to elimination of virtual registers.
     These values are needed for substituting parms properly.  */

  parmdecl_map = (tree *) alloca (max_parm_reg * sizeof (tree));

  /* Make and emit a return-label if we have not already done so.  */

  if (return_label == 0)
    {
      return_label = gen_label_rtx ();
      emit_label (return_label);
    }

  head = initialize_for_inline (fndecl, get_first_label_num (),
                                max_label_num (), max_reg_num (), 0);

  /* If there are insns that copy parms from the stack into pseudo registers,
     those insns are not copied.  `expand_inline_function' must
     emit the correct code to handle such things.  */

  insn = get_insns ();
  if (GET_CODE (insn) != NOTE)
    abort ();

  /* Get the insn which signals the end of parameter setup code.  */
  first_nonparm_insn = get_first_nonparm_insn ();

  /* Now just scan the chain of insns to see what happens to our
     PARM_DECLs.  If a PARM_DECL is used but never modified, we
     can substitute its rtl directly when expanding inline (and
     perform constant folding when its incoming value is constant).
     Otherwise, we have to copy its value into a new register and track
     the new register's life.  */

  for (insn = NEXT_INSN (insn); insn; insn = NEXT_INSN (insn))
    {
      if (insn == first_nonparm_insn)
        in_nonparm_insns = 1;

      if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
        {
          if (current_function_uses_const_pool)
            {
              /* Replace any constant pool references with the actual constant.
                 We will put the constant back if we need to write the
                 function out after all.  */
              save_constants (&PATTERN (insn));
              if (REG_NOTES (insn))
                save_constants (&REG_NOTES (insn));
            }

          /* Record what interesting things happen to our parameters.  */
          note_stores (PATTERN (insn), note_modified_parmregs);
        }
    }

  /* Also scan all decls, and replace any constant pool references with the
     actual constant.  */
  save_constants_in_decl_trees (DECL_INITIAL (fndecl));

  /* We have now allocated all that needs to be allocated permanently
     on the rtx obstack.  Set our high-water mark, so that we
     can free the rest of this when the time comes.  */

  preserve_data ();

  finish_inline (fndecl, head);
}

/* Given PX, a pointer into an insn, search for references to the constant
   pool.  Replace each with a CONST that has the mode of the original
   constant, contains the constant, and has RTX_INTEGRATED_P set.
   Similarly, constant pool addresses not enclosed in a MEM are replaced
   with an ADDRESS and CONST rtx which also gives the constant, its
   mode, the mode of the address, and has RTX_INTEGRATED_P set.  */

static void
save_constants (px)
     rtx *px;
{
  rtx x;
  int i, j;

 again:
  x = *px;

  /* If this is a CONST_DOUBLE, don't try to fix things up in
     CONST_DOUBLE_MEM, because this is an infinite recursion.  */
  if (GET_CODE (x) == CONST_DOUBLE)
    return;
  else if (GET_CODE (x) == MEM && GET_CODE (XEXP (x, 0)) == SYMBOL_REF
           && CONSTANT_POOL_ADDRESS_P (XEXP (x, 0)))
    {
      enum machine_mode const_mode = get_pool_mode (XEXP (x, 0));
      rtx new = gen_rtx (CONST, const_mode, get_pool_constant (XEXP (x, 0)));
      RTX_INTEGRATED_P (new) = 1;

      /* If the MEM was in a different mode than the constant (perhaps we
         were only looking at the low-order part), surround it with a
         SUBREG so we can save both modes.  */

      if (GET_MODE (x) != const_mode)
        {
          new = gen_rtx (SUBREG, GET_MODE (x), new, 0);
          RTX_INTEGRATED_P (new) = 1;
        }

      *px = new;
      save_constants (&XEXP (*px, 0));
    }
  else if (GET_CODE (x) == SYMBOL_REF
           && CONSTANT_POOL_ADDRESS_P (x))
    {
      *px = gen_rtx (ADDRESS, GET_MODE (x),
                     gen_rtx (CONST, get_pool_mode (x),
                              get_pool_constant (x)));
      save_constants (&XEXP (*px, 0));
      RTX_INTEGRATED_P (*px) = 1;
    }

  else
    {
      char *fmt = GET_RTX_FORMAT (GET_CODE (x));
      int len = GET_RTX_LENGTH (GET_CODE (x));

      for (i = len - 1; i >= 0; i--)
        {
          switch (fmt[i])
            {
            case 'E':
              for (j = 0; j < XVECLEN (x, i); j++)
                save_constants (&XVECEXP (x, i, j));
              break;

            case 'e':
              if (XEXP (x, i) == 0)
                continue;
              if (i == 0)
                {
                  /* Hack tail-recursion here.  */
                  px = &XEXP (x, 0);
                  goto again;
                }
              save_constants (&XEXP (x, i));
              break;
            }
        }
    }
}
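
/* Editor's illustration (not part of the original source): for a pool
   constant C of mode M, the two rewrites performed above are roughly

     (mem:M (symbol_ref "*.LC0"))  ==>  (const:M C)
     (symbol_ref "*.LC0")          ==>  (address:P (const:M C))

   in both cases with RTX_INTEGRATED_P set so the reference can later be
   recognized and converted back into a constant pool entry.  */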

/* Note whether a parameter is modified or not.  */

static void
note_modified_parmregs (reg, x)
     rtx reg;
     rtx x;
{
  if (GET_CODE (reg) == REG && in_nonparm_insns
      && REGNO (reg) < max_parm_reg
      && REGNO (reg) >= FIRST_PSEUDO_REGISTER
      && parmdecl_map[REGNO (reg)] != 0)
    TREE_READONLY (parmdecl_map[REGNO (reg)]) = 0;
}
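
/* Editor's note: this is the callback passed to note_stores in
   save_for_inline_nocopy above; it is invoked for each destination stored
   into by an insn pattern, and any store into a pseudo that is a parm's
   home clears that parm's TREE_READONLY flag.  */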

/* Copy the rtx ORIG recursively, replacing pseudo-regs and labels
   according to `reg_map' and `label_map'.  The original rtl insns
   will be saved for inlining; this is used to make a copy
   which is used to finish compiling the inline function itself.

   If we find a "saved" constant pool entry, one which was replaced with
   the value of the constant, convert it back to a constant pool entry.
   Since the pool wasn't touched, this should simply restore the old
   address.

   All other kinds of rtx are copied except those that can never be
   changed during compilation.  */

static rtx
copy_for_inline (orig)
     rtx orig;
{
  register rtx x = orig;
  register rtx new;
  register int i;
  register enum rtx_code code;
  register char *format_ptr;

  if (x == 0)
    return x;

  code = GET_CODE (x);

  /* These types may be freely shared.  */

  switch (code)
    {
    case QUEUED:
    case CONST_INT:
    case SYMBOL_REF:
    case PC:
    case CC0:
      return x;

    case CONST_DOUBLE:
      /* We have to make a new CONST_DOUBLE to ensure that we account for
         it correctly.  Using the old CONST_DOUBLE_MEM data is wrong.  */
      if (GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT)
        {
          REAL_VALUE_TYPE d;

          REAL_VALUE_FROM_CONST_DOUBLE (d, x);
          return CONST_DOUBLE_FROM_REAL_VALUE (d, GET_MODE (x));
        }
      else
        return immed_double_const (CONST_DOUBLE_LOW (x), CONST_DOUBLE_HIGH (x),
                                   VOIDmode);

    case CONST:
      /* Get constant pool entry for constant in the pool.  */
      if (RTX_INTEGRATED_P (x))
        return validize_mem (force_const_mem (GET_MODE (x),
                                              copy_for_inline (XEXP (x, 0))));
      break;

    case SUBREG:
      /* Get constant pool entry, but access in different mode.  */
      if (RTX_INTEGRATED_P (x))
        {
          new = force_const_mem (GET_MODE (SUBREG_REG (x)),
                                 copy_for_inline (XEXP (SUBREG_REG (x), 0)));

          PUT_MODE (new, GET_MODE (x));
          return validize_mem (new);
        }
      break;

    case ADDRESS:
      /* If this ADDRESS is not specially marked as a saved constant pool
         reference, something is wrong.  Otherwise, recover the constant
         pool address.  */
      if (! RTX_INTEGRATED_P (x))
        abort ();

      new = force_const_mem (GET_MODE (XEXP (x, 0)),
                             copy_for_inline (XEXP (XEXP (x, 0), 0)));
      new = XEXP (new, 0);

#ifdef POINTERS_EXTEND_UNSIGNED
      if (GET_MODE (new) != GET_MODE (x))
        new = convert_memory_address (GET_MODE (x), new);
#endif

      return new;

    case ASM_OPERANDS:
      /* If a single asm insn contains multiple output operands
         then it contains multiple ASM_OPERANDS rtx's that share operand 3.
         We must make sure that the copied insn continues to share it.  */
      if (orig_asm_operands_vector == XVEC (orig, 3))
        {
          x = rtx_alloc (ASM_OPERANDS);
          x->volatil = orig->volatil;
          XSTR (x, 0) = XSTR (orig, 0);
          XSTR (x, 1) = XSTR (orig, 1);
          XINT (x, 2) = XINT (orig, 2);
          XVEC (x, 3) = copy_asm_operands_vector;
          XVEC (x, 4) = copy_asm_constraints_vector;
          XSTR (x, 5) = XSTR (orig, 5);
          XINT (x, 6) = XINT (orig, 6);
          return x;
        }
      break;

    case MEM:
      /* A MEM is usually allowed to be shared if its address is constant
         or is a constant plus one of the special registers.

         We do not allow sharing of addresses that are either a special
         register or the sum of a constant and a special register because
         it is possible for unshare_all_rtl to copy the address, into memory
         that won't be saved.  Although the MEM can safely be shared, and
         won't be copied there, the address itself cannot be shared, and may
         need to be copied.

         There are also two exceptions with constants: The first is if the
         constant is a LABEL_REF or the sum of the LABEL_REF
         and an integer.  This case can happen if we have an inline
         function that supplies a constant operand to the call of another
         inline function that uses it in a switch statement.  In this case,
         we will be replacing the LABEL_REF, so we have to replace this MEM
         as well.

         The second case is if we have a (const (plus (address ..) ...)).
         In that case we need to put back the address of the constant pool
         entry.  */

      if (CONSTANT_ADDRESS_P (XEXP (x, 0))
          && GET_CODE (XEXP (x, 0)) != LABEL_REF
          && ! (GET_CODE (XEXP (x, 0)) == CONST
                && (GET_CODE (XEXP (XEXP (x, 0), 0)) == PLUS
                    && ((GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 0))
                         == LABEL_REF)
                        || (GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 0))
                            == ADDRESS)))))
        return x;
      break;

    case LABEL_REF:
      /* If this is a non-local label, just make a new LABEL_REF.
         Otherwise, use the new label as well.  */
      x = gen_rtx (LABEL_REF, GET_MODE (orig),
                   LABEL_REF_NONLOCAL_P (orig) ? XEXP (orig, 0)
                   : label_map[CODE_LABEL_NUMBER (XEXP (orig, 0))]);
      LABEL_REF_NONLOCAL_P (x) = LABEL_REF_NONLOCAL_P (orig);
      LABEL_OUTSIDE_LOOP_P (x) = LABEL_OUTSIDE_LOOP_P (orig);
      return x;

    case REG:
      if (REGNO (x) > LAST_VIRTUAL_REGISTER)
        return reg_map[REGNO (x)];
      else
        return x;

    case SET:
      /* If a parm that gets modified lives in a pseudo-reg,
         clear its TREE_READONLY to prevent certain optimizations.  */
      {
        rtx dest = SET_DEST (x);

        while (GET_CODE (dest) == STRICT_LOW_PART
               || GET_CODE (dest) == ZERO_EXTRACT
               || GET_CODE (dest) == SUBREG)
          dest = XEXP (dest, 0);

        if (GET_CODE (dest) == REG
            && REGNO (dest) < max_parm_reg
            && REGNO (dest) >= FIRST_PSEUDO_REGISTER
            && parmdecl_map[REGNO (dest)] != 0
            /* The insn to load an arg pseudo from a stack slot
               does not count as modifying it.  */
            && in_nonparm_insns)
          TREE_READONLY (parmdecl_map[REGNO (dest)]) = 0;
      }
      break;

#if 0 /* This is a good idea, but here is the wrong place for it.  */
      /* Arrange that CONST_INTs always appear as the second operand
         if they appear, and that `frame_pointer_rtx' or `arg_pointer_rtx'
         always appear as the first.  */
    case PLUS:
      if (GET_CODE (XEXP (x, 0)) == CONST_INT
          || (XEXP (x, 1) == frame_pointer_rtx
              || (ARG_POINTER_REGNUM != FRAME_POINTER_REGNUM
                  && XEXP (x, 1) == arg_pointer_rtx)))
        {
          rtx t = XEXP (x, 0);
          XEXP (x, 0) = XEXP (x, 1);
          XEXP (x, 1) = t;
        }
      break;
#endif

    default:
      break;
    }

  /* Replace this rtx with a copy of itself.  */

  x = rtx_alloc (code);
  bcopy ((char *) orig, (char *) x,
         (sizeof (*x) - sizeof (x->fld)
          + sizeof (x->fld[0]) * GET_RTX_LENGTH (code)));

  /* Now scan the subexpressions recursively.
     We can store any replaced subexpressions directly into X
     since we know X is not shared!  Any vectors in X
     must be copied if X was copied.  */

  format_ptr = GET_RTX_FORMAT (code);

  for (i = 0; i < GET_RTX_LENGTH (code); i++)
    {
      switch (*format_ptr++)
        {
        case 'e':
          XEXP (x, i) = copy_for_inline (XEXP (x, i));
          break;

        case 'u':
          /* Change any references to old-insns to point to the
             corresponding copied insns.  */
          XEXP (x, i) = insn_map[INSN_UID (XEXP (x, i))];
          break;

        case 'E':
          if (XVEC (x, i) != NULL && XVECLEN (x, i) != 0)
            {
              register int j;

              XVEC (x, i) = gen_rtvec_vv (XVECLEN (x, i), XVEC (x, i)->elem);
              for (j = 0; j < XVECLEN (x, i); j++)
                XVECEXP (x, i, j)
                  = copy_for_inline (XVECEXP (x, i, j));
            }
          break;
        }
    }

  if (code == ASM_OPERANDS && orig_asm_operands_vector == 0)
    {
      orig_asm_operands_vector = XVEC (orig, 3);
      copy_asm_operands_vector = XVEC (x, 3);
      copy_asm_constraints_vector = XVEC (x, 4);
    }

  return x;
}
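
/* Editor's illustration (not part of the original source): for an insn
   pattern such as

     (set (reg:SI 105) (plus:SI (reg:SI 102) (const_int 4)))

   the generic copying tail above rewrites both REGs through reg_map,
   while the switch lets the shared CONST_INT through unchanged.  */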

/* Unfortunately, we need a global copy of const_equiv map for communication
   with a function called from note_stores.  Be *very* careful that this
   is used properly in the presence of recursion.  */

rtx *global_const_equiv_map;
int global_const_equiv_map_size;

#define FIXED_BASE_PLUS_P(X) \
  (GET_CODE (X) == PLUS && GET_CODE (XEXP (X, 1)) == CONST_INT  \
   && GET_CODE (XEXP (X, 0)) == REG                             \
   && REGNO (XEXP (X, 0)) >= FIRST_VIRTUAL_REGISTER             \
   && REGNO (XEXP (X, 0)) <= LAST_VIRTUAL_REGISTER)
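
/* Editor's example (not part of the original source): FIXED_BASE_PLUS_P
   matches a constant offset from a virtual register, e.g.

     (plus:SI (reg:SI virtual-stack-vars) (const_int 8))

   but neither (plus (reg 105) (const_int 8)) nor a bare register.  */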

/* Integrate the procedure defined by FNDECL.  Note that this function
   may wind up calling itself.  Since the static variables are not
   reentrant, we do not assign them until after the possibility
   of recursion is eliminated.

   If IGNORE is nonzero, do not produce a value.
   Otherwise store the value in TARGET if it is nonzero and that is convenient.

   Value is:
   (rtx)-1 if we could not substitute the function
   0 if we substituted it and it does not produce a value
   else an rtx for where the value is stored.  */

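/* Editor's sketch (not part of the original source) of the return-value
   convention documented above, as seen from a caller:

     temp = expand_inline_function (fndecl, parms, target, ignore,
                                    type, structure_value_addr);

   If TEMP is (rtx) (HOST_WIDE_INT) -1, inlining was refused and an
   ordinary call must be emitted instead; otherwise TEMP is zero or the
   rtx holding the value.  */
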
rtx
expand_inline_function (fndecl, parms, target, ignore, type,
                        structure_value_addr)
     tree fndecl, parms;
     rtx target;
     int ignore;
     tree type;
     rtx structure_value_addr;
{
  tree formal, actual, block;
  rtx header = DECL_SAVED_INSNS (fndecl);
  rtx insns = FIRST_FUNCTION_INSN (header);
  rtx parm_insns = FIRST_PARM_INSN (header);
  tree *arg_trees;
  rtx *arg_vals;
  rtx insn;
  int max_regno;
  register int i;
  int min_labelno = FIRST_LABELNO (header);
  int max_labelno = LAST_LABELNO (header);
  int nargs;
  rtx local_return_label = 0;
  rtx loc;
  rtx stack_save = 0;
  rtx temp;
  struct inline_remap *map;
  rtx cc0_insn = 0;
  rtvec arg_vector = ORIGINAL_ARG_VECTOR (header);
  rtx static_chain_value = 0;

  /* The pointer used to track the true location of the memory used
     for MAP->LABEL_MAP.  */
  rtx *real_label_map = 0;

  /* Allow for equivalences of the pseudos we make for virtual fp and ap.  */
  max_regno = MAX_REGNUM (header) + 3;
  if (max_regno < FIRST_PSEUDO_REGISTER)
    abort ();

  nargs = list_length (DECL_ARGUMENTS (fndecl));

  /* Check that the parms' types match and that sufficient arguments were
     passed.  Since the appropriate conversions or default promotions have
     already been applied, the machine modes should match exactly.  */

  for (formal = DECL_ARGUMENTS (fndecl), actual = parms;
       formal;
       formal = TREE_CHAIN (formal), actual = TREE_CHAIN (actual))
    {
      tree arg;
      enum machine_mode mode;

      if (actual == 0)
        return (rtx) (HOST_WIDE_INT) -1;

      arg = TREE_VALUE (actual);
      mode = TYPE_MODE (DECL_ARG_TYPE (formal));

      if (mode != TYPE_MODE (TREE_TYPE (arg))
          /* If they are block mode, the types should match exactly.
             They don't match exactly if TREE_TYPE (FORMAL) == ERROR_MARK_NODE,
             which could happen if the parameter has incomplete type.  */
          || (mode == BLKmode
              && (TYPE_MAIN_VARIANT (TREE_TYPE (arg))
                  != TYPE_MAIN_VARIANT (TREE_TYPE (formal)))))
        return (rtx) (HOST_WIDE_INT) -1;
    }

  /* Extra arguments are valid, but will be ignored below, so we must
     evaluate them here for side-effects.  */
  for (; actual; actual = TREE_CHAIN (actual))
    expand_expr (TREE_VALUE (actual), const0_rtx,
                 TYPE_MODE (TREE_TYPE (TREE_VALUE (actual))), 0);

  /* Make a binding contour to keep inline cleanups called at
     outer function-scope level from looking like they are shadowing
     parameter declarations.  */
  pushlevel (0);

  /* Expand the function arguments.  Do this first so that any
     new registers get created before we allocate the maps.  */

  arg_vals = (rtx *) alloca (nargs * sizeof (rtx));
  arg_trees = (tree *) alloca (nargs * sizeof (tree));

  for (formal = DECL_ARGUMENTS (fndecl), actual = parms, i = 0;
       formal;
       formal = TREE_CHAIN (formal), actual = TREE_CHAIN (actual), i++)
    {
      /* Actual parameter, converted to the type of the argument within the
         function.  */
      tree arg = convert (TREE_TYPE (formal), TREE_VALUE (actual));
      /* Mode of the variable used within the function.  */
      enum machine_mode mode = TYPE_MODE (TREE_TYPE (formal));
      int invisiref = 0;

      arg_trees[i] = arg;
      loc = RTVEC_ELT (arg_vector, i);

      /* If this is an object passed by invisible reference, we copy the
         object into a stack slot and save its address.  If this will go
         into memory, we do nothing now.  Otherwise, we just expand the
         argument.  */
      if (GET_CODE (loc) == MEM && GET_CODE (XEXP (loc, 0)) == REG
          && REGNO (XEXP (loc, 0)) > LAST_VIRTUAL_REGISTER)
        {
          rtx stack_slot
            = assign_stack_temp (TYPE_MODE (TREE_TYPE (arg)),
                                 int_size_in_bytes (TREE_TYPE (arg)), 1);
          MEM_IN_STRUCT_P (stack_slot) = AGGREGATE_TYPE_P (TREE_TYPE (arg));

          store_expr (arg, stack_slot, 0);

          arg_vals[i] = XEXP (stack_slot, 0);
          invisiref = 1;
        }
      else if (GET_CODE (loc) != MEM)
        {
          if (GET_MODE (loc) != TYPE_MODE (TREE_TYPE (arg)))
            /* The modes of LOC and ARG can differ if LOC was a variable
               that had its mode promoted via PROMOTE_MODE.  */
            arg_vals[i] = convert_modes (GET_MODE (loc),
                                         TYPE_MODE (TREE_TYPE (arg)),
                                         expand_expr (arg, NULL_RTX, mode,
                                                      EXPAND_SUM),
                                         TREE_UNSIGNED (TREE_TYPE (formal)));
          else
            arg_vals[i] = expand_expr (arg, NULL_RTX, mode, EXPAND_SUM);
        }
      else
        arg_vals[i] = 0;

      if (arg_vals[i] != 0
          && (! TREE_READONLY (formal)
              /* If the parameter is not read-only, copy our argument through
                 a register.  Also, we cannot use ARG_VALS[I] if it overlaps
                 TARGET in any way.  In the inline function, they will likely
                 be two different pseudos, and `safe_from_p' will make all
                 sorts of smart assumptions about their not conflicting.
                 But if ARG_VALS[I] overlaps TARGET, these assumptions are
                 wrong, so put ARG_VALS[I] into a fresh register.
                 Don't worry about invisible references, since their stack
                 temps will never overlap the target.  */
              || (target != 0
                  && ! invisiref
                  && (GET_CODE (arg_vals[i]) == REG
                      || GET_CODE (arg_vals[i]) == SUBREG
                      || GET_CODE (arg_vals[i]) == MEM)
                  && reg_overlap_mentioned_p (arg_vals[i], target))
              /* ??? We must always copy a SUBREG into a REG, because it might
                 get substituted into an address, and not all ports correctly
                 handle SUBREGs in addresses.  */
              || (GET_CODE (arg_vals[i]) == SUBREG)))
        arg_vals[i] = copy_to_mode_reg (GET_MODE (loc), arg_vals[i]);

      if (arg_vals[i] != 0 && GET_CODE (arg_vals[i]) == REG
          && POINTER_TYPE_P (TREE_TYPE (formal)))
        mark_reg_pointer (arg_vals[i],
                          (TYPE_ALIGN (TREE_TYPE (TREE_TYPE (formal)))
                           / BITS_PER_UNIT));
    }

  /* Allocate the structures we use to remap things.  */

  map = (struct inline_remap *) alloca (sizeof (struct inline_remap));
  map->fndecl = fndecl;

  map->reg_map = (rtx *) alloca (max_regno * sizeof (rtx));
  bzero ((char *) map->reg_map, max_regno * sizeof (rtx));

  /* We used to use alloca here, but the size of what it would try to
     allocate would occasionally cause it to exceed the stack limit and
     cause unpredictable core dumps.  */
  real_label_map
    = (rtx *) xmalloc ((max_labelno) * sizeof (rtx));
  map->label_map = real_label_map;

  map->insn_map = (rtx *) alloca (INSN_UID (header) * sizeof (rtx));
  bzero ((char *) map->insn_map, INSN_UID (header) * sizeof (rtx));
  map->min_insnno = 0;
  map->max_insnno = INSN_UID (header);

  map->integrating = 1;

  /* const_equiv_map maps pseudos in our routine to constants, so it needs to
     be large enough for all our pseudos.  This is the number we are currently
     using plus the number in the called routine, plus 15 for each arg,
     five to compute the virtual frame pointer, and five for the return value.
     This should be enough for most cases.  We do not reference entries
     outside the range of the map.

     ??? These numbers are quite arbitrary and were obtained by
     experimentation.  At some point, we should try to allocate the
1462     table after all the parameters are set up so we can more accurately
1463     estimate the number of pseudos we will need.  */
1464
1465  map->const_equiv_map_size
1466    = max_reg_num () + (max_regno - FIRST_PSEUDO_REGISTER) + 15 * nargs + 10;
1467
1468  map->const_equiv_map
1469    = (rtx *)alloca (map->const_equiv_map_size * sizeof (rtx));
1470  bzero ((char *) map->const_equiv_map,
1471         map->const_equiv_map_size * sizeof (rtx));
1472
1473  map->const_age_map
1474    = (unsigned *)alloca (map->const_equiv_map_size * sizeof (unsigned));
1475  bzero ((char *) map->const_age_map,
1476         map->const_equiv_map_size * sizeof (unsigned));
1477  map->const_age = 0;
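  /* An entry in const_equiv_map[R] is believed only while
     const_age_map[R] >= map->const_age.  Equivalences for parameters
     are recorded with CONST_AGE_PARM so they stay valid through the
     whole body; those recorded while copying straight-line code use the
     current age and are implicitly discarded when const_age is
     incremented at each label below.  */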
1478
1479  /* Record the current insn in case we have to set up pointers to frame
1480     and argument memory blocks.  If there are no insns yet, add a dummy
1481     insn that can be used as an insertion point.  */
1482  map->insns_at_start = get_last_insn ();
1483  if (map->insns_at_start == 0)
1484    map->insns_at_start = emit_note (NULL_PTR, NOTE_INSN_DELETED);
1485
1486  map->regno_pointer_flag = INLINE_REGNO_POINTER_FLAG (header);
1487  map->regno_pointer_align = INLINE_REGNO_POINTER_ALIGN (header);
1488
1489  /* Update the outgoing argument size to allow for those in the inlined
1490     function.  */
1491  if (OUTGOING_ARGS_SIZE (header) > current_function_outgoing_args_size)
1492    current_function_outgoing_args_size = OUTGOING_ARGS_SIZE (header);
1493
1494  /* If the inline function needs to make PIC references, that means
1495     that this function's PIC offset table must be used.  */
1496  if (FUNCTION_FLAGS (header) & FUNCTION_FLAGS_USES_PIC_OFFSET_TABLE)
1497    current_function_uses_pic_offset_table = 1;
1498
1499  /* If this function needs a context, set it up.  */
1500  if (FUNCTION_FLAGS (header) & FUNCTION_FLAGS_NEEDS_CONTEXT)
1501    static_chain_value = lookup_static_chain (fndecl);
1502
1503  if (GET_CODE (parm_insns) == NOTE
1504      && NOTE_LINE_NUMBER (parm_insns) > 0)
1505    {
1506      rtx note = emit_note (NOTE_SOURCE_FILE (parm_insns),
1507                            NOTE_LINE_NUMBER (parm_insns));
1508      if (note)
1509        RTX_INTEGRATED_P (note) = 1;
1510    }
1511
1512  /* Process each argument.  For each, set up things so that the function's
1513     reference to the argument will refer to the argument being passed.
1514     We only replace REG with REG here.  Any simplifications are done
1515     via const_equiv_map.
1516
1517     We make two passes:  In the first, we deal with parameters that will
1518     be placed into registers, since we need to ensure that the allocated
1519     register number fits in const_equiv_map.  Then we store all non-register
1520     parameters into their memory location.  */
1521
1522  /* Don't try to free temp stack slots here, because we may put one of the
1523     parameters into a temp stack slot.  */
1524
1525  for (i = 0; i < nargs; i++)
1526    {
1527      rtx copy = arg_vals[i];
1528
1529      loc = RTVEC_ELT (arg_vector, i);
1530
1531      /* There are three cases, each handled separately.  */
1532      if (GET_CODE (loc) == MEM && GET_CODE (XEXP (loc, 0)) == REG
1533          && REGNO (XEXP (loc, 0)) > LAST_VIRTUAL_REGISTER)
1534        {
1535          /* This must be an object passed by invisible reference (it could
1536             also be a variable-sized object, but we forbid inlining functions
1537             with variable-sized arguments).  COPY is the address of the
1538             actual value (this computation will cause it to be copied).  We
1539             map that address for the register, noting the actual address as
1540             an equivalent in case it can be substituted into the insns.  */
1541
1542          if (GET_CODE (copy) != REG)
1543            {
1544              temp = copy_addr_to_reg (copy);
1545              if ((CONSTANT_P (copy) || FIXED_BASE_PLUS_P (copy))
1546                  && REGNO (temp) < map->const_equiv_map_size)
1547                {
1548                  map->const_equiv_map[REGNO (temp)] = copy;
1549                  map->const_age_map[REGNO (temp)] = CONST_AGE_PARM;
1550                }
1551              copy = temp;
1552            }
1553          map->reg_map[REGNO (XEXP (loc, 0))] = copy;
1554        }
1555      else if (GET_CODE (loc) == MEM)
1556        {
1557          /* This is the case of a parameter that lives in memory.
1558             It will live in the block we allocate in the called routine's
1559             frame that simulates the incoming argument area.  Do nothing
1560             now; we will call store_expr later.  */
1561          ;
1562        }
1563      else if (GET_CODE (loc) == REG)
1564        {
1565          /* This is the good case where the parameter is in a register.
1566             If it is read-only and our argument is a constant, set up the
1567             constant equivalence.
1568
1569             If LOC is REG_USERVAR_P, the usual case, COPY must also have
1570             that flag set if it is a register.
1571
1572             Also, don't allow hard registers here; they might not be valid
1573             when substituted into insns.  */
1574
1575          if ((GET_CODE (copy) != REG && GET_CODE (copy) != SUBREG)
1576              || (GET_CODE (copy) == REG && REG_USERVAR_P (loc)
1577                  && ! REG_USERVAR_P (copy))
1578              || (GET_CODE (copy) == REG
1579                  && REGNO (copy) < FIRST_PSEUDO_REGISTER))
1580            {
1581              temp = copy_to_mode_reg (GET_MODE (loc), copy);
1582              REG_USERVAR_P (temp) = REG_USERVAR_P (loc);
1583              if ((CONSTANT_P (copy) || FIXED_BASE_PLUS_P (copy))
1584                  && REGNO (temp) < map->const_equiv_map_size)
1585                {
1586                  map->const_equiv_map[REGNO (temp)] = copy;
1587                  map->const_age_map[REGNO (temp)] = CONST_AGE_PARM;
1588                }
1589              copy = temp;
1590            }
1591          map->reg_map[REGNO (loc)] = copy;
1592        }
1593      else if (GET_CODE (loc) == CONCAT)
1594        {
1595          /* This is the good case where the parameter is in a
1596             pair of separate pseudos.
1597             If it is read-only and our argument is a constant, set up the
1598             constant equivalence.
1599
1600             If LOC is REG_USERVAR_P, the usual case, COPY must also have
1601             that flag set if it is a register.
1602
1603             Also, don't allow hard registers here; they might not be valid
1604             when substituted into insns.  */
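          /* Illustrative case: a __complex__ double parameter might
             arrive as (concat:DC (reg:DF r) (reg:DF i)); the real and
             imaginary halves are remapped separately below.  */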
1605          rtx locreal = gen_realpart (GET_MODE (XEXP (loc, 0)), loc);
1606          rtx locimag = gen_imagpart (GET_MODE (XEXP (loc, 0)), loc);
1607          rtx copyreal = gen_realpart (GET_MODE (locreal), copy);
1608          rtx copyimag = gen_imagpart (GET_MODE (locimag), copy);
1609
1610          if ((GET_CODE (copyreal) != REG && GET_CODE (copyreal) != SUBREG)
1611              || (GET_CODE (copyreal) == REG && REG_USERVAR_P (locreal)
1612                  && ! REG_USERVAR_P (copyreal))
1613              || (GET_CODE (copyreal) == REG
1614                  && REGNO (copyreal) < FIRST_PSEUDO_REGISTER))
1615            {
1616              temp = copy_to_mode_reg (GET_MODE (locreal), copyreal);
1617              REG_USERVAR_P (temp) = REG_USERVAR_P (locreal);
1618              if ((CONSTANT_P (copyreal) || FIXED_BASE_PLUS_P (copyreal))
1619                  && REGNO (temp) < map->const_equiv_map_size)
1620                {
1621                  map->const_equiv_map[REGNO (temp)] = copyreal;
1622                  map->const_age_map[REGNO (temp)] = CONST_AGE_PARM;
1623                }
1624              copyreal = temp;
1625            }
1626          map->reg_map[REGNO (locreal)] = copyreal;
1627
1628          if ((GET_CODE (copyimag) != REG && GET_CODE (copyimag) != SUBREG)
1629              || (GET_CODE (copyimag) == REG && REG_USERVAR_P (locimag)
1630                  && ! REG_USERVAR_P (copyimag))
1631              || (GET_CODE (copyimag) == REG
1632                  && REGNO (copyimag) < FIRST_PSEUDO_REGISTER))
1633            {
1634              temp = copy_to_mode_reg (GET_MODE (locimag), copyimag);
1635              REG_USERVAR_P (temp) = REG_USERVAR_P (locimag);
1636              if ((CONSTANT_P (copyimag) || FIXED_BASE_PLUS_P (copyimag))
1637                  && REGNO (temp) < map->const_equiv_map_size)
1638                {
1639                  map->const_equiv_map[REGNO (temp)] = copyimag;
1640                  map->const_age_map[REGNO (temp)] = CONST_AGE_PARM;
1641                }
1642              copyimag = temp;
1643            }
1644          map->reg_map[REGNO (locimag)] = copyimag;
1645        }
1646      else
1647        abort ();
1648    }
1649
1650  /* Now do the parameters that will be placed in memory.  */
1651
1652  for (formal = DECL_ARGUMENTS (fndecl), i = 0;
1653       formal; formal = TREE_CHAIN (formal), i++)
1654    {
1655      loc = RTVEC_ELT (arg_vector, i);
1656
1657      if (GET_CODE (loc) == MEM
1658          /* Exclude case handled above.  */
1659          && ! (GET_CODE (XEXP (loc, 0)) == REG
1660                && REGNO (XEXP (loc, 0)) > LAST_VIRTUAL_REGISTER))
1661        {
1662          rtx note = emit_note (DECL_SOURCE_FILE (formal),
1663                                DECL_SOURCE_LINE (formal));
1664          if (note)
1665            RTX_INTEGRATED_P (note) = 1;
1666
1667          /* Compute the address in the area we reserved and store the
1668             value there.  */
1669          temp = copy_rtx_and_substitute (loc, map);
1670          subst_constants (&temp, NULL_RTX, map);
1671          apply_change_group ();
1672          if (! memory_address_p (GET_MODE (temp), XEXP (temp, 0)))
1673            temp = change_address (temp, VOIDmode, XEXP (temp, 0));
1674          store_expr (arg_trees[i], temp, 0);
1675        }
1676    }
1677
1678  /* Deal with the places that the function puts its result.
1679     We are driven by what is placed into DECL_RESULT.
1680
1681     Initially, we assume that we don't need any special handling for
1682     REG_FUNCTION_VALUE_P.  */
1683
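  /* Four possibilities: the function returns nothing (VOIDmode), it
     returns in memory (a structure value), the caller ignores the
     value, or it returns in a register.  */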
1684  map->inline_target = 0;
1685  loc = DECL_RTL (DECL_RESULT (fndecl));
1686  if (TYPE_MODE (type) == VOIDmode)
1687    /* There is no return value to worry about.  */
1688    ;
1689  else if (GET_CODE (loc) == MEM)
1690    {
1691      if (! structure_value_addr || ! aggregate_value_p (DECL_RESULT (fndecl)))
1692        abort ();
1693 
1694      /* Pass the function the address in which to return a structure value.
1695         Note that a constructor can cause someone to call us with
1696         STRUCTURE_VALUE_ADDR, but the initialization takes place
1697         via the first parameter, rather than the struct return address.
1698
1699         We have two cases:  If the address is a simple register indirect,
1700         use the mapping mechanism to point that register to our structure
1701         return address.  Otherwise, store the structure return value into
1702         the place that it will be referenced from.  */
1703
1704      if (GET_CODE (XEXP (loc, 0)) == REG)
1705        {
1706          temp = force_reg (Pmode,
1707                            force_operand (structure_value_addr, NULL_RTX));
1708          map->reg_map[REGNO (XEXP (loc, 0))] = temp;
1709          if ((CONSTANT_P (structure_value_addr)
1710               || GET_CODE (structure_value_addr) == ADDRESSOF
1711               || (GET_CODE (structure_value_addr) == PLUS
1712                   && XEXP (structure_value_addr, 0) == virtual_stack_vars_rtx
1713                   && GET_CODE (XEXP (structure_value_addr, 1)) == CONST_INT))
1714              && REGNO (temp) < map->const_equiv_map_size)
1715            {
1716              map->const_equiv_map[REGNO (temp)] = structure_value_addr;
1717              map->const_age_map[REGNO (temp)] = CONST_AGE_PARM;
1718            }
1719        }
1720      else
1721        {
1722          temp = copy_rtx_and_substitute (loc, map);
1723          subst_constants (&temp, NULL_RTX, map);
1724          apply_change_group ();
1725          emit_move_insn (temp, structure_value_addr);
1726        }
1727    }
1728  else if (ignore)
1729    /* We will ignore the result value, so don't look at its structure.
1730       Note that preparations for an aggregate return value
1731       do need to be made (above) even if it will be ignored.  */
1732    ;
1733  else if (GET_CODE (loc) == REG)
1734    {
1735      /* The function returns an object in a register and we use the return
1736         value.  Set up our target for remapping.  */
1737
1738      /* Machine mode the function was declared to return.  */
1739      enum machine_mode departing_mode = TYPE_MODE (type);
1740      /* (Possibly wider) machine mode it actually computes
1741         (for the sake of callers that fail to declare it right).
1742         We have to use the mode of the result's RTL, rather than
1743         its type, since expand_function_start may have promoted it.  */
1744      enum machine_mode arriving_mode
1745        = GET_MODE (DECL_RTL (DECL_RESULT (fndecl)));
1746      rtx reg_to_map;
1747
1748      /* Don't use MEMs as direct targets because on some machines
1749         substituting a MEM for a REG makes invalid insns.
1750         Let the combiner substitute the MEM if that is valid.  */
1751      if (target == 0 || GET_CODE (target) != REG
1752          || GET_MODE (target) != departing_mode)
1753        target = gen_reg_rtx (departing_mode);
1754
1755      /* If function's value was promoted before return,
1756         avoid machine mode mismatch when we substitute INLINE_TARGET.
1757         But TARGET is what we will return to the caller.  */
1758      if (arriving_mode != departing_mode)
1759        {
1760          /* Avoid creating a paradoxical subreg wider than
1761             BITS_PER_WORD, since that is illegal.  */
1762          if (GET_MODE_BITSIZE (arriving_mode) > BITS_PER_WORD)
1763            {
1764              if (!TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (departing_mode),
1765                                          GET_MODE_BITSIZE (arriving_mode)))
1766                /* Maybe this could be handled by using convert_move ()?  */
1767                abort ();
1768              reg_to_map = gen_reg_rtx (arriving_mode);
1769              target = gen_lowpart (departing_mode, reg_to_map);
1770            }
1771          else
1772            reg_to_map = gen_rtx (SUBREG, arriving_mode, target, 0);
1773        }
1774      else
1775        reg_to_map = target;
1776
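      /* Illustrative case: for a function declared to return a short
         whose result was promoted to SImode, DEPARTING_MODE is HImode,
         ARRIVING_MODE is SImode, and REG_TO_MAP is (subreg:SI TARGET 0),
         a paradoxical SUBREG of TARGET.  */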
1777      /* Usually, the result value is the machine's return register.
1778         Sometimes it may be a pseudo. Handle both cases.  */
1779      if (REG_FUNCTION_VALUE_P (loc))
1780        map->inline_target = reg_to_map;
1781      else
1782        map->reg_map[REGNO (loc)] = reg_to_map;
1783    }
1784  else
1785    abort ();
1786
1787  /* Make a fresh binding contour that we can easily remove.  Do this after
1788     expanding our arguments so cleanups are properly scoped.  */
1789  pushlevel (0);
1790  expand_start_bindings (0);
1791
1792  /* Initialize label_map.  get_label_from_map will actually make
1793     the labels.  */
1794  bzero ((char *) &map->label_map [min_labelno],
1795         (max_labelno - min_labelno) * sizeof (rtx));
1796
1797  /* Perform postincrements before actually calling the function.  */
1798  emit_queue ();
1799
1800  /* Clean up stack so that variables might have smaller offsets.  */
1801  do_pending_stack_adjust ();
1802
1803  /* Save a copy of the location of const_equiv_map for mark_stores, called
1804     via note_stores.  */
1805  global_const_equiv_map = map->const_equiv_map;
1806  global_const_equiv_map_size = map->const_equiv_map_size;
1807
1808  /* If the called function does an alloca, save and restore the
1809     stack pointer around the call.  This saves stack space, but
1810     also is required if this inline is being done between two
1811     pushes.  */
1812  if (FUNCTION_FLAGS (header) & FUNCTION_FLAGS_CALLS_ALLOCA)
1813    emit_stack_save (SAVE_BLOCK, &stack_save, NULL_RTX);
1814
1815  /* Now copy the insns one by one.  Do this in two passes, first the insns and
1816     then their REG_NOTES, just like save_for_inline.  */
1817
1818  /* This loop is very similar to the loop in copy_loop_body in unroll.c.  */
1819
1820  for (insn = insns; insn; insn = NEXT_INSN (insn))
1821    {
1822      rtx copy, pattern, set;
1823
1824      map->orig_asm_operands_vector = 0;
1825
1826      switch (GET_CODE (insn))
1827        {
1828        case INSN:
1829          pattern = PATTERN (insn);
1830          set = single_set (insn);
1831          copy = 0;
1832          if (GET_CODE (pattern) == USE
1833              && GET_CODE (XEXP (pattern, 0)) == REG
1834              && REG_FUNCTION_VALUE_P (XEXP (pattern, 0)))
1835            /* The (USE (REG n)) at return from the function should
1836               be ignored since we are changing (REG n) into
1837               inline_target.  */
1838            break;
1839
1840          /* Ignore setting a function value that we don't want to use.  */
1841          if (map->inline_target == 0
1842              && set != 0
1843              && GET_CODE (SET_DEST (set)) == REG
1844              && REG_FUNCTION_VALUE_P (SET_DEST (set)))
1845            {
1846              if (volatile_refs_p (SET_SRC (set)))
1847                {
1848                  rtx new_set;
1849
1850                  /* If we must not delete the source,
1851                     load it into a new temporary.  */
1852                  copy = emit_insn (copy_rtx_and_substitute (pattern, map));
1853
1854                  new_set = single_set (copy);
1855                  if (new_set == 0)
1856                    abort ();
1857
1858                  SET_DEST (new_set)
1859                    = gen_reg_rtx (GET_MODE (SET_DEST (new_set)));
1860                }
1861              /* If the source and destination are the same and it
1862                 has a note on it, keep the insn.  */
1863              else if (rtx_equal_p (SET_DEST (set), SET_SRC (set))
1864                       && REG_NOTES (insn) != 0)
1865                copy = emit_insn (copy_rtx_and_substitute (pattern, map));
1866              else
1867                break;
1868            }
1869
1870          /* If this is setting the static chain rtx, omit it.  */
1871          else if (static_chain_value != 0
1872                   && set != 0
1873                   && GET_CODE (SET_DEST (set)) == REG
1874                   && rtx_equal_p (SET_DEST (set),
1875                                   static_chain_incoming_rtx))
1876            break;
1877
1878          /* If this is setting the static chain pseudo, set it from
1879             the value we want to give it instead.  */
1880          else if (static_chain_value != 0
1881                   && set != 0
1882                   && rtx_equal_p (SET_SRC (set),
1883                                   static_chain_incoming_rtx))
1884            {
1885              rtx newdest = copy_rtx_and_substitute (SET_DEST (set), map);
1886
1887              copy = emit_move_insn (newdest, static_chain_value);
1888              static_chain_value = 0;
1889            }
1890          else
1891            copy = emit_insn (copy_rtx_and_substitute (pattern, map));
1892          /* REG_NOTES will be copied later.  */
1893
1894#ifdef HAVE_cc0
1895          /* If this insn is setting CC0, it may need to look at
1896             the insn that uses CC0 to see what type of insn it is.
1897             In that case, the call to recog via validate_change will
1898             fail.  So don't substitute constants here.  Instead,
1899             do it when we emit the following insn.
1900
1901             For example, see the pyr.md file.  That machine has signed and
1902             unsigned compares.  The compare patterns must check the
1903             following branch insn to see what kind of compare to
1904             emit.
1905
1906             If the previous insn set CC0, substitute constants on it as
1907             well.  */
1908          if (sets_cc0_p (PATTERN (copy)) != 0)
1909            cc0_insn = copy;
1910          else
1911            {
1912              if (cc0_insn)
1913                try_constants (cc0_insn, map);
1914              cc0_insn = 0;
1915              try_constants (copy, map);
1916            }
1917#else
1918          try_constants (copy, map);
1919#endif
1920          break;
1921
1922        case JUMP_INSN:
1923          if (GET_CODE (PATTERN (insn)) == RETURN
1924              || (GET_CODE (PATTERN (insn)) == PARALLEL
1925                  && GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == RETURN))
1926            {
1927              if (local_return_label == 0)
1928                local_return_label = gen_label_rtx ();
1929              pattern = gen_jump (local_return_label);
1930            }
1931          else
1932            pattern = copy_rtx_and_substitute (PATTERN (insn), map);
1933
1934          copy = emit_jump_insn (pattern);
1935
1936#ifdef HAVE_cc0
1937          if (cc0_insn)
1938            try_constants (cc0_insn, map);
1939          cc0_insn = 0;
1940#endif
1941          try_constants (copy, map);
1942
1943          /* If this used to be a conditional jump insn whose branch
1944             direction is now known, we must do something special.  */
1945          if (condjump_p (insn) && ! simplejump_p (insn) && map->last_pc_value)
1946            {
1947#ifdef HAVE_cc0
1948              /* The previous insn set cc0 for us.  So delete it.  */
1949              delete_insn (PREV_INSN (copy));
1950#endif
1951
1952              /* If this is now a no-op, delete it.  */
1953              if (map->last_pc_value == pc_rtx)
1954                {
1955                  delete_insn (copy);
1956                  copy = 0;
1957                }
1958              else
1959                /* Otherwise, this is an unconditional jump so we must put a
1960                   BARRIER after it.  We could do some dead code elimination
1961                   here, but jump.c will do it just as well.  */
1962                emit_barrier ();
1963            }
1964          break;
1965
1966        case CALL_INSN:
1967          pattern = copy_rtx_and_substitute (PATTERN (insn), map);
1968          copy = emit_call_insn (pattern);
1969
1970          /* Because the USAGE information potentially contains objects other
1971             than hard registers, we need to copy it.  */
1972          CALL_INSN_FUNCTION_USAGE (copy)
1973            = copy_rtx_and_substitute (CALL_INSN_FUNCTION_USAGE (insn), map);
1974
1975#ifdef HAVE_cc0
1976          if (cc0_insn)
1977            try_constants (cc0_insn, map);
1978          cc0_insn = 0;
1979#endif
1980          try_constants (copy, map);
1981
1982          /* Be lazy and assume CALL_INSNs clobber all hard registers.  */
1983          for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1984            map->const_equiv_map[i] = 0;
1985          break;
1986
1987        case CODE_LABEL:
1988          copy = emit_label (get_label_from_map (map,
1989                                                 CODE_LABEL_NUMBER (insn)));
1990          LABEL_NAME (copy) = LABEL_NAME (insn);
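          /* Equivalences established in straight-line code cannot be
             trusted past a label, since the label may be reached along
             another path; aging the map discards all but the parameter
             equivalences.  */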
1991          map->const_age++;
1992          break;
1993
1994        case BARRIER:
1995          copy = emit_barrier ();
1996          break;
1997
1998        case NOTE:
1999          /* It is important to discard function-end and function-beg notes,
2000             so we have only one of each in the current function.
2001             Also, NOTE_INSN_DELETED notes aren't useful (save_for_inline
2002             deleted these in the copy used for continuing compilation,
2003             not the copy used for inlining).  */
2004          if (NOTE_LINE_NUMBER (insn) != NOTE_INSN_FUNCTION_END
2005              && NOTE_LINE_NUMBER (insn) != NOTE_INSN_FUNCTION_BEG
2006              && NOTE_LINE_NUMBER (insn) != NOTE_INSN_DELETED)
2007            {
2008              copy = emit_note (NOTE_SOURCE_FILE (insn),
2009                                NOTE_LINE_NUMBER (insn));
2010              if (copy
2011                  && (NOTE_LINE_NUMBER (copy) == NOTE_INSN_EH_REGION_BEG
2012                      || NOTE_LINE_NUMBER (copy) == NOTE_INSN_EH_REGION_END))
2013                {
2014                  rtx label
2015                    = get_label_from_map (map, NOTE_BLOCK_NUMBER (copy));
2016
2017                  /* We have to forward these both to match the new exception
2018                     region.  */
2019                  NOTE_BLOCK_NUMBER (copy) = CODE_LABEL_NUMBER (label);
2020                }
2021            }
2022          else
2023            copy = 0;
2024          break;
2025
2026        default:
2027          abort ();
2028          break;
2029        }
2030
2031      if (copy)
2032        RTX_INTEGRATED_P (copy) = 1;
2033
2034      map->insn_map[INSN_UID (insn)] = copy;
2035    }
2036
2037  /* Now copy the REG_NOTES.  Increment const_age, so that only constants
2038     from parameters can be substituted in.  These are the only ones that
2039     are valid across the entire function.  */
2040  map->const_age++;
2041  for (insn = insns; insn; insn = NEXT_INSN (insn))
2042    if (GET_RTX_CLASS (GET_CODE (insn)) == 'i'
2043        && map->insn_map[INSN_UID (insn)]
2044        && REG_NOTES (insn))
2045      {
2046        rtx tem = copy_rtx_and_substitute (REG_NOTES (insn), map);
2047        /* We must also do subst_constants, in case one of our parameters
2048           has const type and constant value.  */
2049        subst_constants (&tem, NULL_RTX, map);
2050        apply_change_group ();
2051        REG_NOTES (map->insn_map[INSN_UID (insn)]) = tem;
2052      }
2053
2054  if (local_return_label)
2055    emit_label (local_return_label);
2056
2057  /* Restore the stack pointer if we saved it above.  */
2058  if (FUNCTION_FLAGS (header) & FUNCTION_FLAGS_CALLS_ALLOCA)
2059    emit_stack_restore (SAVE_BLOCK, stack_save, NULL_RTX);
2060
2061  /* Make copies of the decls of the symbols in the inline function, so that
2062     the copies of the variables get declared in the current function.  Set
2063     up things so that lookup_static_chain knows to interpret registers
2064     in SAVE_EXPRs for TYPE_SIZEs as local.  */
2065
2066  inline_function_decl = fndecl;
2067  integrate_parm_decls (DECL_ARGUMENTS (fndecl), map, arg_vector);
2068  integrate_decl_tree ((tree) ORIGINAL_DECL_INITIAL (header), 0, map);
2069  inline_function_decl = 0;
2070
2071  /* End the scope containing the copied formal parameter variables
2072     and copied LABEL_DECLs.  */
2073
2074  expand_end_bindings (getdecls (), 1, 1);
2075  block = poplevel (1, 1, 0);
2076  BLOCK_ABSTRACT_ORIGIN (block) = (DECL_ABSTRACT_ORIGIN (fndecl) == NULL
2077                                   ? fndecl : DECL_ABSTRACT_ORIGIN (fndecl));
2078  poplevel (0, 0, 0);
2079
2080  /* Must mark the line number note after inlined functions as a repeat, so
2081     that the test coverage code can avoid counting the call twice.  This
2082     just tells the code to ignore the immediately following line note, since
2083     there already exists a copy of this note before the expanded inline call.
2084     This line number note is still needed for debugging though, so we can't
2085     delete it.  */
2086  if (flag_test_coverage)
2087    emit_note (0, NOTE_REPEATED_LINE_NUMBER);
2088
2089  emit_line_note (input_filename, lineno);
2090
2091  if (structure_value_addr)
2092    {
2093      target = gen_rtx (MEM, TYPE_MODE (type),
2094                        memory_address (TYPE_MODE (type), structure_value_addr));
2095      MEM_IN_STRUCT_P (target) = 1;
2096    }
2097
2098  /* Make sure we free the things we explicitly allocated with xmalloc.  */
2099  if (real_label_map)
2100    free (real_label_map);
2101
2102  return target;
2103}
2104
2105/* Given a chain of PARM_DECLs, ARGS, copy each decl into a VAR_DECL,
2106   push all of those decls and give each one the corresponding home.  */
2107
2108static void
2109integrate_parm_decls (args, map, arg_vector)
2110     tree args;
2111     struct inline_remap *map;
2112     rtvec arg_vector;
2113{
2114  register tree tail;
2115  register int i;
2116
2117  for (tail = args, i = 0; tail; tail = TREE_CHAIN (tail), i++)
2118    {
2119      register tree decl = build_decl (VAR_DECL, DECL_NAME (tail),
2120                                       TREE_TYPE (tail));
2121      rtx new_decl_rtl
2122        = copy_rtx_and_substitute (RTVEC_ELT (arg_vector, i), map);
2123
2124      DECL_ARG_TYPE (decl) = DECL_ARG_TYPE (tail);
2125      /* We really should be setting DECL_INCOMING_RTL to something reasonable
2126         here, but that's going to require some more work.  */
2127      /* DECL_INCOMING_RTL (decl) = ?; */
2128      /* These args would always appear unused, if not for this.  */
2129      TREE_USED (decl) = 1;
2130      /* Prevent warning for shadowing with these.  */
2131      DECL_ABSTRACT_ORIGIN (decl) = tail;
2132      pushdecl (decl);
2133      /* Fully instantiate the address with the equivalent form so that the
2134         debugging information contains the actual register, instead of the
2135         virtual register.   Do this by not passing an insn to
2136         subst_constants.  */
2137      subst_constants (&new_decl_rtl, NULL_RTX, map);
2138      apply_change_group ();
2139      DECL_RTL (decl) = new_decl_rtl;
2140    }
2141}
2142
2143/* Given a BLOCK node LET, push decls and levels so as to construct in the
2144   current function a tree of contexts isomorphic to the one that is given.
2145
2146   LEVEL indicates how far down into the BLOCK tree is the node we are
2147   currently traversing.  It is always zero except for recursive calls.
2148
2149   MAP, if nonzero, is a pointer to an inline_remap map which indicates how
2150   registers used in the DECL_RTL field should be remapped.  If it is zero,
2151   no mapping is necessary.  */
2152
2153static void
2154integrate_decl_tree (let, level, map)
2155     tree let;
2156     int level;
2157     struct inline_remap *map;
2158{
2159  tree t, node;
2160
2161  if (level > 0)
2162    pushlevel (0);
2163 
2164  for (t = BLOCK_VARS (let); t; t = TREE_CHAIN (t))
2165    {
2166      tree d;
2167
2168      push_obstacks_nochange ();
2169      saveable_allocation ();
2170      d = copy_node (t);
2171      pop_obstacks ();
2172
2173      if (DECL_RTL (t) != 0)
2174        {
2175          DECL_RTL (d) = copy_rtx_and_substitute (DECL_RTL (t), map);
2176          /* Fully instantiate the address with the equivalent form so that the
2177             debugging information contains the actual register, instead of the
2178             virtual register.   Do this by not passing an insn to
2179             subst_constants.  */
2180          subst_constants (&DECL_RTL (d), NULL_RTX, map);
2181          apply_change_group ();
2182        }
2183      /* These args would always appear unused, if not for this.  */
2184      TREE_USED (d) = 1;
2185      /* Prevent warning for shadowing with these.  */
2186      DECL_ABSTRACT_ORIGIN (d) = t;
2187
2188      if (DECL_LANG_SPECIFIC (d))
2189        copy_lang_decl (d);
2190
2191      pushdecl (d);
2192    }
2193
2194  for (t = BLOCK_SUBBLOCKS (let); t; t = TREE_CHAIN (t))
2195    integrate_decl_tree (t, level + 1, map);
2196
2197  if (level > 0)
2198    {
2199      node = poplevel (1, 0, 0);
2200      if (node)
2201        {
2202          TREE_USED (node) = TREE_USED (let);
2203          BLOCK_ABSTRACT_ORIGIN (node) = let;
2204        }
2205    }
2206}
2207
2208/* Given a BLOCK node LET, search for all DECL_RTL fields, and pass them
2209   through save_constants.  */
2210
2211static void
2212save_constants_in_decl_trees (let)
2213     tree let;
2214{
2215  tree t;
2216
2217  for (t = BLOCK_VARS (let); t; t = TREE_CHAIN (t))
2218    if (DECL_RTL (t) != 0)
2219      save_constants (&DECL_RTL (t));
2220
2221  for (t = BLOCK_SUBBLOCKS (let); t; t = TREE_CHAIN (t))
2222    save_constants_in_decl_trees (t);
2223}
2224
2225/* Create a new copy of an rtx.
2226   Recursively copies the operands of the rtx,
2227   except for those few rtx codes that are sharable.
2228
2229   We always return an rtx that is similar to the incoming rtx, with the
2230   exception of possibly changing a REG to a SUBREG or vice versa.  No
2231   rtl is ever emitted.
2232
2233   Handle constants that need to be placed in the constant pool by
2234   calling `force_const_mem'.  */
2235
2236rtx
2237copy_rtx_and_substitute (orig, map)
2238     register rtx orig;
2239     struct inline_remap *map;
2240{
2241  register rtx copy, temp;
2242  register int i, j;
2243  register RTX_CODE code;
2244  register enum machine_mode mode;
2245  register char *format_ptr;
2246  int regno;
2247
2248  if (orig == 0)
2249    return 0;
2250
2251  code = GET_CODE (orig);
2252  mode = GET_MODE (orig);
2253
2254  switch (code)
2255    {
2256    case REG:
2257      /* If the stack pointer register shows up, it must be part of
2258         stack-adjustments (*not* because we eliminated the frame pointer!).
2259         Small hard registers are returned as-is.  Pseudo-registers
2260         go through their `reg_map'.  */
2261      regno = REGNO (orig);
2262      if (regno <= LAST_VIRTUAL_REGISTER)
2263        {
2264          /* Some hard registers are also mapped,
2265             but others are not translated.  */
2266          if (map->reg_map[regno] != 0)
2267            return map->reg_map[regno];
2268
2269          /* If this is the virtual frame pointer, make space in current
2270             function's stack frame for the stack frame of the inline function.
2271
2272             Copy the address of this area into a pseudo.  Map
2273             virtual_stack_vars_rtx to this pseudo and set up a constant
2274             equivalence for it to be the address.  This will substitute the
2275             address into insns where it can be substituted and use the new
2276             pseudo where it can't.  */
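          /* Roughly: a reference such as
             (plus virtual_stack_vars_rtx (const_int -8)) in the inline
             body should become the same offset from the address of the
             block allocated below, either via the constant equivalence
             or computed off the new pseudo.  */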
2277          if (regno == VIRTUAL_STACK_VARS_REGNUM)
2278            {
2279              rtx loc, seq;
2280              int size = DECL_FRAME_SIZE (map->fndecl);
2281
2282#ifdef FRAME_GROWS_DOWNWARD
2283              /* In this case, virtual_stack_vars_rtx points to one byte
2284                 higher than the top of the frame area.  So make sure we
2285                 allocate a big enough chunk to keep the frame pointer
2286                 aligned like a real one.  */
2287              size = CEIL_ROUND (size, BIGGEST_ALIGNMENT / BITS_PER_UNIT);
2288#endif
2289              start_sequence ();
2290              loc = assign_stack_temp (BLKmode, size, 1);
2291              loc = XEXP (loc, 0);
2292#ifdef FRAME_GROWS_DOWNWARD
2293              /* In this case, virtual_stack_vars_rtx points to one byte
2294                 higher than the top of the frame area.  So compute the offset
2295                 to one byte higher than our substitute frame.  */
2296              loc = plus_constant (loc, size);
2297#endif
2298              map->reg_map[regno] = temp
2299                = force_reg (Pmode, force_operand (loc, NULL_RTX));
2300
2301#ifdef STACK_BOUNDARY
2302              mark_reg_pointer (map->reg_map[regno],
2303                                STACK_BOUNDARY / BITS_PER_UNIT);
2304#endif
2305
2306              if (REGNO (temp) < map->const_equiv_map_size)
2307                {
2308                  map->const_equiv_map[REGNO (temp)] = loc;
2309                  map->const_age_map[REGNO (temp)] = CONST_AGE_PARM;
2310                }
2311
2312              seq = gen_sequence ();
2313              end_sequence ();
2314              emit_insn_after (seq, map->insns_at_start);
2315              return temp;
2316            }
2317          else if (regno == VIRTUAL_INCOMING_ARGS_REGNUM)
2318            {
2319              /* Do the same for a block to contain any arguments referenced
2320                 in memory.  */
2321              rtx loc, seq;
2322              int size = FUNCTION_ARGS_SIZE (DECL_SAVED_INSNS (map->fndecl));
2323
2324              start_sequence ();
2325              loc = assign_stack_temp (BLKmode, size, 1);
2326              loc = XEXP (loc, 0);
2327              /* When arguments grow downward, the virtual incoming
2328                 args pointer points to the top of the argument block,
2329                 so the remapped location better do the same.  */
2330#ifdef ARGS_GROW_DOWNWARD
2331              loc = plus_constant (loc, size);
2332#endif
2333              map->reg_map[regno] = temp
2334                = force_reg (Pmode, force_operand (loc, NULL_RTX));
2335
2336#ifdef STACK_BOUNDARY
2337              mark_reg_pointer (map->reg_map[regno],
2338                                STACK_BOUNDARY / BITS_PER_UNIT);
2339#endif
2340
2341              if (REGNO (temp) < map->const_equiv_map_size)
2342                {
2343                  map->const_equiv_map[REGNO (temp)] = loc;
2344                  map->const_age_map[REGNO (temp)] = CONST_AGE_PARM;
2345                }
2346
2347              seq = gen_sequence ();
2348              end_sequence ();
2349              emit_insn_after (seq, map->insns_at_start);
2350              return temp;
2351            }
2352          else if (REG_FUNCTION_VALUE_P (orig))
2353            {
2354              /* This is a reference to the function return value.  If
2355                 the function doesn't have a return value, error.  If the
2356                 mode doesn't agree, make a SUBREG.  */
2357              if (map->inline_target == 0)
2358                /* Must be unrolling loops or replicating code if we
2359                   reach here, so return the register unchanged.  */
2360                return orig;
2361              else if (mode != GET_MODE (map->inline_target))
2362                return gen_lowpart (mode, map->inline_target);
2363              else
2364                return map->inline_target;
2365            }
2366          return orig;
2367        }
2368      if (map->reg_map[regno] == NULL)
2369        {
2370          map->reg_map[regno] = gen_reg_rtx (mode);
2371          REG_USERVAR_P (map->reg_map[regno]) = REG_USERVAR_P (orig);
2372          REG_LOOP_TEST_P (map->reg_map[regno]) = REG_LOOP_TEST_P (orig);
2373          RTX_UNCHANGING_P (map->reg_map[regno]) = RTX_UNCHANGING_P (orig);
2374          /* A reg with REG_FUNCTION_VALUE_P true will never reach here.  */
2375
2376          if (map->regno_pointer_flag[regno])
2377            mark_reg_pointer (map->reg_map[regno],
2378                              map->regno_pointer_align[regno]);
2379        }
2380      return map->reg_map[regno];
2381
2382    case SUBREG:
2383      copy = copy_rtx_and_substitute (SUBREG_REG (orig), map);
2384      /* SUBREG is ordinary, but don't make nested SUBREGs.  */
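      /* The SUBREG_WORDs simply add: e.g. if ORIG is (subreg:SI X 1)
         and the copy of X is itself (subreg:DI (reg:TI r) 2), the
         result is (subreg:SI (reg:TI r) 3).  */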
2385      if (GET_CODE (copy) == SUBREG)
2386        return gen_rtx (SUBREG, GET_MODE (orig), SUBREG_REG (copy),
2387                        SUBREG_WORD (orig) + SUBREG_WORD (copy));
2388      else if (GET_CODE (copy) == CONCAT)
2389        return (subreg_realpart_p (orig) ? XEXP (copy, 0) : XEXP (copy, 1));
2390      else
2391        return gen_rtx (SUBREG, GET_MODE (orig), copy,
2392                        SUBREG_WORD (orig));
2393
2394    case ADDRESSOF:
2395      copy = gen_rtx (ADDRESSOF, mode,
2396                      copy_rtx_and_substitute (XEXP (orig, 0), map));
2397      SET_ADDRESSOF_DECL (copy, ADDRESSOF_DECL (orig));
2398      regno = ADDRESSOF_REGNO (orig);
2399      if (map->reg_map[regno])
2400        regno = REGNO (map->reg_map[regno]);
2401      else if (regno > LAST_VIRTUAL_REGISTER)
2402        {
2403          temp = XEXP (orig, 0);
2404          map->reg_map[regno] = gen_reg_rtx (GET_MODE (temp));
2405          REG_USERVAR_P (map->reg_map[regno]) = REG_USERVAR_P (temp);
2406          REG_LOOP_TEST_P (map->reg_map[regno]) = REG_LOOP_TEST_P (temp);
2407          RTX_UNCHANGING_P (map->reg_map[regno]) = RTX_UNCHANGING_P (temp);
2408          /* A reg with REG_FUNCTION_VALUE_P true will never reach here.  */
2409
2410          if (map->regno_pointer_flag[regno])
2411            mark_reg_pointer (map->reg_map[regno],
2412                              map->regno_pointer_align[regno]);
2413          regno = REGNO (map->reg_map[regno]);
2414        }
2415      ADDRESSOF_REGNO (copy) = regno;
2416      return copy;
2417
2418    case USE:
2419    case CLOBBER:
2420      /* USE and CLOBBER are ordinary, but we convert (use (subreg foo))
2421         to (use foo) if the original insn didn't have a subreg.
2422         Removing the subreg distorts the VAX movstrhi pattern
2423         by changing the mode of an operand.  */
2424      copy = copy_rtx_and_substitute (XEXP (orig, 0), map);
2425      if (GET_CODE (copy) == SUBREG && GET_CODE (XEXP (orig, 0)) != SUBREG)
2426        copy = SUBREG_REG (copy);
2427      return gen_rtx (code, VOIDmode, copy);
2428
2429    case CODE_LABEL:
2430      LABEL_PRESERVE_P (get_label_from_map (map, CODE_LABEL_NUMBER (orig)))
2431        = LABEL_PRESERVE_P (orig);
2432      return get_label_from_map (map, CODE_LABEL_NUMBER (orig));
2433
2434    case LABEL_REF:
2435      copy = gen_rtx (LABEL_REF, mode,
2436                      LABEL_REF_NONLOCAL_P (orig) ? XEXP (orig, 0)
2437                      : get_label_from_map (map,
2438                                            CODE_LABEL_NUMBER (XEXP (orig, 0))));
2439      LABEL_OUTSIDE_LOOP_P (copy) = LABEL_OUTSIDE_LOOP_P (orig);
2440
2441      /* The fact that this label was previously nonlocal does not mean
2442         it still is, so we must check if it is within the range of
2443         this function's labels.  */
2444      LABEL_REF_NONLOCAL_P (copy)
2445        = (LABEL_REF_NONLOCAL_P (orig)
2446           && ! (CODE_LABEL_NUMBER (XEXP (copy, 0)) >= get_first_label_num ()
2447                 && CODE_LABEL_NUMBER (XEXP (copy, 0)) < max_label_num ()));
2448
2449      /* If we have made a nonlocal label local, it means that this
2450         inlined call will be referring to our nonlocal goto handler.
2451         So make sure we create one for this block; we normally would
2452         not since this is not otherwise considered a "call".  */
2453      if (LABEL_REF_NONLOCAL_P (orig) && ! LABEL_REF_NONLOCAL_P (copy))
2454        function_call_count++;
2455
2456      return copy;
2457
2458    case PC:
2459    case CC0:
2460    case CONST_INT:
2461      return orig;
2462
2463    case SYMBOL_REF:
2464      /* Symbols which represent the address of a label stored in the constant
2465         pool must be modified to point to a constant pool entry for the
2466         remapped label.  Otherwise, symbols are returned unchanged.  */
2467      if (CONSTANT_POOL_ADDRESS_P (orig))
2468        {
2469          rtx constant = get_pool_constant (orig);
2470          if (GET_CODE (constant) == LABEL_REF)
2471            return XEXP (force_const_mem (GET_MODE (orig),
2472                                          copy_rtx_and_substitute (constant,
2473                                                                   map)),
2474                         0);
2475        }
2476
2477      return orig;
2478
2479    case CONST_DOUBLE:
2480      /* We have to make a new copy of this CONST_DOUBLE because we don't want
2481         to use the old value of CONST_DOUBLE_MEM.  Also, this may be a
2482         duplicate of a CONST_DOUBLE we have already seen.  */
2483      if (GET_MODE_CLASS (GET_MODE (orig)) == MODE_FLOAT)
2484        {
2485          REAL_VALUE_TYPE d;
2486
2487          REAL_VALUE_FROM_CONST_DOUBLE (d, orig);
2488          return CONST_DOUBLE_FROM_REAL_VALUE (d, GET_MODE (orig));
2489        }
2490      else
2491        return immed_double_const (CONST_DOUBLE_LOW (orig),
2492                                   CONST_DOUBLE_HIGH (orig), VOIDmode);
2493
2494    case CONST:
2495      /* Make new constant pool entry for a constant
2496         that was in the pool of the inline function.  */
2497      if (RTX_INTEGRATED_P (orig))
2498        {
2499          /* If this was an address of a constant pool entry that itself
2500             had to be placed in the constant pool, it might not be a
2501             valid address.  So the recursive call below might turn it
2502             into a register.  In that case, it isn't a constant any
2503             more, so return it.  This has the potential of changing a
2504             MEM into a REG, but we'll assume that it is safe.  */
2505          temp = copy_rtx_and_substitute (XEXP (orig, 0), map);
2506          if (! CONSTANT_P (temp))
2507            return temp;
2508          return validize_mem (force_const_mem (GET_MODE (orig), temp));
2509        }
2510      break;
2511
2512    case ADDRESS:
2513      /* If from constant pool address, make new constant pool entry and
2514         return its address.  */
2515      if (! RTX_INTEGRATED_P (orig))
2516        abort ();
2517
2518      temp
2519        = force_const_mem (GET_MODE (XEXP (orig, 0)),
2520                           copy_rtx_and_substitute (XEXP (XEXP (orig, 0), 0),
2521                                                    map));
2522
2523#if 0
2524      /* Legitimizing the address here is incorrect.
2525
2526         The only ADDRESS rtx's that can reach here are ones created by
2527         save_constants.  Hence the operand of the ADDRESS is always valid
2528         in this position of the instruction, since the original rtx without
2529         the ADDRESS was valid.
2530
2531         The reason we don't legitimize the address here is that on the
2532         Sparc, the caller may have a (high ...) surrounding this ADDRESS.
2533         This code forces the operand of the address to a register, which
2534         fails because we can not take the HIGH part of a register.
2535
2536         Also, change_address may create new registers.  These registers
2537         will not have valid reg_map entries.  This can cause try_constants()
2538         to fail because it assumes that all registers in the rtx have valid
2539         reg_map entries, and it may end up replacing one of these new
2540         registers with junk.  */
2541
2542      if (! memory_address_p (GET_MODE (temp), XEXP (temp, 0)))
2543        temp = change_address (temp, GET_MODE (temp), XEXP (temp, 0));
2544#endif
2545
2546      temp = XEXP (temp, 0);
2547
2548#ifdef POINTERS_EXTEND_UNSIGNED
2549      if (GET_MODE (temp) != GET_MODE (orig))
2550        temp = convert_memory_address (GET_MODE (orig), temp);
2551#endif
2552
2553      return temp;
2554
2555    case ASM_OPERANDS:
2556      /* If a single asm insn contains multiple output operands
2557         then it contains multiple ASM_OPERANDS rtx's that share operand 3.
2558         We must make sure that the copied insn continues to share it.  */
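      /* For instance, an asm with two outputs expands to two
         ASM_OPERANDS rtx's whose operand 3 (the input vector) is one
         shared rtvec; orig_asm_operands_vector, recorded at the bottom
         of this function when the first one is copied, lets us keep the
         copies sharing as well.  */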
2559      if (map->orig_asm_operands_vector == XVEC (orig, 3))
2560        {
2561          copy = rtx_alloc (ASM_OPERANDS);
2562          copy->volatil = orig->volatil;
2563          XSTR (copy, 0) = XSTR (orig, 0);
2564          XSTR (copy, 1) = XSTR (orig, 1);
2565          XINT (copy, 2) = XINT (orig, 2);
2566          XVEC (copy, 3) = map->copy_asm_operands_vector;
2567          XVEC (copy, 4) = map->copy_asm_constraints_vector;
2568          XSTR (copy, 5) = XSTR (orig, 5);
2569          XINT (copy, 6) = XINT (orig, 6);
2570          return copy;
2571        }
2572      break;
2573
2574    case CALL:
2575      /* This is given special treatment because the first
2576         operand of a CALL is a (MEM ...) which may get
2577         forced into a register for cse.  This is undesirable
2578         if function-address cse isn't wanted or if we won't do cse.  */
2579#ifndef NO_FUNCTION_CSE
2580      if (! (optimize && ! flag_no_function_cse))
2581#endif
2582        return gen_rtx (CALL, GET_MODE (orig),
2583                        gen_rtx (MEM, GET_MODE (XEXP (orig, 0)),
2584                                 copy_rtx_and_substitute (XEXP (XEXP (orig, 0), 0), map)),
2585                        copy_rtx_and_substitute (XEXP (orig, 1), map));
2586      break;
2587
2588#if 0
2589      /* Must be ifdefed out for loop unrolling to work.  */
2590    case RETURN:
2591      abort ();
2592#endif
2593
2594    case SET:
2595      /* If this is setting fp or ap, it means that we have a nonlocal goto.
2596         Adjust the setting by the offset of the area we made.
2597         If the nonlocal goto is into the current function,
2598         this will result in unnecessarily bad code, but should work.  */
2599      if (SET_DEST (orig) == virtual_stack_vars_rtx
2600          || SET_DEST (orig) == virtual_incoming_args_rtx)
2601        {
2602          /* In case a translation hasn't occurred already, make one now. */
2603          rtx junk = copy_rtx_and_substitute (SET_DEST (orig), map);
2604          rtx equiv_reg = map->reg_map[REGNO (SET_DEST (orig))];
2605          rtx equiv_loc = map->const_equiv_map[REGNO (equiv_reg)];
2606          HOST_WIDE_INT loc_offset
2607            = GET_CODE (equiv_loc) == REG ? 0 : INTVAL (XEXP (equiv_loc, 1));
2608             
2609          return gen_rtx (SET, VOIDmode, SET_DEST (orig),
2610                          force_operand
2611                          (plus_constant
2612                           (copy_rtx_and_substitute (SET_SRC (orig), map),
2613                            - loc_offset),
2614                           NULL_RTX));
2615        }
2616      break;
2617
2618    case MEM:
2619      copy = rtx_alloc (MEM);
2620      PUT_MODE (copy, mode);
2621      XEXP (copy, 0) = copy_rtx_and_substitute (XEXP (orig, 0), map);
2622      MEM_IN_STRUCT_P (copy) = MEM_IN_STRUCT_P (orig);
2623      MEM_VOLATILE_P (copy) = MEM_VOLATILE_P (orig);
2624
2625      /* If doing function inlining, this MEM might not be const in the
2626         function that it is being inlined into, and thus may not be
2627         unchanging after function inlining.  Constant pool references are
2628         handled elsewhere, so this doesn't lose RTX_UNCHANGING_P bits
2629         for them.  */
2630      if (! map->integrating)
2631        RTX_UNCHANGING_P (copy) = RTX_UNCHANGING_P (orig);
2632
2633      return copy;
2634     
2635    default:
2636      break;
2637    }
2638
2639  copy = rtx_alloc (code);
2640  PUT_MODE (copy, mode);
2641  copy->in_struct = orig->in_struct;
2642  copy->volatil = orig->volatil;
2643  copy->unchanging = orig->unchanging;
2644
2645  format_ptr = GET_RTX_FORMAT (GET_CODE (copy));
2646
2647  for (i = 0; i < GET_RTX_LENGTH (GET_CODE (copy)); i++)
2648    {
2649      switch (*format_ptr++)
2650        {
2651        case '0':
2652          XEXP (copy, i) = XEXP (orig, i);
2653          break;
2654
2655        case 'e':
2656          XEXP (copy, i) = copy_rtx_and_substitute (XEXP (orig, i), map);
2657          break;
2658
2659        case 'u':
2660          /* Change any references to old-insns to point to the
2661             corresponding copied insns.  */
2662          XEXP (copy, i) = map->insn_map[INSN_UID (XEXP (orig, i))];
2663          break;
2664
2665        case 'E':
2666          XVEC (copy, i) = XVEC (orig, i);
2667          if (XVEC (orig, i) != NULL && XVECLEN (orig, i) != 0)
2668            {
2669              XVEC (copy, i) = rtvec_alloc (XVECLEN (orig, i));
2670              for (j = 0; j < XVECLEN (copy, i); j++)
2671                XVECEXP (copy, i, j)
2672                  = copy_rtx_and_substitute (XVECEXP (orig, i, j), map);
2673            }
2674          break;
2675
2676        case 'w':
2677          XWINT (copy, i) = XWINT (orig, i);
2678          break;
2679
2680        case 'i':
2681          XINT (copy, i) = XINT (orig, i);
2682          break;
2683
2684        case 's':
2685          XSTR (copy, i) = XSTR (orig, i);
2686          break;
2687
2688        default:
2689          abort ();
2690        }
2691    }
2692
2693  if (code == ASM_OPERANDS && map->orig_asm_operands_vector == 0)
2694    {
2695      map->orig_asm_operands_vector = XVEC (orig, 3);
2696      map->copy_asm_operands_vector = XVEC (copy, 3);
2697      map->copy_asm_constraints_vector = XVEC (copy, 4);
2698    }
2699
2700  return copy;
2701}
2702
2703/* Substitute known constant values into INSN, if that is valid.  */
2704
2705void
2706try_constants (insn, map)
2707     rtx insn;
2708     struct inline_remap *map;
2709{
2710  int i;
2711
2712  map->num_sets = 0;
2713  subst_constants (&PATTERN (insn), insn, map);
2714
2715  /* Apply the changes if they are valid; otherwise discard them.  */
2716  apply_change_group ();
2717
2718  /* Show we don't know the value of anything stored or clobbered.  */
2719  note_stores (PATTERN (insn), mark_stores);
2720  map->last_pc_value = 0;
2721#ifdef HAVE_cc0
2722  map->last_cc0_value = 0;
2723#endif
2724
2725  /* Set up any constant equivalences made in this insn.  */
2726  for (i = 0; i < map->num_sets; i++)
2727    {
2728      if (GET_CODE (map->equiv_sets[i].dest) == REG)
2729        {
2730          int regno = REGNO (map->equiv_sets[i].dest);
2731
2732          if (regno < map->const_equiv_map_size
2733              && (map->const_equiv_map[regno] == 0
2734                  /* The following clause is a hack to make the case work
2735                     where GNU C++ reassigns a variable to make cse work right.  */
2736                  || ! rtx_equal_p (map->const_equiv_map[regno],
2737                                    map->equiv_sets[i].equiv)))
2738            {
2739              map->const_equiv_map[regno] = map->equiv_sets[i].equiv;
2740              map->const_age_map[regno] = map->const_age;
2741            }
2742        }
2743      else if (map->equiv_sets[i].dest == pc_rtx)
2744        map->last_pc_value = map->equiv_sets[i].equiv;
2745#ifdef HAVE_cc0
2746      else if (map->equiv_sets[i].dest == cc0_rtx)
2747        map->last_cc0_value = map->equiv_sets[i].equiv;
2748#endif
2749    }
2750}
2751
2752/* Substitute known constants for pseudo regs in the contents of LOC,
2753   which are part of INSN.
2754   If INSN is zero, the substitution should always be done (this is used to
2755   update DECL_RTL).
2756   These changes are taken out by try_constants if the result is not valid.
2757
2758   Note that we are more concerned with determining when the result of a SET
2759   is a constant, for further propagation, than actually inserting constants
2760   into insns; cse will do the latter task better.
2761
2762   This function is also used to adjust the addresses of items previously addressed
2763   via the virtual stack variable or virtual incoming arguments registers.  */

static void
subst_constants (loc, insn, map)
     rtx *loc;
     rtx insn;
     struct inline_remap *map;
{
  rtx x = *loc;
  register int i;
  register enum rtx_code code;
  register char *format_ptr;
  int num_changes = num_validated_changes ();
  rtx new = 0;
  enum machine_mode op0_mode;

  code = GET_CODE (x);

  switch (code)
    {
    case PC:
    case CONST_INT:
    case CONST_DOUBLE:
    case SYMBOL_REF:
    case CONST:
    case LABEL_REF:
    case ADDRESS:
      return;

#ifdef HAVE_cc0
    case CC0:
      validate_change (insn, loc, map->last_cc0_value, 1);
      return;
#endif

    case USE:
    case CLOBBER:
      /* The only thing we can do with a USE or CLOBBER is possibly do
         some substitutions in a MEM within it.  */
      if (GET_CODE (XEXP (x, 0)) == MEM)
        subst_constants (&XEXP (XEXP (x, 0), 0), insn, map);
      return;

    case REG:
      /* Substitute for parms and known constants.  Don't replace
         hard regs used as user variables with constants.  */
      {
        int regno = REGNO (x);

        if (! (regno < FIRST_PSEUDO_REGISTER && REG_USERVAR_P (x))
            && regno < map->const_equiv_map_size
            && map->const_equiv_map[regno] != 0
            && map->const_age_map[regno] >= map->const_age)
          validate_change (insn, loc, map->const_equiv_map[regno], 1);
        return;
      }

    case SUBREG:
      /* SUBREG applied to something other than a reg
         should be treated as ordinary, since that must
         be a special hack and we don't know how to treat it specially.
         Consider for example mulsidi3 in m68k.md.
         Ordinary SUBREG of a REG needs this special treatment.  */
      if (GET_CODE (SUBREG_REG (x)) == REG)
        {
          rtx inner = SUBREG_REG (x);
          rtx new = 0;

          /* We can't call subst_constants on &SUBREG_REG (x) because any
             constant or SUBREG wouldn't be valid inside our SUBREG.  Instead,
             see what is inside, try to form the new SUBREG and see if that is
             valid.  We handle two cases: extracting a full word in an
             integral mode and extracting the low part.  */
          subst_constants (&inner, NULL_RTX, map);

          if (GET_MODE_CLASS (GET_MODE (x)) == MODE_INT
              && GET_MODE_SIZE (GET_MODE (x)) == UNITS_PER_WORD
              && GET_MODE (SUBREG_REG (x)) != VOIDmode)
            new = operand_subword (inner, SUBREG_WORD (x), 0,
                                   GET_MODE (SUBREG_REG (x)));

          cancel_changes (num_changes);
          if (new == 0 && subreg_lowpart_p (x))
            new = gen_lowpart_common (GET_MODE (x), inner);

          if (new)
            validate_change (insn, loc, new, 1);

          return;
        }
      break;

    case MEM:
      subst_constants (&XEXP (x, 0), insn, map);

      /* If a memory address got spoiled, change it back.  */
      if (insn != 0 && num_validated_changes () != num_changes
          && !memory_address_p (GET_MODE (x), XEXP (x, 0)))
        cancel_changes (num_changes);
      return;

    case SET:
      {
        /* Substitute constants in our source, and in any arguments to a
           complex (e.g., ZERO_EXTRACT) destination, but not in the
           destination itself.  */
        rtx *dest_loc = &SET_DEST (x);
        rtx dest = *dest_loc;
        rtx src, tem;

        subst_constants (&SET_SRC (x), insn, map);
        src = SET_SRC (x);

        while (GET_CODE (*dest_loc) == ZERO_EXTRACT
               || GET_CODE (*dest_loc) == SUBREG
               || GET_CODE (*dest_loc) == STRICT_LOW_PART)
          {
            if (GET_CODE (*dest_loc) == ZERO_EXTRACT)
              {
                subst_constants (&XEXP (*dest_loc, 1), insn, map);
                subst_constants (&XEXP (*dest_loc, 2), insn, map);
              }
            dest_loc = &XEXP (*dest_loc, 0);
          }

        /* Do substitute in the address of a destination in memory.  */
        if (GET_CODE (*dest_loc) == MEM)
          subst_constants (&XEXP (*dest_loc, 0), insn, map);

        /* Check for the case of DEST a SUBREG, both it and the underlying
           register no larger than one word, and the SUBREG at least as wide
           as the underlying register.  In that case, we are really setting
           the underlying register to the source converted to the mode of
           the underlying register.  So indicate that.  */
        if (GET_CODE (dest) == SUBREG
            && GET_MODE_SIZE (GET_MODE (dest)) <= UNITS_PER_WORD
            && GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest))) <= UNITS_PER_WORD
            && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest)))
                      <= GET_MODE_SIZE (GET_MODE (dest)))
            && (tem = gen_lowpart_if_possible (GET_MODE (SUBREG_REG (dest)),
                                               src)))
          src = tem, dest = SUBREG_REG (dest);

        /* If storing a recognizable value, save it for later recording.  */
        if ((map->num_sets < MAX_RECOG_OPERANDS)
            && (CONSTANT_P (src)
                || (GET_CODE (src) == REG
                    && (REGNO (src) == VIRTUAL_INCOMING_ARGS_REGNUM
                        || REGNO (src) == VIRTUAL_STACK_VARS_REGNUM))
                || (GET_CODE (src) == PLUS
                    && GET_CODE (XEXP (src, 0)) == REG
                    && (REGNO (XEXP (src, 0)) == VIRTUAL_INCOMING_ARGS_REGNUM
                        || REGNO (XEXP (src, 0)) == VIRTUAL_STACK_VARS_REGNUM)
                    && CONSTANT_P (XEXP (src, 1)))
                || GET_CODE (src) == COMPARE
#ifdef HAVE_cc0
                || dest == cc0_rtx
#endif
                || (dest == pc_rtx
                    && (src == pc_rtx || GET_CODE (src) == RETURN
                        || GET_CODE (src) == LABEL_REF))))
          {
            /* Normally, this copy won't do anything.  But, if SRC is a
               COMPARE it will cause us to save the COMPARE with any
               constants substituted, which is what we want for later.  */
            map->equiv_sets[map->num_sets].equiv = copy_rtx (src);
            map->equiv_sets[map->num_sets++].dest = dest;
          }
      }
      return;

    default:
      break;
    }

  format_ptr = GET_RTX_FORMAT (code);

  /* If the first operand is an expression, save its mode for later.  */
  if (*format_ptr == 'e')
    op0_mode = GET_MODE (XEXP (x, 0));

  for (i = 0; i < GET_RTX_LENGTH (code); i++)
    {
      switch (*format_ptr++)
        {
        case '0':
          break;

        case 'e':
          if (XEXP (x, i))
            subst_constants (&XEXP (x, i), insn, map);
          break;

        case 'u':
        case 'i':
        case 's':
        case 'w':
          break;

        case 'E':
          if (XVEC (x, i) != NULL && XVECLEN (x, i) != 0)
            {
              int j;
              for (j = 0; j < XVECLEN (x, i); j++)
                subst_constants (&XVECEXP (x, i, j), insn, map);
            }
          break;

        default:
          abort ();
        }
    }

  /* If this is a commutative operation, move a constant to the second
     operand unless the second operand is already a CONST_INT.  */
  if ((GET_RTX_CLASS (code) == 'c' || code == NE || code == EQ)
      && CONSTANT_P (XEXP (x, 0)) && GET_CODE (XEXP (x, 1)) != CONST_INT)
    {
      rtx tem = XEXP (x, 0);
      validate_change (insn, &XEXP (x, 0), XEXP (x, 1), 1);
      validate_change (insn, &XEXP (x, 1), tem, 1);
    }

  /* Simplify the expression in case we put in some constants.  */
  switch (GET_RTX_CLASS (code))
    {
    case '1':
      new = simplify_unary_operation (code, GET_MODE (x),
                                      XEXP (x, 0), op0_mode);
      break;

    case '<':
      {
        enum machine_mode op_mode = GET_MODE (XEXP (x, 0));
        if (op_mode == VOIDmode)
          op_mode = GET_MODE (XEXP (x, 1));
        new = simplify_relational_operation (code, op_mode,
                                             XEXP (x, 0), XEXP (x, 1));
#ifdef FLOAT_STORE_FLAG_VALUE
        if (new != 0 && GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT)
          new = ((new == const0_rtx) ? CONST0_RTX (GET_MODE (x))
                 : CONST_DOUBLE_FROM_REAL_VALUE (FLOAT_STORE_FLAG_VALUE,
                                                 GET_MODE (x)));
#endif
        break;
      }

    case '2':
    case 'c':
      new = simplify_binary_operation (code, GET_MODE (x),
                                       XEXP (x, 0), XEXP (x, 1));
      break;

    case 'b':
    case '3':
      new = simplify_ternary_operation (code, GET_MODE (x), op0_mode,
                                        XEXP (x, 0), XEXP (x, 1), XEXP (x, 2));
      break;
    }

  if (new)
    validate_change (insn, loc, new, 1);
}
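
/* A minimal sketch, with hypothetical operands, of how the
   canonicalization and simplification above combine.  Suppose both
   operands of (plus:SI (reg:SI 100) (reg:SI 101)) are known constants
   in MAP, so substitution leaves (plus:SI (const_int 4) (const_int 6)):  */
#if 0
  rtx folded = simplify_binary_operation (PLUS, SImode,
                                          GEN_INT (4), GEN_INT (6));
  /* FOLDED is (const_int 10), which validate_change queues at LOC;
     apply_change_group in try_constants later accepts or discards it.  */
#endif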

/* Show that the registers modified no longer contain known constants.
   We are called from note_stores with parts of the new insn.  */

void
mark_stores (dest, x)
     rtx dest;
     rtx x;
{
  int regno = -1;
  enum machine_mode mode;

  /* DEST is always the innermost thing set, except in the case of
     SUBREGs of hard registers.  */

  if (GET_CODE (dest) == REG)
    regno = REGNO (dest), mode = GET_MODE (dest);
  else if (GET_CODE (dest) == SUBREG && GET_CODE (SUBREG_REG (dest)) == REG)
    {
      regno = REGNO (SUBREG_REG (dest)) + SUBREG_WORD (dest);
      mode = GET_MODE (SUBREG_REG (dest));
    }

  if (regno >= 0)
    {
      int last_reg = (regno >= FIRST_PSEUDO_REGISTER ? regno
                      : regno + HARD_REGNO_NREGS (regno, mode) - 1);
      int i;

      /* Ignore virtual stack var or virtual arg register since those
         are handled separately.  */
      if (regno != VIRTUAL_INCOMING_ARGS_REGNUM
          && regno != VIRTUAL_STACK_VARS_REGNUM)
        for (i = regno; i <= last_reg; i++)
          if (i < global_const_equiv_map_size)
            global_const_equiv_map[i] = 0;
    }
}
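
/* A minimal sketch of the invalidation above; the register number is
   hypothetical and the fragment is illustrative only.  */
#if 0
  /* Before the copied insn, pseudo 104 was known to be constant.  */
  global_const_equiv_map[104] = GEN_INT (8);
  /* The insn stores something new into reg 104 ...  */
  note_stores (PATTERN (insn), mark_stores);
  /* ... so the stale equivalence has been cleared:
     global_const_equiv_map[104] is now 0.  */
#endif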

/* If any CONST expressions with RTX_INTEGRATED_P are present in the rtx
   pointed to by PX, they represent constants in the constant pool.
   Replace these with a new memory reference obtained from force_const_mem.
   Similarly, ADDRESS expressions with RTX_INTEGRATED_P represent the
   address of a constant pool entry.  Replace them with the address of
   a new constant pool entry obtained from force_const_mem.  */

static void
restore_constants (px)
     rtx *px;
{
  rtx x = *px;
  int i, j;
  char *fmt;

  if (x == 0)
    return;

  if (GET_CODE (x) == CONST_DOUBLE)
    {
      /* We have to make a new CONST_DOUBLE to ensure that we account for
         it correctly.  Using the old CONST_DOUBLE_MEM data is wrong.  */
      if (GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT)
        {
          REAL_VALUE_TYPE d;

          REAL_VALUE_FROM_CONST_DOUBLE (d, x);
          *px = CONST_DOUBLE_FROM_REAL_VALUE (d, GET_MODE (x));
        }
      else
        *px = immed_double_const (CONST_DOUBLE_LOW (x), CONST_DOUBLE_HIGH (x),
                                  VOIDmode);
    }

  else if (RTX_INTEGRATED_P (x) && GET_CODE (x) == CONST)
    {
      restore_constants (&XEXP (x, 0));
      *px = validize_mem (force_const_mem (GET_MODE (x), XEXP (x, 0)));
    }
  else if (RTX_INTEGRATED_P (x) && GET_CODE (x) == SUBREG)
    {
      /* This must be (subreg/i:M1 (const/i:M2 ...) 0).  */
      rtx new = XEXP (SUBREG_REG (x), 0);

      restore_constants (&new);
      new = force_const_mem (GET_MODE (SUBREG_REG (x)), new);
      PUT_MODE (new, GET_MODE (x));
      *px = validize_mem (new);
    }
  else if (RTX_INTEGRATED_P (x) && GET_CODE (x) == ADDRESS)
    {
      rtx new = XEXP (force_const_mem (GET_MODE (XEXP (x, 0)),
                                       XEXP (XEXP (x, 0), 0)),
                      0);

#ifdef POINTERS_EXTEND_UNSIGNED
      if (GET_MODE (new) != GET_MODE (x))
        new = convert_memory_address (GET_MODE (x), new);
#endif

      *px = new;
    }
  else
    {
      fmt = GET_RTX_FORMAT (GET_CODE (x));
      for (i = 0; i < GET_RTX_LENGTH (GET_CODE (x)); i++)
        {
          switch (*fmt++)
            {
            case 'E':
              for (j = 0; j < XVECLEN (x, i); j++)
                restore_constants (&XVECEXP (x, i, j));
              break;

            case 'e':
              restore_constants (&XEXP (x, i));
              break;
            }
        }
    }
}
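
/* An illustrative transformation, with a hypothetical pool label: a
   reference that save_constants recorded as

        (const/i:DF (const_double:DF ...))

   with RTX_INTEGRATED_P set comes back from force_const_mem as a fresh
   pool reference such as

        (mem/u:DF (symbol_ref:SI "*.LC5"))

   so the function being output gets an entry in its own constant pool
   instead of pointing into the dead pool of the function it was
   inlined from.  */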

/* Given a pointer to some BLOCK node, if the BLOCK_ABSTRACT_ORIGIN for the
   given BLOCK node is NULL, set the BLOCK_ABSTRACT_ORIGIN for the node so
   that it points to the node itself, thus indicating that the node is its
   own (abstract) origin.  Additionally, if the BLOCK_ABSTRACT_ORIGIN for
   the given node is NULL, recursively descend the decl/block tree which
   it is the root of, and for each other ..._DECL or BLOCK node contained
   therein whose DECL_ABSTRACT_ORIGINs or BLOCK_ABSTRACT_ORIGINs are also
   still NULL, set *their* DECL_ABSTRACT_ORIGIN or BLOCK_ABSTRACT_ORIGIN
   values to point to themselves.  */

static void
set_block_origin_self (stmt)
     register tree stmt;
{
  if (BLOCK_ABSTRACT_ORIGIN (stmt) == NULL_TREE)
    {
      BLOCK_ABSTRACT_ORIGIN (stmt) = stmt;

      {
        register tree local_decl;

        for (local_decl = BLOCK_VARS (stmt);
             local_decl != NULL_TREE;
             local_decl = TREE_CHAIN (local_decl))
          set_decl_origin_self (local_decl);    /* Potential recursion.  */
      }

      {
        register tree subblock;

        for (subblock = BLOCK_SUBBLOCKS (stmt);
             subblock != NULL_TREE;
             subblock = BLOCK_CHAIN (subblock))
          set_block_origin_self (subblock);     /* Recurse.  */
      }
    }
}

/* Given a pointer to some ..._DECL node, if the DECL_ABSTRACT_ORIGIN for
   the given ..._DECL node is NULL, set the DECL_ABSTRACT_ORIGIN for the
   node so that it points to the node itself, thus indicating that the
   node represents its own (abstract) origin.  Additionally, if the
   DECL_ABSTRACT_ORIGIN for the given node is NULL, recursively descend
   the decl/block tree of which the given node is the root, and for
   each other ..._DECL or BLOCK node contained therein whose
   DECL_ABSTRACT_ORIGINs or BLOCK_ABSTRACT_ORIGINs are also still NULL,
   set *their* DECL_ABSTRACT_ORIGIN or BLOCK_ABSTRACT_ORIGIN values to
   point to themselves.  */

static void
set_decl_origin_self (decl)
     register tree decl;
{
  if (DECL_ABSTRACT_ORIGIN (decl) == NULL_TREE)
    {
      DECL_ABSTRACT_ORIGIN (decl) = decl;
      if (TREE_CODE (decl) == FUNCTION_DECL)
        {
          register tree arg;

          for (arg = DECL_ARGUMENTS (decl); arg; arg = TREE_CHAIN (arg))
            DECL_ABSTRACT_ORIGIN (arg) = arg;
          if (DECL_INITIAL (decl) != NULL_TREE
              && DECL_INITIAL (decl) != error_mark_node)
            set_block_origin_self (DECL_INITIAL (decl));
        }
    }
}

/* Given a pointer to some BLOCK node, and a boolean value to set the
   "abstract" flags to, set that value into the BLOCK_ABSTRACT flag for
   the given block, and for all local decls and all local sub-blocks
   (recursively) which are contained therein.  */

static void
set_block_abstract_flags (stmt, setting)
     register tree stmt;
     register int setting;
{
  register tree local_decl;
  register tree subblock;

  BLOCK_ABSTRACT (stmt) = setting;

  for (local_decl = BLOCK_VARS (stmt);
       local_decl != NULL_TREE;
       local_decl = TREE_CHAIN (local_decl))
    set_decl_abstract_flags (local_decl, setting);

  for (subblock = BLOCK_SUBBLOCKS (stmt);
       subblock != NULL_TREE;
       subblock = BLOCK_CHAIN (subblock))
    set_block_abstract_flags (subblock, setting);
}

/* Given a pointer to some ..._DECL node, and a boolean value to set the
   "abstract" flags to, set that value into the DECL_ABSTRACT flag for the
   given decl, and (in the case where the decl is a FUNCTION_DECL) also
   set the abstract flags for all of the parameters, local vars, local
   blocks and sub-blocks (recursively) to the same setting.  */

void
set_decl_abstract_flags (decl, setting)
     register tree decl;
     register int setting;
{
  DECL_ABSTRACT (decl) = setting;
  if (TREE_CODE (decl) == FUNCTION_DECL)
    {
      register tree arg;

      for (arg = DECL_ARGUMENTS (decl); arg; arg = TREE_CHAIN (arg))
        DECL_ABSTRACT (arg) = setting;
      if (DECL_INITIAL (decl) != NULL_TREE
          && DECL_INITIAL (decl) != error_mark_node)
        set_block_abstract_flags (DECL_INITIAL (decl), setting);
    }
}
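
/* A minimal usage sketch, assuming a debug-info writer wants to emit
   the abstract instance of an inline function; output_abstract_instance
   is a hypothetical emitter standing in for the real DWARF routines.  */
#if 0
  set_decl_abstract_flags (fndecl, 1);
  output_abstract_instance (fndecl);
  set_decl_abstract_flags (fndecl, 0);
#endif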

/* Output the assembly language code for the function FNDECL
   from its DECL_SAVED_INSNS.  Used for inline functions that are output
   at end of compilation instead of where they came in the source.  */

void
output_inline_function (fndecl)
     tree fndecl;
{
  rtx head;
  rtx last;
  int save_flag_no_inline = flag_no_inline;

  if (output_bytecode)
    {
      warning ("`inline' ignored for bytecode output");
      return;
    }

  /* Things we allocate from here on are part of this function, not
     permanent.  */
  temporary_allocation ();

  head = DECL_SAVED_INSNS (fndecl);
  current_function_decl = fndecl;

  /* This call is only used to initialize global variables.  */
  init_function_start (fndecl, "lossage", 1);

  /* Redo parameter determinations in case the FUNCTION_...
     macros took machine-specific actions that need to be redone.  */
  assign_parms (fndecl, 1);

  /* Set stack frame size.  */
  assign_stack_local (BLKmode, DECL_FRAME_SIZE (fndecl), 0);

  /* The first assignment is a bit of a lie (the array may be larger),
     but that doesn't matter much, and it isn't worth saving the actual
     bound.  */
  reg_rtx_no = regno_pointer_flag_length = MAX_REGNUM (head);
  regno_reg_rtx = (rtx *) INLINE_REGNO_REG_RTX (head);
  regno_pointer_flag = INLINE_REGNO_POINTER_FLAG (head);
  regno_pointer_align = INLINE_REGNO_POINTER_ALIGN (head);
  max_parm_reg = MAX_PARMREG (head);
  parm_reg_stack_loc = (rtx *) PARMREG_STACK_LOC (head);

  stack_slot_list = STACK_SLOT_LIST (head);
  forced_labels = FORCED_LABELS (head);

  if (FUNCTION_FLAGS (head) & FUNCTION_FLAGS_CALLS_ALLOCA)
    current_function_calls_alloca = 1;

  if (FUNCTION_FLAGS (head) & FUNCTION_FLAGS_CALLS_SETJMP)
    current_function_calls_setjmp = 1;

  if (FUNCTION_FLAGS (head) & FUNCTION_FLAGS_CALLS_LONGJMP)
    current_function_calls_longjmp = 1;

  if (FUNCTION_FLAGS (head) & FUNCTION_FLAGS_RETURNS_STRUCT)
    current_function_returns_struct = 1;

  if (FUNCTION_FLAGS (head) & FUNCTION_FLAGS_RETURNS_PCC_STRUCT)
    current_function_returns_pcc_struct = 1;

  if (FUNCTION_FLAGS (head) & FUNCTION_FLAGS_NEEDS_CONTEXT)
    current_function_needs_context = 1;

  if (FUNCTION_FLAGS (head) & FUNCTION_FLAGS_HAS_NONLOCAL_LABEL)
    current_function_has_nonlocal_label = 1;

  if (FUNCTION_FLAGS (head) & FUNCTION_FLAGS_RETURNS_POINTER)
    current_function_returns_pointer = 1;

  if (FUNCTION_FLAGS (head) & FUNCTION_FLAGS_USES_CONST_POOL)
    current_function_uses_const_pool = 1;

  if (FUNCTION_FLAGS (head) & FUNCTION_FLAGS_USES_PIC_OFFSET_TABLE)
    current_function_uses_pic_offset_table = 1;

  current_function_outgoing_args_size = OUTGOING_ARGS_SIZE (head);
  current_function_pops_args = POPS_ARGS (head);

  /* This is the only thing the expand_function_end call that used to be
     here actually did, and that call can cause problems.  */
  immediate_size_expand--;

  /* Find the last insn and rebuild the constant pool.  */
  for (last = FIRST_PARM_INSN (head);
       NEXT_INSN (last); last = NEXT_INSN (last))
    {
      if (GET_RTX_CLASS (GET_CODE (last)) == 'i')
        {
          restore_constants (&PATTERN (last));
          restore_constants (&REG_NOTES (last));
        }
    }

  set_new_first_and_last_insn (FIRST_PARM_INSN (head), last);
  set_new_first_and_last_label_num (FIRST_LABELNO (head), LAST_LABELNO (head));

  /* We must have already output DWARF debugging information for the
     original (abstract) inline function declaration/definition, so
     we want to make sure that the debugging information we generate
     for this special instance of the inline function refers back to
     the information we already generated.  To make sure that happens,
     we simply have to set the DECL_ABSTRACT_ORIGIN for the function
     node (and for all of the local ..._DECL nodes which are its children)
     so that they all point to themselves.  */

  set_decl_origin_self (fndecl);

  /* We're not deferring this any longer.  */
  DECL_DEFER_OUTPUT (fndecl) = 0;

  /* Integrating function calls isn't safe anymore, so turn on
     flag_no_inline.  */
  flag_no_inline = 1;

  /* Compile this function all the way down to assembly code.  */
  rest_of_compilation (fndecl);

  /* Reset flag_no_inline to its original value.  */
  flag_no_inline = save_flag_no_inline;

  current_function_decl = 0;
}
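
/* A hedged usage sketch: at end of compilation the driver walks the
   inline functions whose bodies were saved but never emitted and hands
   each one to output_inline_function.  The list name PENDING_INLINES
   below is hypothetical; the real bookkeeping lives in the callers.  */
#if 0
  tree decl;

  for (decl = pending_inlines; decl; decl = TREE_CHAIN (decl))
    if (DECL_SAVED_INSNS (decl) && ! TREE_ASM_WRITTEN (decl))
      output_inline_function (decl);
#endif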