source: trunk/third/gcc/integrate.c @ 8834

Revision 8834, 100.3 KB, checked in by ghudson
This commit was generated by cvs2svn to compensate for changes in r8833, which included commits to RCS files with non-trunk default branches.
/* Procedure integration for GNU CC.
   Copyright (C) 1988, 1991, 1993, 1994, 1995 Free Software Foundation, Inc.
   Contributed by Michael Tiemann (tiemann@cygnus.com)

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 59 Temple Place - Suite 330,
Boston, MA 02111-1307, USA.  */


#include <stdio.h>

#include "config.h"
#include "rtl.h"
#include "tree.h"
#include "flags.h"
#include "insn-config.h"
#include "insn-flags.h"
#include "expr.h"
#include "output.h"
#include "integrate.h"
#include "real.h"
#include "function.h"
#include "bytecode.h"

#include "obstack.h"
#define obstack_chunk_alloc     xmalloc
#define obstack_chunk_free      free

extern struct obstack *function_maybepermanent_obstack;

extern tree pushdecl ();
extern tree poplevel ();

/* Round to the next highest integer that meets the alignment.  */
#define CEIL_ROUND(VALUE,ALIGN) (((VALUE) + (ALIGN) - 1) & ~((ALIGN) - 1))
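
/* For example, CEIL_ROUND (13, 8) is 16 and CEIL_ROUND (16, 8) is 16;
   ALIGN must be a power of two for the mask arithmetic to be valid.  */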

/* Default max number of insns a function can have and still be inline.
   This is overridden on RISC machines.  */
#ifndef INTEGRATE_THRESHOLD
#define INTEGRATE_THRESHOLD(DECL) \
  (8 * (8 + list_length (DECL_ARGUMENTS (DECL))))
#endif
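
/* With the default definition above, a function taking two arguments may
   have at most 8 * (8 + 2) = 80 insns and still be considered for
   inlining.  */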

static rtx initialize_for_inline PROTO((tree, int, int, int, int));
static void finish_inline       PROTO((tree, rtx));
static void adjust_copied_decl_tree PROTO((tree));
static tree copy_decl_list      PROTO((tree));
static tree copy_decl_tree      PROTO((tree));
static void copy_decl_rtls      PROTO((tree));
static void save_constants      PROTO((rtx *));
static void note_modified_parmregs PROTO((rtx, rtx));
static rtx copy_for_inline      PROTO((rtx));
static void integrate_parm_decls PROTO((tree, struct inline_remap *, rtvec));
static void integrate_decl_tree PROTO((tree, int, struct inline_remap *));
static void subst_constants     PROTO((rtx *, rtx, struct inline_remap *));
static void restore_constants   PROTO((rtx *));
static void set_block_origin_self PROTO((tree));
static void set_decl_origin_self PROTO((tree));
static void set_block_abstract_flags PROTO((tree, int));

void set_decl_abstract_flags    PROTO((tree, int));

/* Zero if the current function (whose FUNCTION_DECL is FNDECL)
   is safe and reasonable to integrate into other functions.
   Nonzero means value is a warning message with a single %s
   for the function's name.  */

char *
function_cannot_inline_p (fndecl)
     register tree fndecl;
{
  register rtx insn;
  tree last = tree_last (TYPE_ARG_TYPES (TREE_TYPE (fndecl)));
  int max_insns = INTEGRATE_THRESHOLD (fndecl);
  register int ninsns = 0;
  register tree parms;

  /* No inlines with varargs.  `grokdeclarator' gives a warning
     message about that if `inline' is specified.  This code
     is put in to catch the volunteers.  */
  if ((last && TREE_VALUE (last) != void_type_node)
      || current_function_varargs)
    return "varargs function cannot be inline";

  if (current_function_calls_alloca)
    return "function using alloca cannot be inline";

  if (current_function_contains_functions)
    return "function with nested functions cannot be inline";

  /* If it's not even close, don't even look.  */
  if (!DECL_INLINE (fndecl) && get_max_uid () > 3 * max_insns)
    return "function too large to be inline";

#if 0
  /* Large stacks are OK now that inlined functions can share them.  */
  /* Don't inline functions with large stack usage,
     since they can make other recursive functions burn up stack.  */
  if (!DECL_INLINE (fndecl) && get_frame_size () > 100)
    return "function stack frame too large for inlining";
#endif

#if 0
  /* Don't inline functions which do not specify a function prototype and
     have BLKmode argument or take the address of a parameter.  */
  for (parms = DECL_ARGUMENTS (fndecl); parms; parms = TREE_CHAIN (parms))
    {
      if (TYPE_MODE (TREE_TYPE (parms)) == BLKmode)
        TREE_ADDRESSABLE (parms) = 1;
      if (last == NULL_TREE && TREE_ADDRESSABLE (parms))
        return "no prototype, and parameter address used; cannot be inline";
    }
#endif

  /* We can't inline functions that return structures
     the old-fashioned PCC way, copying into a static block.  */
  if (current_function_returns_pcc_struct)
    return "inline functions not supported for this return value type";

  /* We can't inline functions that return BLKmode structures in registers.  */
  if (TYPE_MODE (TREE_TYPE (TREE_TYPE (fndecl))) == BLKmode
      && ! aggregate_value_p (TREE_TYPE (TREE_TYPE (fndecl))))
    return "inline functions not supported for this return value type";

  /* We can't inline functions that return structures of varying size.  */
  if (int_size_in_bytes (TREE_TYPE (TREE_TYPE (fndecl))) < 0)
    return "function with varying-size return value cannot be inline";

  /* Cannot inline a function with a varying size argument or one that
     receives a transparent union.  */
  for (parms = DECL_ARGUMENTS (fndecl); parms; parms = TREE_CHAIN (parms))
    {
      if (int_size_in_bytes (TREE_TYPE (parms)) < 0)
        return "function with varying-size parameter cannot be inline";
      else if (TYPE_TRANSPARENT_UNION (TREE_TYPE (parms)))
        return "function with transparent union parameter cannot be inline";
    }

  if (!DECL_INLINE (fndecl) && get_max_uid () > max_insns)
    {
      for (ninsns = 0, insn = get_first_nonparm_insn (); insn && ninsns < max_insns;
           insn = NEXT_INSN (insn))
        {
          if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
            ninsns++;
        }

      if (ninsns >= max_insns)
        return "function too large to be inline";
    }

  /* We cannot inline this function if forced_labels is non-zero.  This
     implies that a label in this function was used as an initializer.
     Because labels cannot be duplicated, all labels in the function
     will be renamed when it is inlined.  However, there is no way to find
     and fix all variables initialized with addresses of labels in this
     function, hence inlining is impossible.  */

  if (forced_labels)
    return "function with label addresses used in initializers cannot inline";

  /* We cannot inline a nested function that jumps to a nonlocal label.  */
  if (current_function_has_nonlocal_goto)
    return "function with nonlocal goto cannot be inline";

  return 0;
}
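
/* A sketch of how a caller uses the result (rest_of_compilation in
   toplev.c does roughly this; the exact calls may differ):

     char *lose = function_cannot_inline_p (fndecl);
     if (lose != 0)
       warning_with_decl (fndecl, lose);
     else
       save_for_inline_copying (fndecl);

   The %s in LOSE is filled in with the function's name, and
   save_for_inline_nocopy may be used in place of the copying variant.  */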

/* Variables used within save_for_inline.  */

/* Mapping from old pseudo-register to new pseudo-registers.
   The first element of this map is reg_map[FIRST_PSEUDO_REGISTER].
   It is allocated in `save_for_inline' and `expand_inline_function',
   and deallocated on exit from each of those routines.  */
static rtx *reg_map;

/* Mapping from old code-labels to new code-labels.
   The first element of this map is label_map[min_labelno].
   It is allocated in `save_for_inline' and `expand_inline_function',
   and deallocated on exit from each of those routines.  */
static rtx *label_map;

/* Mapping from old insn uid's to copied insns.
   It is allocated in `save_for_inline' and `expand_inline_function',
   and deallocated on exit from each of those routines.  */
static rtx *insn_map;

/* Map pseudo reg number into the PARM_DECL for the parm living in the reg.
   Zero for a reg that isn't a parm's home.
   Only reg numbers less than max_parm_reg are mapped here.  */
static tree *parmdecl_map;

/* Keep track of first pseudo-register beyond those that are parms.  */
static int max_parm_reg;

/* When an insn is being copied by copy_for_inline,
   this is nonzero if we have copied an ASM_OPERANDS.
   In that case, it is the original input-operand vector.  */
static rtvec orig_asm_operands_vector;

/* When an insn is being copied by copy_for_inline,
   this is nonzero if we have copied an ASM_OPERANDS.
   In that case, it is the copied input-operand vector.  */
static rtvec copy_asm_operands_vector;

/* Likewise, this is the copied constraints vector.  */
static rtvec copy_asm_constraints_vector;

/* In save_for_inline, nonzero if past the parm-initialization insns.  */
static int in_nonparm_insns;

/* Subroutine for `save_for_inline{copying,nocopy}'.  Performs initialization
   needed to save FNDECL's insns and info for future inline expansion.  */

static rtx
initialize_for_inline (fndecl, min_labelno, max_labelno, max_reg, copy)
     tree fndecl;
     int min_labelno;
     int max_labelno;
     int max_reg;
     int copy;
{
  int function_flags, i;
  rtvec arg_vector;
  tree parms;

  /* Compute the values of any flags we must restore when inlining this.  */

  function_flags
    = (current_function_calls_alloca * FUNCTION_FLAGS_CALLS_ALLOCA
       + current_function_calls_setjmp * FUNCTION_FLAGS_CALLS_SETJMP
       + current_function_calls_longjmp * FUNCTION_FLAGS_CALLS_LONGJMP
       + current_function_returns_struct * FUNCTION_FLAGS_RETURNS_STRUCT
       + current_function_returns_pcc_struct * FUNCTION_FLAGS_RETURNS_PCC_STRUCT
       + current_function_needs_context * FUNCTION_FLAGS_NEEDS_CONTEXT
       + current_function_has_nonlocal_label * FUNCTION_FLAGS_HAS_NONLOCAL_LABEL
       + current_function_returns_pointer * FUNCTION_FLAGS_RETURNS_POINTER
       + current_function_uses_const_pool * FUNCTION_FLAGS_USES_CONST_POOL
       + current_function_uses_pic_offset_table * FUNCTION_FLAGS_USES_PIC_OFFSET_TABLE);
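
  /* Each current_function_* value above is either 0 or 1, and each
     FUNCTION_FLAGS_* constant is a distinct bit, so the sum builds a bit
     mask that FUNCTION_FLAGS (header) hands back at expansion time.  */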

  /* Clear out PARMDECL_MAP.  It was allocated in the caller's frame.  */
  bzero ((char *) parmdecl_map, max_parm_reg * sizeof (tree));
  arg_vector = rtvec_alloc (list_length (DECL_ARGUMENTS (fndecl)));

  for (parms = DECL_ARGUMENTS (fndecl), i = 0;
       parms;
       parms = TREE_CHAIN (parms), i++)
    {
      rtx p = DECL_RTL (parms);

      if (GET_CODE (p) == MEM && copy)
        {
          /* Copy the rtl so that modifications of the addresses
             later in compilation won't affect this arg_vector.
             Virtual register instantiation can screw up the address
             of the rtl.  */
          rtx new = copy_rtx (p);

          /* Don't leave the old copy anywhere in this decl.  */
          if (DECL_RTL (parms) == DECL_INCOMING_RTL (parms)
              || (GET_CODE (DECL_RTL (parms)) == MEM
                  && GET_CODE (DECL_INCOMING_RTL (parms)) == MEM
                  && (XEXP (DECL_RTL (parms), 0)
                      == XEXP (DECL_INCOMING_RTL (parms), 0))))
            DECL_INCOMING_RTL (parms) = new;
          DECL_RTL (parms) = new;
        }

      RTVEC_ELT (arg_vector, i) = p;

      if (GET_CODE (p) == REG)
        parmdecl_map[REGNO (p)] = parms;
      else if (GET_CODE (p) == CONCAT)
        {
          rtx preal = gen_realpart (GET_MODE (XEXP (p, 0)), p);
          rtx pimag = gen_imagpart (GET_MODE (preal), p);

          if (GET_CODE (preal) == REG)
            parmdecl_map[REGNO (preal)] = parms;
          if (GET_CODE (pimag) == REG)
            parmdecl_map[REGNO (pimag)] = parms;
        }

      /* This flag is cleared later
         if the function ever modifies the value of the parm.  */
      TREE_READONLY (parms) = 1;
    }

  /* Assume we start out in the insns that set up the parameters.  */
  in_nonparm_insns = 0;

  /* The list of DECL_SAVED_INSNS starts off with a header that records
     the following information:

     the first insn of the function (not including the insns that copy
     parameters into registers),
     the first parameter insn of the function,
     the first label used by that function,
     the last label used by that function,
     the highest register number used for parameters,
     the total number of registers used,
     the size of the incoming stack area for parameters,
     the number of bytes popped on return,
     the stack slot list,
     some flags that are used to restore compiler globals,
     the value of current_function_outgoing_args_size,
     the original argument vector,
     and the original DECL_INITIAL.  */

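  /* Those fields are read back via the FIRST_FUNCTION_INSN,
     FIRST_PARM_INSN, FIRST_LABELNO, LAST_LABELNO, MAX_REGNUM,
     FUNCTION_FLAGS and ORIGINAL_ARG_VECTOR accessor macros, as
     expand_inline_function does below.  */
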
  return gen_inline_header_rtx (NULL_RTX, NULL_RTX, min_labelno, max_labelno,
                                max_parm_reg, max_reg,
                                current_function_args_size,
                                current_function_pops_args,
                                stack_slot_list, forced_labels, function_flags,
                                current_function_outgoing_args_size,
                                arg_vector, (rtx) DECL_INITIAL (fndecl));
}

/* Subroutine for `save_for_inline{copying,nocopy}'.  Finishes up the
   things that must be done to make FNDECL expandable as an inline function.
   HEAD contains the chain of insns to which FNDECL will expand.  */

static void
finish_inline (fndecl, head)
     tree fndecl;
     rtx head;
{
  NEXT_INSN (head) = get_first_nonparm_insn ();
  FIRST_PARM_INSN (head) = get_insns ();
  DECL_SAVED_INSNS (fndecl) = head;
  DECL_FRAME_SIZE (fndecl) = get_frame_size ();
}

/* Adjust the BLOCK_END_NOTE pointers in a given copied DECL tree so that
   they all point to the new (copied) rtxs.  */
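/* (save_for_inline_copying stashes a pointer to each copied BLOCK_END
   note in the NOTE_SOURCE_FILE field of the corresponding original note;
   here we move that pointer into BLOCK_END_NOTE and clear the field.)  */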

static void
adjust_copied_decl_tree (block)
     register tree block;
{
  register tree subblock;
  register rtx original_end;

  original_end = BLOCK_END_NOTE (block);
  if (original_end)
    {
      BLOCK_END_NOTE (block) = (rtx) NOTE_SOURCE_FILE (original_end);
      NOTE_SOURCE_FILE (original_end) = 0;
    }

  /* Process all subblocks.  */
  for (subblock = BLOCK_SUBBLOCKS (block);
       subblock;
       subblock = TREE_CHAIN (subblock))
    adjust_copied_decl_tree (subblock);
}

/* Make the insns and PARM_DECLs of the current function permanent
   and record other information in DECL_SAVED_INSNS to allow inlining
   of this function in subsequent calls.

   This function is called when we are going to immediately compile
   the insns for FNDECL.  The insns in maybepermanent_obstack cannot be
   modified by the compilation process, so we copy all of them to
   new storage and consider the new insns to be the insn chain to be
   compiled.  Our caller (rest_of_compilation) saves the original
   DECL_INITIAL and DECL_ARGUMENTS; here we copy them.  */

/* ??? The nonlocal_label list should be adjusted also.  However, since
   a function that contains a nested function never gets inlined currently,
   the nonlocal_label list will always be empty, so we don't worry about
   it for now.  */

void
save_for_inline_copying (fndecl)
     tree fndecl;
{
  rtx first_insn, last_insn, insn;
  rtx head, copy;
  int max_labelno, min_labelno, i, len;
  int max_reg;
  int max_uid;
  rtx first_nonparm_insn;

  /* Make and emit a return-label if we have not already done so.
     Do this before recording the bounds on label numbers.  */

  if (return_label == 0)
    {
      return_label = gen_label_rtx ();
      emit_label (return_label);
    }

  /* Get some bounds on the labels and registers used.  */

  max_labelno = max_label_num ();
  min_labelno = get_first_label_num ();
  max_reg = max_reg_num ();

  /* Set up PARMDECL_MAP which maps pseudo-reg number to its PARM_DECL.
     Later we set TREE_READONLY to 0 if the parm is modified inside the fn.
     Also set up ARG_VECTOR, which holds the unmodified DECL_RTX values
     for the parms, prior to elimination of virtual registers.
     These values are needed for substituting parms properly.  */

  max_parm_reg = max_parm_reg_num ();
  parmdecl_map = (tree *) alloca (max_parm_reg * sizeof (tree));

  head = initialize_for_inline (fndecl, min_labelno, max_labelno, max_reg, 1);

  if (current_function_uses_const_pool)
    {
      /* Replace any constant pool references with the actual constant.  We
         will put the constants back in the copy made below.  */
      for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
        if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
          {
            save_constants (&PATTERN (insn));
            if (REG_NOTES (insn))
              save_constants (&REG_NOTES (insn));
          }

      /* Clear out the constant pool so that we can recreate it with the
         copied constants below.  */
      init_const_rtx_hash_table ();
      clear_const_double_mem ();
    }

  max_uid = INSN_UID (head);

  /* We have now allocated all that needs to be allocated permanently
     on the rtx obstack.  Set our high-water mark, so that we
     can free the rest of this when the time comes.  */

  preserve_data ();

  /* Copy the chain of insns of this function.
     Install the copied chain as the insns of this function,
     for continued compilation;
     the original chain is recorded as the DECL_SAVED_INSNS
     for inlining future calls.  */

  /* If there are insns that copy parms from the stack into pseudo registers,
     those insns are not copied.  `expand_inline_function' must
     emit the correct code to handle such things.  */

  insn = get_insns ();
  if (GET_CODE (insn) != NOTE)
    abort ();
  first_insn = rtx_alloc (NOTE);
  NOTE_SOURCE_FILE (first_insn) = NOTE_SOURCE_FILE (insn);
  NOTE_LINE_NUMBER (first_insn) = NOTE_LINE_NUMBER (insn);
  INSN_UID (first_insn) = INSN_UID (insn);
  PREV_INSN (first_insn) = NULL;
  NEXT_INSN (first_insn) = NULL;
  last_insn = first_insn;

  /* Each pseudo-reg in the old insn chain must have a unique rtx in the copy.
     Make these new rtx's now, and install them in regno_reg_rtx, so they
     will be the official pseudo-reg rtx's for the rest of compilation.  */

  reg_map = (rtx *) alloca ((max_reg + 1) * sizeof (rtx));

  len = sizeof (struct rtx_def) + (GET_RTX_LENGTH (REG) - 1) * sizeof (rtunion);
  for (i = max_reg - 1; i > LAST_VIRTUAL_REGISTER; i--)
    reg_map[i] = (rtx) obstack_copy (function_maybepermanent_obstack,
                                     regno_reg_rtx[i], len);

  bcopy ((char *) (reg_map + LAST_VIRTUAL_REGISTER + 1),
         (char *) (regno_reg_rtx + LAST_VIRTUAL_REGISTER + 1),
         (max_reg - (LAST_VIRTUAL_REGISTER + 1)) * sizeof (rtx));

  /* Likewise each label rtx must have a unique rtx as its copy.  */

  label_map = (rtx *) alloca ((max_labelno - min_labelno) * sizeof (rtx));
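  /* Offset the base so the map can be indexed directly by label number.  */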
  label_map -= min_labelno;

  for (i = min_labelno; i < max_labelno; i++)
    label_map[i] = gen_label_rtx ();

  /* Record the mapping of old insns to copied insns.  */

  insn_map = (rtx *) alloca (max_uid * sizeof (rtx));
  bzero ((char *) insn_map, max_uid * sizeof (rtx));

  /* Get the insn which signals the end of parameter setup code.  */
  first_nonparm_insn = get_first_nonparm_insn ();

  /* Copy any entries in regno_reg_rtx or DECL_RTLs that reference MEM
     (the former occurs when a variable has its address taken)
     since these may be shared and can be changed by virtual
     register instantiation.  DECL_RTL values for our arguments
     have already been copied by initialize_for_inline.  */
  for (i = LAST_VIRTUAL_REGISTER + 1; i < max_reg; i++)
    if (GET_CODE (regno_reg_rtx[i]) == MEM)
      XEXP (regno_reg_rtx[i], 0)
        = copy_for_inline (XEXP (regno_reg_rtx[i], 0));

  /* Copy the tree of subblocks of the function, and the decls in them.
     We will use the copy for compiling this function, then restore the original
     subblocks and decls for use when inlining this function.

     Several parts of the compiler modify BLOCK trees.  In particular,
     instantiate_virtual_regs will instantiate any virtual regs
     mentioned in the DECL_RTLs of the decls, and loop
     unrolling will replicate any BLOCK trees inside an unrolled loop.

     The modified subblocks or DECL_RTLs would be incorrect for the original rtl
     which we will use for inlining.  The rtl might even contain pseudoregs
     whose space has been freed.  */

  DECL_INITIAL (fndecl) = copy_decl_tree (DECL_INITIAL (fndecl));
  DECL_ARGUMENTS (fndecl) = copy_decl_list (DECL_ARGUMENTS (fndecl));

  /* Now copy each DECL_RTL which is a MEM,
     so it is safe to modify their addresses.  */
  copy_decl_rtls (DECL_INITIAL (fndecl));

  /* The fndecl node acts as its own progenitor, so mark it as such.  */
  DECL_ABSTRACT_ORIGIN (fndecl) = fndecl;

  /* Now copy the chain of insns.  Do this in two passes: the first copies
     each insn itself and its body, and the second copies the REG_NOTES.
     This is because a REG_NOTE may have a forward pointer to another insn.  */

  for (insn = NEXT_INSN (insn); insn; insn = NEXT_INSN (insn))
    {
      orig_asm_operands_vector = 0;

      if (insn == first_nonparm_insn)
        in_nonparm_insns = 1;

      switch (GET_CODE (insn))
        {
        case NOTE:
          /* No need to keep these.  */
          if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_DELETED)
            continue;

          copy = rtx_alloc (NOTE);
          NOTE_LINE_NUMBER (copy) = NOTE_LINE_NUMBER (insn);
          if (NOTE_LINE_NUMBER (insn) != NOTE_INSN_BLOCK_END)
            NOTE_SOURCE_FILE (copy) = NOTE_SOURCE_FILE (insn);
          else
            {
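              /* Stash the copy in the original BLOCK_END note;
                 adjust_copied_decl_tree retrieves it from NOTE_SOURCE_FILE
                 to repoint each BLOCK_END_NOTE at its copy.  */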
              NOTE_SOURCE_FILE (insn) = (char *) copy;
              NOTE_SOURCE_FILE (copy) = 0;
            }
          break;

        case INSN:
        case JUMP_INSN:
        case CALL_INSN:
          copy = rtx_alloc (GET_CODE (insn));

          if (GET_CODE (insn) == CALL_INSN)
            CALL_INSN_FUNCTION_USAGE (copy)
              = copy_for_inline (CALL_INSN_FUNCTION_USAGE (insn));

          PATTERN (copy) = copy_for_inline (PATTERN (insn));
          INSN_CODE (copy) = -1;
          LOG_LINKS (copy) = NULL_RTX;
          RTX_INTEGRATED_P (copy) = RTX_INTEGRATED_P (insn);
          break;

        case CODE_LABEL:
          copy = label_map[CODE_LABEL_NUMBER (insn)];
          LABEL_NAME (copy) = LABEL_NAME (insn);
          break;

        case BARRIER:
          copy = rtx_alloc (BARRIER);
          break;

        default:
          abort ();
        }
      INSN_UID (copy) = INSN_UID (insn);
      insn_map[INSN_UID (insn)] = copy;
      NEXT_INSN (last_insn) = copy;
      PREV_INSN (copy) = last_insn;
      last_insn = copy;
    }

  adjust_copied_decl_tree (DECL_INITIAL (fndecl));

  /* Now copy the REG_NOTES.  */
  for (insn = NEXT_INSN (get_insns ()); insn; insn = NEXT_INSN (insn))
    if (GET_RTX_CLASS (GET_CODE (insn)) == 'i'
        && insn_map[INSN_UID (insn)])
      REG_NOTES (insn_map[INSN_UID (insn)])
        = copy_for_inline (REG_NOTES (insn));

  NEXT_INSN (last_insn) = NULL;

  finish_inline (fndecl, head);

  set_new_first_and_last_insn (first_insn, last_insn);
}

/* Return a copy of a chain of nodes, chained through the TREE_CHAIN field.
   For example, this can copy a list made of TREE_LIST nodes.  While copying,
   for each node copied which doesn't already have its DECL_ABSTRACT_ORIGIN
   set to some non-zero value, set the DECL_ABSTRACT_ORIGIN of the copy to
   point to the corresponding (abstract) original node.  */

static tree
copy_decl_list (list)
     tree list;
{
  tree head;
  register tree prev, next;

  if (list == 0)
    return 0;

  head = prev = copy_node (list);
  if (DECL_ABSTRACT_ORIGIN (head) == NULL_TREE)
    DECL_ABSTRACT_ORIGIN (head) = list;
  next = TREE_CHAIN (list);
  while (next)
    {
      register tree copy;

      copy = copy_node (next);
      if (DECL_ABSTRACT_ORIGIN (copy) == NULL_TREE)
        DECL_ABSTRACT_ORIGIN (copy) = next;
      TREE_CHAIN (prev) = copy;
      prev = copy;
      next = TREE_CHAIN (next);
    }
  return head;
}

/* Make a copy of the entire tree of blocks BLOCK, and return it.  */

static tree
copy_decl_tree (block)
     tree block;
{
  tree t, vars, subblocks;

  vars = copy_decl_list (BLOCK_VARS (block));
  subblocks = 0;

  /* Process all subblocks.  */
  for (t = BLOCK_SUBBLOCKS (block); t; t = TREE_CHAIN (t))
    {
      tree copy = copy_decl_tree (t);
      TREE_CHAIN (copy) = subblocks;
      subblocks = copy;
    }

  t = copy_node (block);
  BLOCK_VARS (t) = vars;
  BLOCK_SUBBLOCKS (t) = nreverse (subblocks);
  /* If the BLOCK being cloned is already marked as having been instantiated
     from something else, then leave that `origin' marking alone.  Otherwise,
     mark the clone as having originated from the BLOCK we are cloning.  */
  if (BLOCK_ABSTRACT_ORIGIN (t) == NULL_TREE)
    BLOCK_ABSTRACT_ORIGIN (t) = block;
  return t;
}

/* Copy DECL_RTLs in all decls in the given BLOCK node.  */

static void
copy_decl_rtls (block)
     tree block;
{
  tree t;

  for (t = BLOCK_VARS (block); t; t = TREE_CHAIN (t))
    if (DECL_RTL (t) && GET_CODE (DECL_RTL (t)) == MEM)
      DECL_RTL (t) = copy_for_inline (DECL_RTL (t));

  /* Process all subblocks.  */
  for (t = BLOCK_SUBBLOCKS (block); t; t = TREE_CHAIN (t))
    copy_decl_rtls (t);
}

/* Make the insns and PARM_DECLs of the current function permanent
   and record other information in DECL_SAVED_INSNS to allow inlining
   of this function in subsequent calls.

   This routine need not copy any insns because we are not going
   to immediately compile the insns in the insn chain.  There
   are two cases when we would compile the insns for FNDECL:
   (1) when FNDECL is expanded inline, and (2) when FNDECL needs to
   be output at the end of other compilation, because somebody took
   its address.  In the first case, the insns of FNDECL are copied
   as it is expanded inline, so FNDECL's saved insns are not
   modified.  In the second case, FNDECL is used for the last time,
   so modifying the rtl is not a problem.

   We don't have to worry about FNDECL being inline expanded by
   other functions which are written at the end of compilation
   because flag_no_inline is turned on when we begin writing
   functions at the end of compilation.  */

void
save_for_inline_nocopy (fndecl)
     tree fndecl;
{
  rtx insn;
  rtx head;
  rtx first_nonparm_insn;

  /* Set up PARMDECL_MAP which maps pseudo-reg number to its PARM_DECL.
     Later we set TREE_READONLY to 0 if the parm is modified inside the fn.
     Also set up ARG_VECTOR, which holds the unmodified DECL_RTX values
     for the parms, prior to elimination of virtual registers.
     These values are needed for substituting parms properly.  */

  max_parm_reg = max_parm_reg_num ();
  parmdecl_map = (tree *) alloca (max_parm_reg * sizeof (tree));

  /* Make and emit a return-label if we have not already done so.  */

  if (return_label == 0)
    {
      return_label = gen_label_rtx ();
      emit_label (return_label);
    }

  head = initialize_for_inline (fndecl, get_first_label_num (),
                                max_label_num (), max_reg_num (), 0);

  /* If there are insns that copy parms from the stack into pseudo registers,
     those insns are not copied.  `expand_inline_function' must
     emit the correct code to handle such things.  */

  insn = get_insns ();
  if (GET_CODE (insn) != NOTE)
    abort ();

  /* Get the insn which signals the end of parameter setup code.  */
  first_nonparm_insn = get_first_nonparm_insn ();

  /* Now just scan the chain of insns to see what happens to our
     PARM_DECLs.  If a PARM_DECL is used but never modified, we
     can substitute its rtl directly when expanding inline (and
     perform constant folding when its incoming value is constant).
     Otherwise, we have to copy its value into a new register and track
     the new register's life.  */

  for (insn = NEXT_INSN (insn); insn; insn = NEXT_INSN (insn))
    {
      if (insn == first_nonparm_insn)
        in_nonparm_insns = 1;

      if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
        {
          if (current_function_uses_const_pool)
            {
              /* Replace any constant pool references with the actual constant.
                 We will put the constant back if we need to write the
                 function out after all.  */
              save_constants (&PATTERN (insn));
              if (REG_NOTES (insn))
                save_constants (&REG_NOTES (insn));
            }

          /* Record what interesting things happen to our parameters.  */
          note_stores (PATTERN (insn), note_modified_parmregs);
        }
    }

  /* We have now allocated all that needs to be allocated permanently
     on the rtx obstack.  Set our high-water mark, so that we
     can free the rest of this when the time comes.  */

  preserve_data ();

  finish_inline (fndecl, head);
}

/* Given PX, a pointer into an insn, search for references to the constant
   pool.  Replace each with a CONST that has the mode of the original
   constant, contains the constant, and has RTX_INTEGRATED_P set.
   Similarly, constant pool addresses not enclosed in a MEM are replaced
   with an ADDRESS rtx which also gives the constant, mode, and has
   RTX_INTEGRATED_P set.  */
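
/* For example (a sketch; ".LC0" names a hypothetical pool entry holding
   an SFmode constant):

     (mem:SF (symbol_ref ".LC0"))  becomes  (const:SF <the constant>)
     (symbol_ref ".LC0")           becomes  (address:SF <the constant>)

   and RTX_INTEGRATED_P is set on each replacement.  */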

static void
save_constants (px)
     rtx *px;
{
  rtx x;
  int i, j;

 again:
  x = *px;

  /* If this is a CONST_DOUBLE, don't try to fix things up in
     CONST_DOUBLE_MEM, because this is an infinite recursion.  */
  if (GET_CODE (x) == CONST_DOUBLE)
    return;
  else if (GET_CODE (x) == MEM && GET_CODE (XEXP (x, 0)) == SYMBOL_REF
           && CONSTANT_POOL_ADDRESS_P (XEXP (x, 0)))
    {
      enum machine_mode const_mode = get_pool_mode (XEXP (x, 0));
      rtx new = gen_rtx (CONST, const_mode, get_pool_constant (XEXP (x, 0)));
      RTX_INTEGRATED_P (new) = 1;

      /* If the MEM was in a different mode than the constant (perhaps we
         were only looking at the low-order part), surround it with a
         SUBREG so we can save both modes.  */

      if (GET_MODE (x) != const_mode)
        {
          new = gen_rtx (SUBREG, GET_MODE (x), new, 0);
          RTX_INTEGRATED_P (new) = 1;
        }

      *px = new;
      save_constants (&XEXP (*px, 0));
    }
  else if (GET_CODE (x) == SYMBOL_REF
           && CONSTANT_POOL_ADDRESS_P (x))
    {
      *px = gen_rtx (ADDRESS, get_pool_mode (x), get_pool_constant (x));
      save_constants (&XEXP (*px, 0));
      RTX_INTEGRATED_P (*px) = 1;
    }

  else
    {
      char *fmt = GET_RTX_FORMAT (GET_CODE (x));
      int len = GET_RTX_LENGTH (GET_CODE (x));

      for (i = len - 1; i >= 0; i--)
        {
          switch (fmt[i])
            {
            case 'E':
              for (j = 0; j < XVECLEN (x, i); j++)
                save_constants (&XVECEXP (x, i, j));
              break;

            case 'e':
              if (XEXP (x, i) == 0)
                continue;
              if (i == 0)
                {
                  /* Hack tail-recursion here.  */
                  px = &XEXP (x, 0);
                  goto again;
                }
              save_constants (&XEXP (x, i));
              break;
            }
        }
    }
}

/* Note whether a parameter is modified or not.  */

static void
note_modified_parmregs (reg, x)
     rtx reg;
     rtx x;
{
  if (GET_CODE (reg) == REG && in_nonparm_insns
      && REGNO (reg) < max_parm_reg
      && REGNO (reg) >= FIRST_PSEUDO_REGISTER
      && parmdecl_map[REGNO (reg)] != 0)
    TREE_READONLY (parmdecl_map[REGNO (reg)]) = 0;
}

/* Copy the rtx ORIG recursively, replacing pseudo-regs and labels
   according to `reg_map' and `label_map'.  The original rtl insns
   will be saved for inlining; this is used to make a copy
   which is used to finish compiling the inline function itself.

   If we find a "saved" constant pool entry, one which was replaced with
   the value of the constant, convert it back to a constant pool entry.
   Since the pool wasn't touched, this should simply restore the old
   address.

   All other kinds of rtx are copied except those that can never be
   changed during compilation.  */

static rtx
copy_for_inline (orig)
     rtx orig;
{
  register rtx x = orig;
  register int i;
  register enum rtx_code code;
  register char *format_ptr;

  if (x == 0)
    return x;

  code = GET_CODE (x);

  /* These types may be freely shared.  */

  switch (code)
    {
    case QUEUED:
    case CONST_INT:
    case SYMBOL_REF:
    case PC:
    case CC0:
      return x;

    case CONST_DOUBLE:
      /* We have to make a new CONST_DOUBLE to ensure that we account for
         it correctly.  Using the old CONST_DOUBLE_MEM data is wrong.  */
      if (GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT)
        {
          REAL_VALUE_TYPE d;

          REAL_VALUE_FROM_CONST_DOUBLE (d, x);
          return CONST_DOUBLE_FROM_REAL_VALUE (d, GET_MODE (x));
        }
      else
        return immed_double_const (CONST_DOUBLE_LOW (x), CONST_DOUBLE_HIGH (x),
                                   VOIDmode);

    case CONST:
      /* Get constant pool entry for constant in the pool.  */
      if (RTX_INTEGRATED_P (x))
        return validize_mem (force_const_mem (GET_MODE (x),
                                              copy_for_inline (XEXP (x, 0))));
      break;

    case SUBREG:
      /* Get constant pool entry, but access in different mode.  */
      if (RTX_INTEGRATED_P (x))
        {
          rtx new
            = force_const_mem (GET_MODE (SUBREG_REG (x)),
                               copy_for_inline (XEXP (SUBREG_REG (x), 0)));

          PUT_MODE (new, GET_MODE (x));
          return validize_mem (new);
        }
      break;

    case ADDRESS:
      /* If this is not the special constant-pool form, it is an error.
         Otherwise return the constant pool address.  */
      if (! RTX_INTEGRATED_P (x))
        abort ();

      return XEXP (force_const_mem (GET_MODE (x),
                                    copy_for_inline (XEXP (x, 0))), 0);

    case ASM_OPERANDS:
      /* If a single asm insn contains multiple output operands
         then it contains multiple ASM_OPERANDS rtx's that share operand 3.
         We must make sure that the copied insn continues to share it.  */
      if (orig_asm_operands_vector == XVEC (orig, 3))
        {
          x = rtx_alloc (ASM_OPERANDS);
          x->volatil = orig->volatil;
          XSTR (x, 0) = XSTR (orig, 0);
          XSTR (x, 1) = XSTR (orig, 1);
          XINT (x, 2) = XINT (orig, 2);
          XVEC (x, 3) = copy_asm_operands_vector;
          XVEC (x, 4) = copy_asm_constraints_vector;
          XSTR (x, 5) = XSTR (orig, 5);
          XINT (x, 6) = XINT (orig, 6);
          return x;
        }
      break;

    case MEM:
      /* A MEM is usually allowed to be shared if its address is constant
         or is a constant plus one of the special registers.

         We do not allow sharing of addresses that are either a special
         register or the sum of a constant and a special register because
         it is possible for unshare_all_rtl to copy the address into memory
         that won't be saved.  Although the MEM can safely be shared, and
         won't be copied there, the address itself cannot be shared, and may
         need to be copied.

         There are also two exceptions with constants: The first is if the
         constant is a LABEL_REF or the sum of the LABEL_REF
         and an integer.  This case can happen if we have an inline
         function that supplies a constant operand to the call of another
         inline function that uses it in a switch statement.  In this case,
         we will be replacing the LABEL_REF, so we have to replace this MEM
         as well.

         The second case is if we have a (const (plus (address ..) ...)).
         In that case we need to put back the address of the constant pool
         entry.  */

      if (CONSTANT_ADDRESS_P (XEXP (x, 0))
          && GET_CODE (XEXP (x, 0)) != LABEL_REF
          && ! (GET_CODE (XEXP (x, 0)) == CONST
                && (GET_CODE (XEXP (XEXP (x, 0), 0)) == PLUS
                    && ((GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 0))
                        == LABEL_REF)
                        || (GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 0))
                            == ADDRESS)))))
        return x;
      break;

    case LABEL_REF:
      /* If this is a non-local label, just make a new LABEL_REF.
         Otherwise, use the new label as well.  */
      x = gen_rtx (LABEL_REF, GET_MODE (orig),
                   LABEL_REF_NONLOCAL_P (orig) ? XEXP (orig, 0)
                   : label_map[CODE_LABEL_NUMBER (XEXP (orig, 0))]);
      LABEL_REF_NONLOCAL_P (x) = LABEL_REF_NONLOCAL_P (orig);
      LABEL_OUTSIDE_LOOP_P (x) = LABEL_OUTSIDE_LOOP_P (orig);
      return x;

    case REG:
      if (REGNO (x) > LAST_VIRTUAL_REGISTER)
        return reg_map[REGNO (x)];
      else
        return x;

    case SET:
      /* If a parm that gets modified lives in a pseudo-reg,
         clear its TREE_READONLY to prevent certain optimizations.  */
      {
        rtx dest = SET_DEST (x);

        while (GET_CODE (dest) == STRICT_LOW_PART
               || GET_CODE (dest) == ZERO_EXTRACT
               || GET_CODE (dest) == SUBREG)
          dest = XEXP (dest, 0);

        if (GET_CODE (dest) == REG
            && REGNO (dest) < max_parm_reg
            && REGNO (dest) >= FIRST_PSEUDO_REGISTER
            && parmdecl_map[REGNO (dest)] != 0
            /* The insn to load an arg pseudo from a stack slot
               does not count as modifying it.  */
            && in_nonparm_insns)
          TREE_READONLY (parmdecl_map[REGNO (dest)]) = 0;
      }
      break;

#if 0 /* This is a good idea, but here is the wrong place for it.  */
      /* Arrange that CONST_INTs always appear as the second operand
         if they appear, and that `frame_pointer_rtx' or `arg_pointer_rtx'
         always appear as the first.  */
    case PLUS:
      if (GET_CODE (XEXP (x, 0)) == CONST_INT
          || (XEXP (x, 1) == frame_pointer_rtx
              || (ARG_POINTER_REGNUM != FRAME_POINTER_REGNUM
                  && XEXP (x, 1) == arg_pointer_rtx)))
        {
          rtx t = XEXP (x, 0);
          XEXP (x, 0) = XEXP (x, 1);
          XEXP (x, 1) = t;
        }
      break;
#endif
    }

  /* Replace this rtx with a copy of itself.  */

  x = rtx_alloc (code);
  bcopy ((char *) orig, (char *) x,
         (sizeof (*x) - sizeof (x->fld)
          + sizeof (x->fld[0]) * GET_RTX_LENGTH (code)));

  /* Now scan the subexpressions recursively.
     We can store any replaced subexpressions directly into X
     since we know X is not shared!  Any vectors in X
     must be copied if X was copied.  */

  format_ptr = GET_RTX_FORMAT (code);

  for (i = 0; i < GET_RTX_LENGTH (code); i++)
    {
      switch (*format_ptr++)
        {
        case 'e':
          XEXP (x, i) = copy_for_inline (XEXP (x, i));
          break;

        case 'u':
          /* Change any references to old-insns to point to the
             corresponding copied insns.  */
          XEXP (x, i) = insn_map[INSN_UID (XEXP (x, i))];
          break;

        case 'E':
          if (XVEC (x, i) != NULL && XVECLEN (x, i) != 0)
            {
              register int j;

              XVEC (x, i) = gen_rtvec_v (XVECLEN (x, i), &XVECEXP (x, i, 0));
              for (j = 0; j < XVECLEN (x, i); j++)
                XVECEXP (x, i, j)
                  = copy_for_inline (XVECEXP (x, i, j));
            }
          break;
        }
    }

  if (code == ASM_OPERANDS && orig_asm_operands_vector == 0)
    {
      orig_asm_operands_vector = XVEC (orig, 3);
      copy_asm_operands_vector = XVEC (x, 3);
      copy_asm_constraints_vector = XVEC (x, 4);
    }

  return x;
}

/* Unfortunately, we need a global copy of const_equiv map for communication
   with a function called from note_stores.  Be *very* careful that this
   is used properly in the presence of recursion.  */

rtx *global_const_equiv_map;
int global_const_equiv_map_size;

#define FIXED_BASE_PLUS_P(X) \
  (GET_CODE (X) == PLUS && GET_CODE (XEXP (X, 1)) == CONST_INT  \
   && GET_CODE (XEXP (X, 0)) == REG                             \
   && REGNO (XEXP (X, 0)) >= FIRST_VIRTUAL_REGISTER             \
   && REGNO (XEXP (X, 0)) <= LAST_VIRTUAL_REGISTER)
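
/* That is, FIXED_BASE_PLUS_P matches an address such as
   (plus (reg virtual-stack-vars) (const_int 8)): a constant offset from
   one of the virtual base registers.  */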

/* Integrate the procedure defined by FNDECL.  Note that this function
   may wind up calling itself.  Since the static variables are not
   reentrant, we do not assign them until after the possibility
   of recursion is eliminated.

   If IGNORE is nonzero, do not produce a value.
   Otherwise store the value in TARGET if it is nonzero and that is convenient.

   Value is:
   (rtx)-1 if we could not substitute the function
   0 if we substituted it and it does not produce a value
   else an rtx for where the value is stored.  */
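
/* The caller is expected to compare the result against
   (rtx) (HOST_WIDE_INT) -1 and fall back to emitting a real call in that
   case (expand_call does roughly this).  */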

rtx
expand_inline_function (fndecl, parms, target, ignore, type, structure_value_addr)
     tree fndecl, parms;
     rtx target;
     int ignore;
     tree type;
     rtx structure_value_addr;
{
  tree formal, actual, block;
  rtx header = DECL_SAVED_INSNS (fndecl);
  rtx insns = FIRST_FUNCTION_INSN (header);
  rtx parm_insns = FIRST_PARM_INSN (header);
  tree *arg_trees;
  rtx *arg_vals;
  rtx insn;
  int max_regno;
  register int i;
  int min_labelno = FIRST_LABELNO (header);
  int max_labelno = LAST_LABELNO (header);
  int nargs;
  rtx local_return_label = 0;
  rtx loc;
  rtx stack_save = 0;
  rtx temp;
  struct inline_remap *map;
  rtx cc0_insn = 0;
  rtvec arg_vector = ORIGINAL_ARG_VECTOR (header);
  rtx static_chain_value = 0;

  /* Allow for equivalences of the pseudos we make for virtual fp and ap.  */
  max_regno = MAX_REGNUM (header) + 3;
  if (max_regno < FIRST_PSEUDO_REGISTER)
    abort ();

  nargs = list_length (DECL_ARGUMENTS (fndecl));

  /* Check that the parm types match and that sufficient arguments were
     passed.  Since the appropriate conversions or default promotions have
     already been applied, the machine modes should match exactly.  */

  for (formal = DECL_ARGUMENTS (fndecl),
       actual = parms;
       formal;
       formal = TREE_CHAIN (formal),
       actual = TREE_CHAIN (actual))
    {
      tree arg;
      enum machine_mode mode;

      if (actual == 0)
        return (rtx) (HOST_WIDE_INT) -1;

      arg = TREE_VALUE (actual);
      mode = TYPE_MODE (DECL_ARG_TYPE (formal));

      if (mode != TYPE_MODE (TREE_TYPE (arg))
          /* If they are block mode, the types should match exactly.
             They don't match exactly if TREE_TYPE (FORMAL) == ERROR_MARK_NODE,
             which could happen if the parameter has incomplete type.  */
          || (mode == BLKmode && TREE_TYPE (arg) != TREE_TYPE (formal)))
        return (rtx) (HOST_WIDE_INT) -1;
    }

  /* Extra arguments are valid, but will be ignored below, so we must
     evaluate them here for side-effects.  */
  for (; actual; actual = TREE_CHAIN (actual))
    expand_expr (TREE_VALUE (actual), const0_rtx,
                 TYPE_MODE (TREE_TYPE (TREE_VALUE (actual))), 0);

  /* Make a binding contour to keep inline cleanups called at
     outer function-scope level from looking like they are shadowing
     parameter declarations.  */
  pushlevel (0);

  /* Make a fresh binding contour that we can easily remove.  */
  pushlevel (0);
  expand_start_bindings (0);

  /* Expand the function arguments.  Do this first so that any
     new registers get created before we allocate the maps.  */

  arg_vals = (rtx *) alloca (nargs * sizeof (rtx));
  arg_trees = (tree *) alloca (nargs * sizeof (tree));

  for (formal = DECL_ARGUMENTS (fndecl), actual = parms, i = 0;
       formal;
       formal = TREE_CHAIN (formal), actual = TREE_CHAIN (actual), i++)
    {
      /* Actual parameter, converted to the type of the argument within the
         function.  */
      tree arg = convert (TREE_TYPE (formal), TREE_VALUE (actual));
      /* Mode of the variable used within the function.  */
      enum machine_mode mode = TYPE_MODE (TREE_TYPE (formal));
      int invisiref = 0;

      arg_trees[i] = arg;
      loc = RTVEC_ELT (arg_vector, i);

      /* If this is an object passed by invisible reference, we copy the
         object into a stack slot and save its address.  If this will go
         into memory, we do nothing now.  Otherwise, we just expand the
         argument.  */
      if (GET_CODE (loc) == MEM && GET_CODE (XEXP (loc, 0)) == REG
          && REGNO (XEXP (loc, 0)) > LAST_VIRTUAL_REGISTER)
        {
          rtx stack_slot
            = assign_stack_temp (TYPE_MODE (TREE_TYPE (arg)),
                                 int_size_in_bytes (TREE_TYPE (arg)), 1);
          MEM_IN_STRUCT_P (stack_slot) = AGGREGATE_TYPE_P (TREE_TYPE (arg));

          store_expr (arg, stack_slot, 0);

          arg_vals[i] = XEXP (stack_slot, 0);
          invisiref = 1;
        }
      else if (GET_CODE (loc) != MEM)
        {
          if (GET_MODE (loc) != TYPE_MODE (TREE_TYPE (arg)))
            /* The modes of LOC and ARG can differ if LOC was a variable
               that had its mode promoted via PROMOTE_MODE.  */
            arg_vals[i] = convert_modes (GET_MODE (loc),
                                         TYPE_MODE (TREE_TYPE (arg)),
                                         expand_expr (arg, NULL_RTX, mode,
                                                      EXPAND_SUM),
                                         TREE_UNSIGNED (TREE_TYPE (formal)));
          else
            arg_vals[i] = expand_expr (arg, NULL_RTX, mode, EXPAND_SUM);
        }
      else
        arg_vals[i] = 0;

      if (arg_vals[i] != 0
          && (! TREE_READONLY (formal)
              /* If the parameter is not read-only, copy our argument through
                 a register.  Also, we cannot use ARG_VALS[I] if it overlaps
                 TARGET in any way.  In the inline function, they will likely
                 be two different pseudos, and `safe_from_p' will make all
                 sorts of smart assumptions about their not conflicting.
                 But if ARG_VALS[I] overlaps TARGET, these assumptions are
                 wrong, so put ARG_VALS[I] into a fresh register.
                 Don't worry about invisible references, since their stack
                 temps will never overlap the target.  */
              || (target != 0
                  && ! invisiref
                  && (GET_CODE (arg_vals[i]) == REG
                      || GET_CODE (arg_vals[i]) == SUBREG
                      || GET_CODE (arg_vals[i]) == MEM)
                  && reg_overlap_mentioned_p (arg_vals[i], target))
              /* ??? We must always copy a SUBREG into a REG, because it might
                 get substituted into an address, and not all ports correctly
                 handle SUBREGs in addresses.  */
              || (GET_CODE (arg_vals[i]) == SUBREG)))
        arg_vals[i] = copy_to_mode_reg (GET_MODE (loc), arg_vals[i]);
    }

  /* Allocate the structures we use to remap things.  */

  map = (struct inline_remap *) alloca (sizeof (struct inline_remap));
  map->fndecl = fndecl;

  map->reg_map = (rtx *) alloca (max_regno * sizeof (rtx));
  bzero ((char *) map->reg_map, max_regno * sizeof (rtx));

  map->label_map = (rtx *) alloca ((max_labelno - min_labelno) * sizeof (rtx));
  map->label_map -= min_labelno;

  map->insn_map = (rtx *) alloca (INSN_UID (header) * sizeof (rtx));
  bzero ((char *) map->insn_map, INSN_UID (header) * sizeof (rtx));
  map->min_insnno = 0;
  map->max_insnno = INSN_UID (header);

  map->integrating = 1;

  /* const_equiv_map maps pseudos in our routine to constants, so it needs to
     be large enough for all our pseudos.  This is the number we are currently
     using plus the number in the called routine, plus 15 for each arg,
     five to compute the virtual frame pointer, and five for the return value.
     This should be enough for most cases.  We do not reference entries
     outside the range of the map.

     ??? These numbers are quite arbitrary and were obtained by
     experimentation.  At some point, we should try to allocate the
     table after all the parameters are set up so we can more accurately
     estimate the number of pseudos we will need.  */
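
  /* For instance, inlining a two-argument function adds a cushion of
     15 * 2 + 10 = 40 entries on top of the two pseudo counts.  */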

  map->const_equiv_map_size
    = max_reg_num () + (max_regno - FIRST_PSEUDO_REGISTER) + 15 * nargs + 10;

  map->const_equiv_map
    = (rtx *) alloca (map->const_equiv_map_size * sizeof (rtx));
  bzero ((char *) map->const_equiv_map,
         map->const_equiv_map_size * sizeof (rtx));

  map->const_age_map
    = (unsigned *) alloca (map->const_equiv_map_size * sizeof (unsigned));
  bzero ((char *) map->const_age_map,
         map->const_equiv_map_size * sizeof (unsigned));
  map->const_age = 0;

  /* Record the current insn in case we have to set up pointers to frame
     and argument memory blocks.  */
  map->insns_at_start = get_last_insn ();

  /* Update the outgoing argument size to allow for those in the inlined
     function.  */
  if (OUTGOING_ARGS_SIZE (header) > current_function_outgoing_args_size)
    current_function_outgoing_args_size = OUTGOING_ARGS_SIZE (header);

  /* If the inline function needs to make PIC references, that means
     that this function's PIC offset table must be used.  */
  if (FUNCTION_FLAGS (header) & FUNCTION_FLAGS_USES_PIC_OFFSET_TABLE)
    current_function_uses_pic_offset_table = 1;

  /* If this function needs a context, set it up.  */
  if (FUNCTION_FLAGS (header) & FUNCTION_FLAGS_NEEDS_CONTEXT)
    static_chain_value = lookup_static_chain (fndecl);

  if (GET_CODE (parm_insns) == NOTE
      && NOTE_LINE_NUMBER (parm_insns) > 0)
    {
      rtx note = emit_note (NOTE_SOURCE_FILE (parm_insns),
                            NOTE_LINE_NUMBER (parm_insns));
      if (note)
        RTX_INTEGRATED_P (note) = 1;
    }

  /* Process each argument.  For each, set up things so that the function's
     reference to the argument will refer to the argument being passed.
     We only replace REG with REG here.  Any simplifications are done
     via const_equiv_map.

     We make two passes:  In the first, we deal with parameters that will
     be placed into registers, since we need to ensure that the allocated
     register number fits in const_equiv_map.  Then we store all non-register
     parameters into their memory location.  */

  /* Don't try to free temp stack slots here, because we may put one of the
     parameters into a temp stack slot.  */

  for (i = 0; i < nargs; i++)
    {
      rtx copy = arg_vals[i];

      loc = RTVEC_ELT (arg_vector, i);

      /* There are three cases, each handled separately.  */
      if (GET_CODE (loc) == MEM && GET_CODE (XEXP (loc, 0)) == REG
          && REGNO (XEXP (loc, 0)) > LAST_VIRTUAL_REGISTER)
        {
          /* This must be an object passed by invisible reference (it could
             also be a variable-sized object, but we forbid inlining functions
             with variable-sized arguments).  COPY is the address of the
             actual value (this computation will cause it to be copied).  We
             map that address for the register, noting the actual address as
             an equivalent in case it can be substituted into the insns.  */

          if (GET_CODE (copy) != REG)
            {
              temp = copy_addr_to_reg (copy);
              if ((CONSTANT_P (copy) || FIXED_BASE_PLUS_P (copy))
                  && REGNO (temp) < map->const_equiv_map_size)
                {
                  map->const_equiv_map[REGNO (temp)] = copy;
                  map->const_age_map[REGNO (temp)] = CONST_AGE_PARM;
                }
              copy = temp;
            }
          map->reg_map[REGNO (XEXP (loc, 0))] = copy;
        }
      else if (GET_CODE (loc) == MEM)
        {
          /* This is the case of a parameter that lives in memory.
             It will live in the block we allocate in the called routine's
             frame that simulates the incoming argument area.  Do nothing
             now; we will call store_expr later.  */
          ;
        }
      else if (GET_CODE (loc) == REG)
        {
          /* This is the good case where the parameter is in a register.
             If it is read-only and our argument is a constant, set up the
             constant equivalence.

             If LOC is REG_USERVAR_P, the usual case, COPY must also have
             that flag set if it is a register.

             Also, don't allow hard registers here; they might not be valid
             when substituted into insns.  */

          if ((GET_CODE (copy) != REG && GET_CODE (copy) != SUBREG)
              || (GET_CODE (copy) == REG && REG_USERVAR_P (loc)
                  && ! REG_USERVAR_P (copy))
              || (GET_CODE (copy) == REG
                  && REGNO (copy) < FIRST_PSEUDO_REGISTER))
            {
              temp = copy_to_mode_reg (GET_MODE (loc), copy);
              REG_USERVAR_P (temp) = REG_USERVAR_P (loc);
              if ((CONSTANT_P (copy) || FIXED_BASE_PLUS_P (copy))
                  && REGNO (temp) < map->const_equiv_map_size)
                {
                  map->const_equiv_map[REGNO (temp)] = copy;
                  map->const_age_map[REGNO (temp)] = CONST_AGE_PARM;
                }
              copy = temp;
            }
          map->reg_map[REGNO (loc)] = copy;
        }
      else if (GET_CODE (loc) == CONCAT)
        {
          /* This is the good case where the parameter is in a
             pair of separate pseudos.
             If it is read-only and our argument is a constant, set up the
             constant equivalence.

             If LOC is REG_USERVAR_P, the usual case, COPY must also have
             that flag set if it is a register.

             Also, don't allow hard registers here; they might not be valid
             when substituted into insns.  */
          rtx locreal = gen_realpart (GET_MODE (XEXP (loc, 0)), loc);
          rtx locimag = gen_imagpart (GET_MODE (XEXP (loc, 0)), loc);
          rtx copyreal = gen_realpart (GET_MODE (locreal), copy);
          rtx copyimag = gen_imagpart (GET_MODE (locimag), copy);

          if ((GET_CODE (copyreal) != REG && GET_CODE (copyreal) != SUBREG)
              || (GET_CODE (copyreal) == REG && REG_USERVAR_P (locreal)
                  && ! REG_USERVAR_P (copyreal))
              || (GET_CODE (copyreal) == REG
                  && REGNO (copyreal) < FIRST_PSEUDO_REGISTER))
            {
              temp = copy_to_mode_reg (GET_MODE (locreal), copyreal);
              REG_USERVAR_P (temp) = REG_USERVAR_P (locreal);
              if ((CONSTANT_P (copyreal) || FIXED_BASE_PLUS_P (copyreal))
                  && REGNO (temp) < map->const_equiv_map_size)
                {
                  map->const_equiv_map[REGNO (temp)] = copyreal;
                  map->const_age_map[REGNO (temp)] = CONST_AGE_PARM;
                }
              copyreal = temp;
            }
          map->reg_map[REGNO (locreal)] = copyreal;

          if ((GET_CODE (copyimag) != REG && GET_CODE (copyimag) != SUBREG)
              || (GET_CODE (copyimag) == REG && REG_USERVAR_P (locimag)
                  && ! REG_USERVAR_P (copyimag))
              || (GET_CODE (copyimag) == REG
                  && REGNO (copyimag) < FIRST_PSEUDO_REGISTER))
            {
              temp = copy_to_mode_reg (GET_MODE (locimag), copyimag);
              REG_USERVAR_P (temp) = REG_USERVAR_P (locimag);
              if ((CONSTANT_P (copyimag) || FIXED_BASE_PLUS_P (copyimag))
                  && REGNO (temp) < map->const_equiv_map_size)
                {
                  map->const_equiv_map[REGNO (temp)] = copyimag;
1507                  map->const_age_map[REGNO (temp)] = CONST_AGE_PARM;
1508                }
1509              copyimag = temp;
1510            }
1511          map->reg_map[REGNO (locimag)] = copyimag;
1512        }
1513      else
1514        abort ();
1515    }
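  /* Note on the CONCAT case above: a complex parameter arrives as,
     e.g. (illustrative modes), (concat:DC (reg:DF 60) (reg:DF 61));
     its real and imaginary halves are remapped independently, each
     getting its own reg_map (and possibly const_equiv_map) entry.  */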
1516
1517  /* Now do the parameters that will be placed in memory.  */
1518
1519  for (formal = DECL_ARGUMENTS (fndecl), i = 0;
1520       formal; formal = TREE_CHAIN (formal), i++)
1521    {
1522      loc = RTVEC_ELT (arg_vector, i);
1523
1524      if (GET_CODE (loc) == MEM
1525          /* Exclude case handled above.  */
1526          && ! (GET_CODE (XEXP (loc, 0)) == REG
1527                && REGNO (XEXP (loc, 0)) > LAST_VIRTUAL_REGISTER))
1528        {
1529          rtx note = emit_note (DECL_SOURCE_FILE (formal),
1530                                DECL_SOURCE_LINE (formal));
1531          if (note)
1532            RTX_INTEGRATED_P (note) = 1;
1533
1534          /* Compute the address in the area we reserved and store the
1535             value there.  */
1536          temp = copy_rtx_and_substitute (loc, map);
1537          subst_constants (&temp, NULL_RTX, map);
1538          apply_change_group ();
1539          if (! memory_address_p (GET_MODE (temp), XEXP (temp, 0)))
1540            temp = change_address (temp, VOIDmode, XEXP (temp, 0));
1541          store_expr (arg_trees[i], temp, 0);
1542        }
1543    }
1544
1545  /* Deal with the places that the function puts its result.
1546     We are driven by what is placed into DECL_RESULT.
1547
1548     Initially, we assume that we don't need any special handling for
1549     REG_FUNCTION_VALUE_P.  */
1550
1551  map->inline_target = 0;
1552  loc = DECL_RTL (DECL_RESULT (fndecl));
1553  if (TYPE_MODE (type) == VOIDmode)
1554    /* There is no return value to worry about.  */
1555    ;
1556  else if (GET_CODE (loc) == MEM)
1557    {
1558      if (! structure_value_addr || ! aggregate_value_p (DECL_RESULT (fndecl)))
1559        abort ();
1560 
1561      /* Pass the function the address in which to return a structure value.
1562         Note that a constructor can cause someone to call us with
1563         STRUCTURE_VALUE_ADDR, but the initialization takes place
1564         via the first parameter, rather than the struct return address.
1565
1566         We have two cases:  If the address is a simple register indirect,
1567         use the mapping mechanism to point that register to our structure
1568         return address.  Otherwise, store the structure return value into
1569         the place that it will be referenced from.  */
1570
1571      if (GET_CODE (XEXP (loc, 0)) == REG)
1572        {
1573          temp = force_reg (Pmode, structure_value_addr);
1574          map->reg_map[REGNO (XEXP (loc, 0))] = temp;
1575          if ((CONSTANT_P (structure_value_addr)
1576               || (GET_CODE (structure_value_addr) == PLUS
1577                   && XEXP (structure_value_addr, 0) == virtual_stack_vars_rtx
1578                   && GET_CODE (XEXP (structure_value_addr, 1)) == CONST_INT))
1579              && REGNO (temp) < map->const_equiv_map_size)
1580            {
1581              map->const_equiv_map[REGNO (temp)] = structure_value_addr;
1582              map->const_age_map[REGNO (temp)] = CONST_AGE_PARM;
1583            }
1584        }
1585      else
1586        {
1587          temp = copy_rtx_and_substitute (loc, map);
1588          subst_constants (&temp, NULL_RTX, map);
1589          apply_change_group ();
1590          emit_move_insn (temp, structure_value_addr);
1591        }
1592    }
1593  else if (ignore)
1594    /* We will ignore the result value, so don't look at its structure.
1595       Note that preparations for an aggregate return value
1596       do need to be made (above) even if it will be ignored.  */
1597    ;
1598  else if (GET_CODE (loc) == REG)
1599    {
1600      /* The function returns an object in a register and we use the return
1601         value.  Set up our target for remapping.  */
1602
1603       /* Machine mode the function was declared to return.  */
1604      enum machine_mode departing_mode = TYPE_MODE (type);
1605      /* (Possibly wider) machine mode it actually computes
1606         (for the sake of callers that fail to declare it right).  */
1607      enum machine_mode arriving_mode
1608        = TYPE_MODE (TREE_TYPE (DECL_RESULT (fndecl)));
1609      rtx reg_to_map;
1610
1611      /* Don't use MEMs as direct targets because on some machines
1612         substituting a MEM for a REG makes invalid insns.
1613         Let the combiner substitute the MEM if that is valid.  */
1614      if (target == 0 || GET_CODE (target) != REG
1615          || GET_MODE (target) != departing_mode)
1616        target = gen_reg_rtx (departing_mode);
1617
1618      /* If the function's value was promoted before return,
1619         avoid machine mode mismatch when we substitute INLINE_TARGET.
1620         But TARGET is what we will return to the caller.  */
1621      if (arriving_mode != departing_mode)
1622        reg_to_map = gen_rtx (SUBREG, arriving_mode, target, 0);
1623      else
1624        reg_to_map = target;
1625
1626      /* Usually, the result value is the machine's return register.
1627         Sometimes it may be a pseudo. Handle both cases.  */
1628      if (REG_FUNCTION_VALUE_P (loc))
1629        map->inline_target = reg_to_map;
1630      else
1631        map->reg_map[REGNO (loc)] = reg_to_map;
1632    }
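  /* Sketch of the promotion case above (hypothetical modes): a function
     declared to return QImode whose body computes its value in SImode
     gets REG_TO_MAP = (subreg:SI (reg:QI target) 0), so the copied body
     sees the mode it computes in, while TARGET keeps the declared mode
     for the caller.  */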
1633
1634  /* Make new label equivalences for the labels in the called function.  */
1635  for (i = min_labelno; i < max_labelno; i++)
1636    map->label_map[i] = gen_label_rtx ();
1637
1638  /* Perform postincrements before actually calling the function.  */
1639  emit_queue ();
1640
1641  /* Clean up stack so that variables might have smaller offsets.  */
1642  do_pending_stack_adjust ();
1643
1644  /* Save a copy of the location of const_equiv_map for mark_stores, called
1645     via note_stores.  */
1646  global_const_equiv_map = map->const_equiv_map;
1647  global_const_equiv_map_size = map->const_equiv_map_size;
1648
1649  /* If the called function does an alloca, save and restore the
1650     stack pointer around the call.  This saves stack space, but
1651     also is required if this inline is being done between two
1652     pushes.  */
1653  if (FUNCTION_FLAGS (header) & FUNCTION_FLAGS_CALLS_ALLOCA)
1654    emit_stack_save (SAVE_BLOCK, &stack_save, NULL_RTX);
1655
1656  /* Now copy the insns one by one.  Do this in two passes, first the insns and
1657     then their REG_NOTES, just like save_for_inline.  */
1658
1659  /* This loop is very similar to the loop in copy_loop_body in unroll.c.  */
1660
1661  for (insn = insns; insn; insn = NEXT_INSN (insn))
1662    {
1663      rtx copy, pattern, set;
1664
1665      map->orig_asm_operands_vector = 0;
1666
1667      switch (GET_CODE (insn))
1668        {
1669        case INSN:
1670          pattern = PATTERN (insn);
1671          set = single_set (insn);
1672          copy = 0;
1673          if (GET_CODE (pattern) == USE
1674              && GET_CODE (XEXP (pattern, 0)) == REG
1675              && REG_FUNCTION_VALUE_P (XEXP (pattern, 0)))
1676            /* The (USE (REG n)) at return from the function should
1677               be ignored since we are changing (REG n) into
1678               inline_target.  */
1679            break;
1680
1681          /* Ignore setting a function value that we don't want to use.  */
1682          if (map->inline_target == 0
1683              && set != 0
1684              && GET_CODE (SET_DEST (set)) == REG
1685              && REG_FUNCTION_VALUE_P (SET_DEST (set)))
1686            {
1687              if (volatile_refs_p (SET_SRC (set)))
1688                {
1689                  rtx new_set;
1690
1691                  /* If we must not delete the source,
1692                     load it into a new temporary.  */
1693                  copy = emit_insn (copy_rtx_and_substitute (pattern, map));
1694
1695                  new_set = single_set (copy);
1696                  if (new_set == 0)
1697                    abort ();
1698
1699                  SET_DEST (new_set)
1700                    = gen_reg_rtx (GET_MODE (SET_DEST (new_set)));
1701                }
1702              /* If the source and destination are the same and it
1703                 has a note on it, keep the insn.  */
1704              else if (rtx_equal_p (SET_DEST (set), SET_SRC (set))
1705                       && REG_NOTES (insn) != 0)
1706                copy = emit_insn (copy_rtx_and_substitute (pattern, map));
1707              else
1708                break;
1709            }
1710
1711          /* If this is setting the static chain rtx, omit it.  */
1712          else if (static_chain_value != 0
1713                   && set != 0
1714                   && GET_CODE (SET_DEST (set)) == REG
1715                   && rtx_equal_p (SET_DEST (set),
1716                                   static_chain_incoming_rtx))
1717            break;
1718
1719          /* If this is setting the static chain pseudo, set it from
1720             the value we want to give it instead.  */
1721          else if (static_chain_value != 0
1722                   && set != 0
1723                   && rtx_equal_p (SET_SRC (set),
1724                                   static_chain_incoming_rtx))
1725            {
1726              rtx newdest = copy_rtx_and_substitute (SET_DEST (set), map);
1727
1728              copy = emit_move_insn (newdest, static_chain_value);
1729              static_chain_value = 0;
1730            }
1731          else
1732            copy = emit_insn (copy_rtx_and_substitute (pattern, map));
1733          /* REG_NOTES will be copied later.  */
1734
1735#ifdef HAVE_cc0
1736          /* If this insn is setting CC0, it may need to look at
1737             the insn that uses CC0 to see what type of insn it is.
1738             In that case, the call to recog via validate_change will
1739             fail.  So don't substitute constants here.  Instead,
1740             do it when we emit the following insn.
1741
1742             For example, see the pyr.md file.  That machine has signed and
1743             unsigned compares.  The compare patterns must check the
1744             following branch insn to see what kind of compare to
1745             emit.
1746
1747             If the previous insn set CC0, substitute constants on it as
1748             well.  */
1749          if (sets_cc0_p (PATTERN (copy)) != 0)
1750            cc0_insn = copy;
1751          else
1752            {
1753              if (cc0_insn)
1754                try_constants (cc0_insn, map);
1755              cc0_insn = 0;
1756              try_constants (copy, map);
1757            }
1758#else
1759          try_constants (copy, map);
1760#endif
1761          break;
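          /* To make the cc0 deferral above concrete (illustrative rtl):
             for a pair such as
                 (set (cc0) (compare (reg 70) (const_int 0)))
                 (set (pc) (if_then_else (eq (cc0) (const_int 0)) ...))
             constants are substituted into the compare only once its
             user has been copied, so recog can match the two halves
             together.  */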
1762
1763        case JUMP_INSN:
1764          if (GET_CODE (PATTERN (insn)) == RETURN)
1765            {
1766              if (local_return_label == 0)
1767                local_return_label = gen_label_rtx ();
1768              pattern = gen_jump (local_return_label);
1769            }
1770          else
1771            pattern = copy_rtx_and_substitute (PATTERN (insn), map);
1772
1773          copy = emit_jump_insn (pattern);
1774
1775#ifdef HAVE_cc0
1776          if (cc0_insn)
1777            try_constants (cc0_insn, map);
1778          cc0_insn = 0;
1779#endif
1780          try_constants (copy, map);
1781
1782          /* If this used to be a conditional jump insn whose branch
1783             direction is now known, we must do something special.  */
1784          if (condjump_p (insn) && ! simplejump_p (insn) && map->last_pc_value)
1785            {
1786#ifdef HAVE_cc0
1787              /* The previous insn set cc0 for us.  So delete it.  */
1788              delete_insn (PREV_INSN (copy));
1789#endif
1790
1791              /* If this is now a no-op, delete it.  */
1792              if (map->last_pc_value == pc_rtx)
1793                {
1794                  delete_insn (copy);
1795                  copy = 0;
1796                }
1797              else
1798                /* Otherwise, this is an unconditional jump, so we must put a
1799                   BARRIER after it.  We could do some dead code elimination
1800                   here, but jump.c will do it just as well.  */
1801                emit_barrier ();
1802            }
1803          break;
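          /* Concretely (hypothetical): if a parameter known to be
             (const_int 0) decides a copied conditional jump, then
             last_pc_value is pc_rtx when the branch is never taken
             (the copy is deleted) or a (label_ref ...) when it is
             always taken (the jump becomes unconditional and gets a
             BARRIER).  */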
1804
1805        case CALL_INSN:
1806          pattern = copy_rtx_and_substitute (PATTERN (insn), map);
1807          copy = emit_call_insn (pattern);
1808
1809          /* Because the USAGE information potentially contains objects other
1810             than hard registers, we need to copy it.  */
1811          CALL_INSN_FUNCTION_USAGE (copy) =
1812             copy_rtx_and_substitute (CALL_INSN_FUNCTION_USAGE (insn), map);
1813
1814#ifdef HAVE_cc0
1815          if (cc0_insn)
1816            try_constants (cc0_insn, map);
1817          cc0_insn = 0;
1818#endif
1819          try_constants (copy, map);
1820
1821          /* Be lazy and assume CALL_INSNs clobber all hard registers.  */
1822          for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1823            map->const_equiv_map[i] = 0;
1824          break;
1825
1826        case CODE_LABEL:
1827          copy = emit_label (map->label_map[CODE_LABEL_NUMBER (insn)]);
1828          LABEL_NAME (copy) = LABEL_NAME (insn);
1829          map->const_age++;
1830          break;
1831
1832        case BARRIER:
1833          copy = emit_barrier ();
1834          break;
1835
1836        case NOTE:
1837          /* It is important to discard function-end and function-beg notes,
1838             so we have only one of each in the current function.
1839             Also, NOTE_INSN_DELETED notes aren't useful (save_for_inline
1840             deleted these in the copy used for continuing compilation,
1841             not the copy used for inlining).  */
1842          if (NOTE_LINE_NUMBER (insn) != NOTE_INSN_FUNCTION_END
1843              && NOTE_LINE_NUMBER (insn) != NOTE_INSN_FUNCTION_BEG
1844              && NOTE_LINE_NUMBER (insn) != NOTE_INSN_DELETED)
1845            copy = emit_note (NOTE_SOURCE_FILE (insn), NOTE_LINE_NUMBER (insn));
1846          else
1847            copy = 0;
1848          break;
1849
1850        default:
1851          abort ();
1852          break;
1853        }
1854
1855      if (copy)
1856        RTX_INTEGRATED_P (copy) = 1;
1857
1858      map->insn_map[INSN_UID (insn)] = copy;
1859    }
1860
1861  /* Now copy the REG_NOTES.  Increment const_age, so that only constants
1862     from parameters can be substituted in.  These are the only ones that
1863     are valid across the entire function.  */
1864  map->const_age++;
1865  for (insn = insns; insn; insn = NEXT_INSN (insn))
1866    if (GET_RTX_CLASS (GET_CODE (insn)) == 'i'
1867        && map->insn_map[INSN_UID (insn)]
1868        && REG_NOTES (insn))
1869      {
1870        rtx tem = copy_rtx_and_substitute (REG_NOTES (insn), map);
1871        /* We must also do subst_constants, in case one of our parameters
1872           has const type and constant value.  */
1873        subst_constants (&tem, NULL_RTX, map);
1874        apply_change_group ();
1875        REG_NOTES (map->insn_map[INSN_UID (insn)]) = tem;
1876      }
1877
1878  if (local_return_label)
1879    emit_label (local_return_label);
1880
1881  /* Restore the stack pointer if we saved it above.  */
1882  if (FUNCTION_FLAGS (header) & FUNCTION_FLAGS_CALLS_ALLOCA)
1883    emit_stack_restore (SAVE_BLOCK, stack_save, NULL_RTX);
1884
1885  /* Make copies of the decls of the symbols in the inline function, so that
1886     the copies of the variables get declared in the current function.  Set
1887     up things so that lookup_static_chain knows to interpret registers
1888     in SAVE_EXPRs for TYPE_SIZEs as local.  */
1889
1890  inline_function_decl = fndecl;
1891  integrate_parm_decls (DECL_ARGUMENTS (fndecl), map, arg_vector);
1892  integrate_decl_tree ((tree) ORIGINAL_DECL_INITIAL (header), 0, map);
1893  inline_function_decl = 0;
1894
1895  /* End the scope containing the copied formal parameter variables
1896     and copied LABEL_DECLs.  */
1897
1898  expand_end_bindings (getdecls (), 1, 1);
1899  block = poplevel (1, 1, 0);
1900  BLOCK_ABSTRACT_ORIGIN (block) = (DECL_ABSTRACT_ORIGIN (fndecl) == NULL
1901                                   ? fndecl : DECL_ABSTRACT_ORIGIN (fndecl));
1902  poplevel (0, 0, 0);
1903  emit_line_note (input_filename, lineno);
1904
1905  if (structure_value_addr)
1906    {
1907      target = gen_rtx (MEM, TYPE_MODE (type),
1908                        memory_address (TYPE_MODE (type), structure_value_addr));
1909      MEM_IN_STRUCT_P (target) = 1;
1910    }
1911  return target;
1912}
1913
1914/* Given a chain of PARM_DECLs, ARGS, copy each decl into a VAR_DECL,
1915   push all of those decls and give each one the corresponding home.  */
1916
1917static void
1918integrate_parm_decls (args, map, arg_vector)
1919     tree args;
1920     struct inline_remap *map;
1921     rtvec arg_vector;
1922{
1923  register tree tail;
1924  register int i;
1925
1926  for (tail = args, i = 0; tail; tail = TREE_CHAIN (tail), i++)
1927    {
1928      register tree decl = build_decl (VAR_DECL, DECL_NAME (tail),
1929                                       TREE_TYPE (tail));
1930      rtx new_decl_rtl
1931        = copy_rtx_and_substitute (RTVEC_ELT (arg_vector, i), map);
1932
1933      DECL_ARG_TYPE (decl) = DECL_ARG_TYPE (tail);
1934      /* We really should be setting DECL_INCOMING_RTL to something reasonable
1935         here, but that's going to require some more work.  */
1936      /* DECL_INCOMING_RTL (decl) = ?; */
1937      /* These args would always appear unused, if not for this.  */
1938      TREE_USED (decl) = 1;
1939      /* Prevent warning for shadowing with these.  */
1940      DECL_ABSTRACT_ORIGIN (decl) = tail;
1941      pushdecl (decl);
1942      /* Fully instantiate the address with the equivalent form so that the
1943         debugging information contains the actual register, instead of the
1944         virtual register.   Do this by not passing an insn to
1945         subst_constants.  */
1946      subst_constants (&new_decl_rtl, NULL_RTX, map);
1947      apply_change_group ();
1948      DECL_RTL (decl) = new_decl_rtl;
1949    }
1950}
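/* For example (hypothetical): when inlining `int f (int n)', the loop
   above pushes a VAR_DECL named `n' whose DECL_RTL is the remapped home
   of the incoming argument, so debug output describes the parameter as
   a local variable of the inlined copy.  */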
1951
1952/* Given a BLOCK node LET, push decls and levels so as to construct in the
1953   current function a tree of contexts isomorphic to the one that is given.
1954
1955   LEVEL indicates how far down into the BLOCK tree is the node we are
1956   currently traversing.  It is always zero except for recursive calls.
1957
1958   MAP, if nonzero, is a pointer to an inline_remap map which indicates how
1959   registers used in the DECL_RTL field should be remapped.  If it is zero,
1960   no mapping is necessary.  */
1961
1962static void
1963integrate_decl_tree (let, level, map)
1964     tree let;
1965     int level;
1966     struct inline_remap *map;
1967{
1968  tree t, node;
1969
1970  if (level > 0)
1971    pushlevel (0);
1972 
1973  for (t = BLOCK_VARS (let); t; t = TREE_CHAIN (t))
1974    {
1975      tree d;
1976      tree newd;
1977
1978      push_obstacks_nochange ();
1979      saveable_allocation ();
1980      d = copy_node (t);
1981      pop_obstacks ();
1982
1983      if (DECL_RTL (t) != 0)
1984        {
1985          DECL_RTL (d) = copy_rtx_and_substitute (DECL_RTL (t), map);
1986          /* Fully instantiate the address with the equivalent form so that the
1987             debugging information contains the actual register, instead of the
1988             virtual register.   Do this by not passing an insn to
1989             subst_constants.  */
1990          subst_constants (&DECL_RTL (d), NULL_RTX, map);
1991          apply_change_group ();
1992        }
1993      /* These args would always appear unused, if not for this.  */
1994      TREE_USED (d) = 1;
1995
1996      if (DECL_LANG_SPECIFIC (d))
1997        copy_lang_decl (d);
1998
1999      /* Must set DECL_ABSTRACT_ORIGIN here for local variables, to ensure
2000         that we don't get -Wshadow warnings.  But don't set it here if
2001         pushdecl might return a duplicate decl, as that will result in
2002         incorrect DWARF debug info.  */
2003      if (! DECL_EXTERNAL (d) || ! TREE_PUBLIC (d))
2004        /* Prevent warning for shadowing with these.  */
2005        DECL_ABSTRACT_ORIGIN (d) = t;
2006
2007      newd = pushdecl (d);
2008
2009      /* If we didn't set DECL_ABSTRACT_ORIGIN above, then set it now.
2010         It is simpler to set it again here than to check whether we did.
2011         If the decl we get back is the copy of 't' that we started with,
2012         then set the DECL_ABSTRACT_ORIGIN.  Otherwise, we must have a
2013         duplicate decl, and we got the older one back.  In that case, setting
2014         DECL_ABSTRACT_ORIGIN is not appropriate.  */
2015      if (newd == d)
2016        DECL_ABSTRACT_ORIGIN (d) = t;
2017    }
2018
2019  for (t = BLOCK_SUBBLOCKS (let); t; t = TREE_CHAIN (t))
2020    integrate_decl_tree (t, level + 1, map);
2021
2022  if (level > 0)
2023    {
2024      node = poplevel (1, 0, 0);
2025      if (node)
2026        {
2027          TREE_USED (node) = TREE_USED (let);
2028          BLOCK_ABSTRACT_ORIGIN (node) = let;
2029        }
2030    }
2031}
2032
2033/* Create a new copy of an rtx.
2034   Recursively copies the operands of the rtx,
2035   except for those few rtx codes that are sharable.
2036
2037   We always return an rtx that is similar to that incoming rtx, with the
2038   exception of possibly changing a REG to a SUBREG or vice versa.  No
2039   rtl is ever emitted.
2040
2041   Handle constants that need to be placed in the constant pool by
2042   calling `force_const_mem'.  */
2043
2044rtx
2045copy_rtx_and_substitute (orig, map)
2046     register rtx orig;
2047     struct inline_remap *map;
2048{
2049  register rtx copy, temp;
2050  register int i, j;
2051  register RTX_CODE code;
2052  register enum machine_mode mode;
2053  register char *format_ptr;
2054  int regno;
2055
2056  if (orig == 0)
2057    return 0;
2058
2059  code = GET_CODE (orig);
2060  mode = GET_MODE (orig);
2061
2062  switch (code)
2063    {
2064    case REG:
2065      /* If the stack pointer register shows up, it must be part of
2066         stack-adjustments (*not* because we eliminated the frame pointer!).
2067         Small hard registers are returned as-is.  Pseudo-registers
2068         go through their `reg_map'.  */
2069      regno = REGNO (orig);
2070      if (regno <= LAST_VIRTUAL_REGISTER)
2071        {
2072          /* Some hard registers are also mapped,
2073             but others are not translated.  */
2074          if (map->reg_map[regno] != 0)
2075            return map->reg_map[regno];
2076
2077          /* If this is the virtual frame pointer, make space in the current
2078             function's stack frame for the stack frame of the inline function.
2079
2080             Copy the address of this area into a pseudo.  Map
2081             virtual_stack_vars_rtx to this pseudo and set up a constant
2082             equivalence for it to be the address.  This will substitute the
2083             address into insns where it can be substituted and use the new
2084             pseudo where it can't.  */
2085          if (regno == VIRTUAL_STACK_VARS_REGNUM)
2086            {
2087              rtx loc, seq;
2088              int size = DECL_FRAME_SIZE (map->fndecl);
2089              int rounded;
2090
2091              start_sequence ();
2092              loc = assign_stack_temp (BLKmode, size, 1);
2093              loc = XEXP (loc, 0);
2094#ifdef FRAME_GROWS_DOWNWARD
2095              /* In this case, virtual_stack_vars_rtx points to one byte
2096                 higher than the top of the frame area.  So compute the offset
2097                 to one byte higher than our substitute frame.
2098                 Keep the fake frame pointer aligned like a real one.  */
2099              rounded = CEIL_ROUND (size, BIGGEST_ALIGNMENT / BITS_PER_UNIT);
2100              loc = plus_constant (loc, rounded);
2101#endif
2102              map->reg_map[regno] = temp
2103                = force_reg (Pmode, force_operand (loc, NULL_RTX));
2104
2105              if (REGNO (temp) < map->const_equiv_map_size)
2106                {
2107                  map->const_equiv_map[REGNO (temp)] = loc;
2108                  map->const_age_map[REGNO (temp)] = CONST_AGE_PARM;
2109                }
2110
2111              seq = gen_sequence ();
2112              end_sequence ();
2113              emit_insn_after (seq, map->insns_at_start);
2114              return temp;
2115            }
2116          else if (regno == VIRTUAL_INCOMING_ARGS_REGNUM)
2117            {
2118              /* Do the same for a block to contain any arguments referenced
2119                 in memory. */
2120              rtx loc, seq;
2121              int size = FUNCTION_ARGS_SIZE (DECL_SAVED_INSNS (map->fndecl));
2122
2123              start_sequence ();
2124              loc = assign_stack_temp (BLKmode, size, 1);
2125              loc = XEXP (loc, 0);
2126              /* When arguments grow downward, the virtual incoming
2127                 args pointer points to the top of the argument block,
2128                 so the remapped location had better do the same.  */
2129#ifdef ARGS_GROW_DOWNWARD
2130              loc = plus_constant (loc, size);
2131#endif
2132              map->reg_map[regno] = temp
2133                = force_reg (Pmode, force_operand (loc, NULL_RTX));
2134
2135              if (REGNO (temp) < map->const_equiv_map_size)
2136                {
2137                  map->const_equiv_map[REGNO (temp)] = loc;
2138                  map->const_age_map[REGNO (temp)] = CONST_AGE_PARM;
2139                }
2140
2141              seq = gen_sequence ();
2142              end_sequence ();
2143              emit_insn_after (seq, map->insns_at_start);
2144              return temp;
2145            }
2146          else if (REG_FUNCTION_VALUE_P (orig))
2147            {
2148              /* This is a reference to the function return value.  If
2149                 the function doesn't have a return value, error.  If the
2150                 mode doesn't agree, make a SUBREG.  */
2151              if (map->inline_target == 0)
2152                /* Must be unrolling loops or replicating code if we
2153                   reach here, so return the register unchanged.  */
2154                return orig;
2155              else if (mode != GET_MODE (map->inline_target))
2156                return gen_lowpart (mode, map->inline_target);
2157              else
2158                return map->inline_target;
2159            }
2160          return orig;
2161        }
2162      if (map->reg_map[regno] == NULL)
2163        {
2164          map->reg_map[regno] = gen_reg_rtx (mode);
2165          REG_USERVAR_P (map->reg_map[regno]) = REG_USERVAR_P (orig);
2166          REG_LOOP_TEST_P (map->reg_map[regno]) = REG_LOOP_TEST_P (orig);
2167          RTX_UNCHANGING_P (map->reg_map[regno]) = RTX_UNCHANGING_P (orig);
2168          /* A reg with REG_FUNCTION_VALUE_P true will never reach here.  */
2169        }
2170      return map->reg_map[regno];
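      /* Illustrative summary of the REG case (hypothetical numbers):
         virtual_stack_vars_rtx becomes a pseudo whose const_equiv entry
         is the address of a BLKmode stack temp standing in for the
         inline function's frame, while an ordinary pseudo 57 simply
         gets a fresh pseudo via reg_map[57].  */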
2171
2172    case SUBREG:
2173      copy = copy_rtx_and_substitute (SUBREG_REG (orig), map);
2174      /* SUBREG is ordinary, but don't make nested SUBREGs.  */
2175      if (GET_CODE (copy) == SUBREG)
2176        return gen_rtx (SUBREG, GET_MODE (orig), SUBREG_REG (copy),
2177                        SUBREG_WORD (orig) + SUBREG_WORD (copy));
2178      else if (GET_CODE (copy) == CONCAT)
2179        return (subreg_realpart_p (orig) ? XEXP (copy, 0) : XEXP (copy, 1));
2180      else
2181        return gen_rtx (SUBREG, GET_MODE (orig), copy,
2182                        SUBREG_WORD (orig));
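      /* E.g. (illustrative): if (reg:SI 70) maps to
         (subreg:SI (reg:DI 90) 1), then copying (subreg:QI (reg:SI 70) 0)
         yields (subreg:QI (reg:DI 90) 1); the SUBREG_WORDs are added so
         that SUBREGs never nest.  */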
2183
2184    case USE:
2185    case CLOBBER:
2186      /* USE and CLOBBER are ordinary, but we convert (use (subreg foo))
2187         to (use foo) if the original insn didn't have a subreg.
2188         Removing the subreg distorts the VAX movstrhi pattern
2189         by changing the mode of an operand.  */
2190      copy = copy_rtx_and_substitute (XEXP (orig, 0), map);
2191      if (GET_CODE (copy) == SUBREG && GET_CODE (XEXP (orig, 0)) != SUBREG)
2192        copy = SUBREG_REG (copy);
2193      return gen_rtx (code, VOIDmode, copy);
2194
2195    case CODE_LABEL:
2196      LABEL_PRESERVE_P (map->label_map[CODE_LABEL_NUMBER (orig)])
2197        = LABEL_PRESERVE_P (orig);
2198      return map->label_map[CODE_LABEL_NUMBER (orig)];
2199
2200    case LABEL_REF:
2201      copy = gen_rtx (LABEL_REF, mode,
2202                      LABEL_REF_NONLOCAL_P (orig) ? XEXP (orig, 0)
2203                      : map->label_map[CODE_LABEL_NUMBER (XEXP (orig, 0))]);
2204      LABEL_OUTSIDE_LOOP_P (copy) = LABEL_OUTSIDE_LOOP_P (orig);
2205
2206      /* The fact that this label was previously nonlocal does not mean
2207         it still is, so we must check if it is within the range of
2208         this function's labels.  */
2209      LABEL_REF_NONLOCAL_P (copy)
2210        = (LABEL_REF_NONLOCAL_P (orig)
2211           && ! (CODE_LABEL_NUMBER (XEXP (copy, 0)) >= get_first_label_num ()
2212                 && CODE_LABEL_NUMBER (XEXP (copy, 0)) < max_label_num ()));
2213
2214      /* If we have made a nonlocal label local, it means that this
2215         inlined call will be referring to our nonlocal goto handler.
2216         So make sure we create one for this block; we normally would
2217         not since this is not otherwise considered a "call".  */
2218      if (LABEL_REF_NONLOCAL_P (orig) && ! LABEL_REF_NONLOCAL_P (copy))
2219        function_call_count++;
2220
2221      return copy;
2222
2223    case PC:
2224    case CC0:
2225    case CONST_INT:
2226      return orig;
2227
2228    case SYMBOL_REF:
2229      /* Symbols which represent the address of a label stored in the constant
2230         pool must be modified to point to a constant pool entry for the
2231         remapped label.  Otherwise, symbols are returned unchanged.  */
2232      if (CONSTANT_POOL_ADDRESS_P (orig))
2233        {
2234          rtx constant = get_pool_constant (orig);
2235          if (GET_CODE (constant) == LABEL_REF)
2236            return XEXP (force_const_mem (Pmode,
2237                                          copy_rtx_and_substitute (constant,
2238                                                                   map)),
2239                         0);
2240        }
2241
2242      return orig;
2243
2244    case CONST_DOUBLE:
2245      /* We have to make a new copy of this CONST_DOUBLE because we don't want
2246         to use the old value of CONST_DOUBLE_MEM.  Also, this may be a
2247         duplicate of a CONST_DOUBLE we have already seen.  */
2248      if (GET_MODE_CLASS (GET_MODE (orig)) == MODE_FLOAT)
2249        {
2250          REAL_VALUE_TYPE d;
2251
2252          REAL_VALUE_FROM_CONST_DOUBLE (d, orig);
2253          return CONST_DOUBLE_FROM_REAL_VALUE (d, GET_MODE (orig));
2254        }
2255      else
2256        return immed_double_const (CONST_DOUBLE_LOW (orig),
2257                                   CONST_DOUBLE_HIGH (orig), VOIDmode);
2258
2259    case CONST:
2260      /* Make new constant pool entry for a constant
2261         that was in the pool of the inline function.  */
2262      if (RTX_INTEGRATED_P (orig))
2263        {
2264          /* If this was an address of a constant pool entry that itself
2265             had to be placed in the constant pool, it might not be a
2266             valid address.  So the recursive call below might turn it
2267             into a register.  In that case, it isn't a constant any
2268             more, so return it.  This has the potential of changing a
2269             MEM into a REG, but we'll assume that it is safe.  */
2270          temp = copy_rtx_and_substitute (XEXP (orig, 0), map);
2271          if (! CONSTANT_P (temp))
2272            return temp;
2273          return validize_mem (force_const_mem (GET_MODE (orig), temp));
2274        }
2275      break;
2276
2277    case ADDRESS:
2278      /* If from constant pool address, make new constant pool entry and
2279         return its address.  */
2280      if (! RTX_INTEGRATED_P (orig))
2281        abort ();
2282
2283      temp = force_const_mem (GET_MODE (orig),
2284                              copy_rtx_and_substitute (XEXP (orig, 0), map));
2285
2286#if 0
2287      /* Legitimizing the address here is incorrect.
2288
2289         The only ADDRESS rtx's that can reach here are ones created by
2290         save_constants.  Hence the operand of the ADDRESS is always valid
2291         in this position of the instruction, since the original rtx without
2292         the ADDRESS was valid.
2293
2294         The reason we don't legitimize the address here is that on the
2295         Sparc, the caller may have a (high ...) surrounding this ADDRESS.
2296         This code forces the operand of the address to a register, which
2297         fails because we can not take the HIGH part of a register.
2298
2299         Also, change_address may create new registers.  These registers
2300         will not have valid reg_map entries.  This can cause try_constants()
2301         to fail because it assumes that all registers in the rtx have valid
2302         reg_map entries, and it may end up replacing one of these new
2303         registers with junk. */
2304
2305      if (! memory_address_p (GET_MODE (temp), XEXP (temp, 0)))
2306        temp = change_address (temp, GET_MODE (temp), XEXP (temp, 0));
2307#endif
2308
2309      return XEXP (temp, 0);
2310
2311    case ASM_OPERANDS:
2312      /* If a single asm insn contains multiple output operands
2313         then it contains multiple ASM_OPERANDS rtx's that share operand 3.
2314         We must make sure that the copied insn continues to share it.  */
2315      if (map->orig_asm_operands_vector == XVEC (orig, 3))
2316        {
2317          copy = rtx_alloc (ASM_OPERANDS);
2318          copy->volatil = orig->volatil;
2319          XSTR (copy, 0) = XSTR (orig, 0);
2320          XSTR (copy, 1) = XSTR (orig, 1);
2321          XINT (copy, 2) = XINT (orig, 2);
2322          XVEC (copy, 3) = map->copy_asm_operands_vector;
2323          XVEC (copy, 4) = map->copy_asm_constraints_vector;
2324          XSTR (copy, 5) = XSTR (orig, 5);
2325          XINT (copy, 6) = XINT (orig, 6);
2326          return copy;
2327        }
2328      break;
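      /* E.g. (illustrative): asm ("..." : "=r" (a), "=r" (b)) expands
         to two ASM_OPERANDS sharing one operand vector; the first one
         copied records the new vectors in MAP (see the end of this
         function), and the test above lets the second copy share
         them.  */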
2329
2330    case CALL:
2331      /* This is given special treatment because the first
2332         operand of a CALL is a (MEM ...) which may get
2333         forced into a register for cse.  This is undesirable
2334         if function-address cse isn't wanted or if we won't do cse.  */
2335#ifndef NO_FUNCTION_CSE
2336      if (! (optimize && ! flag_no_function_cse))
2337#endif
2338        return gen_rtx (CALL, GET_MODE (orig),
2339                        gen_rtx (MEM, GET_MODE (XEXP (orig, 0)),
2340                                 copy_rtx_and_substitute (XEXP (XEXP (orig, 0), 0), map)),
2341                        copy_rtx_and_substitute (XEXP (orig, 1), map));
2342      break;
2343
2344#if 0
2345      /* Must be ifdefed out for loop unrolling to work.  */
2346    case RETURN:
2347      abort ();
2348#endif
2349
2350    case SET:
2351      /* If this is setting fp or ap, it means that we have a nonlocal goto.
2352         Don't alter that.
2353         If the nonlocal goto is into the current function,
2354         this will result in unnecessarily bad code, but should work.  */
2355      if (SET_DEST (orig) == virtual_stack_vars_rtx
2356          || SET_DEST (orig) == virtual_incoming_args_rtx)
2357        return gen_rtx (SET, VOIDmode, SET_DEST (orig),
2358                        copy_rtx_and_substitute (SET_SRC (orig), map));
2359      break;
2360
2361    case MEM:
2362      copy = rtx_alloc (MEM);
2363      PUT_MODE (copy, mode);
2364      XEXP (copy, 0) = copy_rtx_and_substitute (XEXP (orig, 0), map);
2365      MEM_IN_STRUCT_P (copy) = MEM_IN_STRUCT_P (orig);
2366      MEM_VOLATILE_P (copy) = MEM_VOLATILE_P (orig);
2367
2368      /* If doing function inlining, this MEM might not be const in the
2369         function that it is being inlined into, and thus may not be
2370         unchanging after function inlining.  Constant pool references are
2371         handled elsewhere, so this doesn't lose RTX_UNCHANGING_P bits
2372         for them.  */
2373      if (! map->integrating)
2374        RTX_UNCHANGING_P (copy) = RTX_UNCHANGING_P (orig);
2375
2376      return copy;
2377    }
2378
2379  copy = rtx_alloc (code);
2380  PUT_MODE (copy, mode);
2381  copy->in_struct = orig->in_struct;
2382  copy->volatil = orig->volatil;
2383  copy->unchanging = orig->unchanging;
2384
2385  format_ptr = GET_RTX_FORMAT (GET_CODE (copy));
2386
2387  for (i = 0; i < GET_RTX_LENGTH (GET_CODE (copy)); i++)
2388    {
2389      switch (*format_ptr++)
2390        {
2391        case '0':
2392          break;
2393
2394        case 'e':
2395          XEXP (copy, i) = copy_rtx_and_substitute (XEXP (orig, i), map);
2396          break;
2397
2398        case 'u':
2399          /* Change any references to old-insns to point to the
2400             corresponding copied insns.  */
2401          XEXP (copy, i) = map->insn_map[INSN_UID (XEXP (orig, i))];
2402          break;
2403
2404        case 'E':
2405          XVEC (copy, i) = XVEC (orig, i);
2406          if (XVEC (orig, i) != NULL && XVECLEN (orig, i) != 0)
2407            {
2408              XVEC (copy, i) = rtvec_alloc (XVECLEN (orig, i));
2409              for (j = 0; j < XVECLEN (copy, i); j++)
2410                XVECEXP (copy, i, j)
2411                  = copy_rtx_and_substitute (XVECEXP (orig, i, j), map);
2412            }
2413          break;
2414
2415        case 'w':
2416          XWINT (copy, i) = XWINT (orig, i);
2417          break;
2418
2419        case 'i':
2420          XINT (copy, i) = XINT (orig, i);
2421          break;
2422
2423        case 's':
2424          XSTR (copy, i) = XSTR (orig, i);
2425          break;
2426
2427        default:
2428          abort ();
2429        }
2430    }
2431
2432  if (code == ASM_OPERANDS && map->orig_asm_operands_vector == 0)
2433    {
2434      map->orig_asm_operands_vector = XVEC (orig, 3);
2435      map->copy_asm_operands_vector = XVEC (copy, 3);
2436      map->copy_asm_constraints_vector = XVEC (copy, 4);
2437    }
2438
2439  return copy;
2440}
2441
2442/* Substitute known constant values into INSN, if that is valid.  */
2443
2444void
2445try_constants (insn, map)
2446     rtx insn;
2447     struct inline_remap *map;
2448{
2449  int i;
2450
2451  map->num_sets = 0;
2452  subst_constants (&PATTERN (insn), insn, map);
2453
2454  /* Apply the changes if they are valid; otherwise discard them.  */
2455  apply_change_group ();
2456
2457  /* Show we don't know the value of anything stored or clobbered.  */
2458  note_stores (PATTERN (insn), mark_stores);
2459  map->last_pc_value = 0;
2460#ifdef HAVE_cc0
2461  map->last_cc0_value = 0;
2462#endif
2463
2464  /* Set up any constant equivalences made in this insn.  */
2465  for (i = 0; i < map->num_sets; i++)
2466    {
2467      if (GET_CODE (map->equiv_sets[i].dest) == REG)
2468        {
2469          int regno = REGNO (map->equiv_sets[i].dest);
2470
2471          if (regno < map->const_equiv_map_size
2472              && (map->const_equiv_map[regno] == 0
2473                  /* The following clause is a hack to make the case work where GNU C++
2474                     reassigns a variable to make cse work right.  */
2475                  || ! rtx_equal_p (map->const_equiv_map[regno],
2476                                    map->equiv_sets[i].equiv)))
2477            {
2478              map->const_equiv_map[regno] = map->equiv_sets[i].equiv;
2479              map->const_age_map[regno] = map->const_age;
2480            }
2481        }
2482      else if (map->equiv_sets[i].dest == pc_rtx)
2483        map->last_pc_value = map->equiv_sets[i].equiv;
2484#ifdef HAVE_cc0
2485      else if (map->equiv_sets[i].dest == cc0_rtx)
2486        map->last_cc0_value = map->equiv_sets[i].equiv;
2487#endif
2488    }
2489}
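/* Worked example (hypothetical): copying

       (set (reg 80) (plus (reg 57) (const_int 4)))

   when const_equiv_map[57] holds (const_int 8) lets subst_constants
   propose (const_int 12) for the source; if validate_change accepts
   the change, the loop above records (const_int 12) as the equivalence
   for reg 80 at the current const_age.  */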
2490
2491/* Substitute known constants for pseudo regs in the contents of LOC,
2492   which are part of INSN.
2493   If INSN is zero, the substitution should always be done (this is used to
2494   update DECL_RTL).
2495   These changes are taken out by try_constants if the result is not valid.
2496
2497   Note that we are more concerned with determining when the result of a SET
2498   is a constant, for further propagation, than with actually inserting constants
2499   into insns; cse will do the latter task better.
2500
2501   This function is also used to adjust the addresses of items previously addressed
2502   via the virtual stack variable or virtual incoming arguments registers.  */
2503
2504static void
2505subst_constants (loc, insn, map)
2506     rtx *loc;
2507     rtx insn;
2508     struct inline_remap *map;
2509{
2510  rtx x = *loc;
2511  register int i;
2512  register enum rtx_code code;
2513  register char *format_ptr;
2514  int num_changes = num_validated_changes ();
2515  rtx new = 0;
2516  enum machine_mode op0_mode;
2517
2518  code = GET_CODE (x);
2519
2520  switch (code)
2521    {
2522    case PC:
2523    case CONST_INT:
2524    case CONST_DOUBLE:
2525    case SYMBOL_REF:
2526    case CONST:
2527    case LABEL_REF:
2528    case ADDRESS:
2529      return;
2530
2531#ifdef HAVE_cc0
2532    case CC0:
2533      validate_change (insn, loc, map->last_cc0_value, 1);
2534      return;
2535#endif
2536
2537    case USE:
2538    case CLOBBER:
2539      /* The only thing we can do with a USE or CLOBBER is possibly do
2540         some substitutions in a MEM within it.  */
2541      if (GET_CODE (XEXP (x, 0)) == MEM)
2542        subst_constants (&XEXP (XEXP (x, 0), 0), insn, map);
2543      return;
2544
2545    case REG:
2546      /* Substitute for parms and known constants.  Don't replace
2547         hard regs used as user variables with constants.  */
2548      {
2549        int regno = REGNO (x);
2550
2551        if (! (regno < FIRST_PSEUDO_REGISTER && REG_USERVAR_P (x))
2552            && regno < map->const_equiv_map_size
2553            && map->const_equiv_map[regno] != 0
2554            && map->const_age_map[regno] >= map->const_age)
2555          validate_change (insn, loc, map->const_equiv_map[regno], 1);
2556        return;
2557      }
2558
2559    case SUBREG:
2560      /* SUBREG applied to something other than a reg
2561         should be treated as ordinary, since that must
2562         be a special hack and we don't know how to treat it specially.
2563         Consider for example mulsidi3 in m68k.md.
2564         Ordinary SUBREG of a REG needs this special treatment.  */
2565      if (GET_CODE (SUBREG_REG (x)) == REG)
2566        {
2567          rtx inner = SUBREG_REG (x);
2568          rtx new = 0;
2569
2570          /* We can't call subst_constants on &SUBREG_REG (x) because any
2571             constant or SUBREG wouldn't be valid inside our SUBREG.  Instead,
2572             see what is inside, try to form the new SUBREG and see if that is
2573             valid.  We handle two cases: extracting a full word in an
2574             integral mode and extracting the low part.  */
2575          subst_constants (&inner, NULL_RTX, map);
2576
2577          if (GET_MODE_CLASS (GET_MODE (x)) == MODE_INT
2578              && GET_MODE_SIZE (GET_MODE (x)) == UNITS_PER_WORD
2579              && GET_MODE (SUBREG_REG (x)) != VOIDmode)
2580            new = operand_subword (inner, SUBREG_WORD (x), 0,
2581                                   GET_MODE (SUBREG_REG (x)));
2582
2583          if (new == 0 && subreg_lowpart_p (x))
2584            new = gen_lowpart_common (GET_MODE (x), inner);
2585
2586          if (new)
2587            validate_change (insn, loc, new, 1);
2588
2589          return;
2590        }
2591      break;
2592
2593    case MEM:
2594      subst_constants (&XEXP (x, 0), insn, map);
2595
2596      /* If a memory address got spoiled, change it back.  */
2597      if (insn != 0 && num_validated_changes () != num_changes
2598          && !memory_address_p (GET_MODE (x), XEXP (x, 0)))
2599        cancel_changes (num_changes);
2600      return;
2601
2602    case SET:
2603      {
2604        /* Substitute constants in our source, and in any arguments to a
2605           complex (e.g., ZERO_EXTRACT) destination, but not in the destination
2606           itself.  */
2607        rtx *dest_loc = &SET_DEST (x);
2608        rtx dest = *dest_loc;
2609        rtx src, tem;
2610
2611        subst_constants (&SET_SRC (x), insn, map);
2612        src = SET_SRC (x);
2613
2614        while (GET_CODE (*dest_loc) == ZERO_EXTRACT
2615               /* By convention, we always use ZERO_EXTRACT in the dest.  */
2616/*             || GET_CODE (*dest_loc) == SIGN_EXTRACT */
2617               || GET_CODE (*dest_loc) == SUBREG
2618               || GET_CODE (*dest_loc) == STRICT_LOW_PART)
2619          {
2620            if (GET_CODE (*dest_loc) == ZERO_EXTRACT)
2621              {
2622                subst_constants (&XEXP (*dest_loc, 1), insn, map);
2623                subst_constants (&XEXP (*dest_loc, 2), insn, map);
2624              }
2625            dest_loc = &XEXP (*dest_loc, 0);
2626          }
2627
2628        /* Do substitute in the address of a destination in memory.  */
2629        if (GET_CODE (*dest_loc) == MEM)
2630          subst_constants (&XEXP (*dest_loc, 0), insn, map);
2631
2632        /* Check for the case where DEST is a SUBREG, both it and the underlying
2633           register are no larger than one word, and the SUBREG has the wider mode.
2634           In that case, we are really setting the underlying register to the
2635           source converted to the mode of DEST.  So indicate that.  */
2636        if (GET_CODE (dest) == SUBREG
2637            && GET_MODE_SIZE (GET_MODE (dest)) <= UNITS_PER_WORD
2638            && GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest))) <= UNITS_PER_WORD
2639            && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest)))
2640                      <= GET_MODE_SIZE (GET_MODE (dest)))
2641            && (tem = gen_lowpart_if_possible (GET_MODE (SUBREG_REG (dest)),
2642                                               src)))
2643          src = tem, dest = SUBREG_REG (dest);
2644
2645        /* If storing a recognizable value, save it for later recording.  */
2646        if ((map->num_sets < MAX_RECOG_OPERANDS)
2647            && (CONSTANT_P (src)
2648                || (GET_CODE (src) == REG
2649                    && (REGNO (src) == VIRTUAL_INCOMING_ARGS_REGNUM
2650                        || REGNO (src) == VIRTUAL_STACK_VARS_REGNUM))
2651                || (GET_CODE (src) == PLUS
2652                    && GET_CODE (XEXP (src, 0)) == REG
2653                    && (REGNO (XEXP (src, 0)) == VIRTUAL_INCOMING_ARGS_REGNUM
2654                        || REGNO (XEXP (src, 0)) == VIRTUAL_STACK_VARS_REGNUM)
2655                    && CONSTANT_P (XEXP (src, 1)))
2656                || GET_CODE (src) == COMPARE
2657#ifdef HAVE_cc0
2658                || dest == cc0_rtx
2659#endif
2660                || (dest == pc_rtx
2661                    && (src == pc_rtx || GET_CODE (src) == RETURN
2662                        || GET_CODE (src) == LABEL_REF))))
2663          {
2664            /* Normally, this copy won't do anything.  But, if SRC is a COMPARE
2665               it will cause us to save the COMPARE with any constants
2666               substituted, which is what we want for later.  */
2667            map->equiv_sets[map->num_sets].equiv = copy_rtx (src);
2668            map->equiv_sets[map->num_sets++].dest = dest;
2669          }
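        /* E.g. (illustrative): (set (subreg:SI (reg:HI 75) 0) (const_int 5))
           is recorded just above as reg 75 being equivalent to
           (const_int 5), since the store really sets the whole narrower
           register.  */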
2670
2671        return;
2672      }
2673    }
2674
2675  format_ptr = GET_RTX_FORMAT (code);
2676 
2677  /* If the first operand is an expression, save its mode for later.  */
2678  if (*format_ptr == 'e')
2679    op0_mode = GET_MODE (XEXP (x, 0));
2680
2681  for (i = 0; i < GET_RTX_LENGTH (code); i++)
2682    {
2683      switch (*format_ptr++)
2684        {
2685        case '0':
2686          break;
2687
2688        case 'e':
2689          if (XEXP (x, i))
2690            subst_constants (&XEXP (x, i), insn, map);
2691          break;
2692
2693        case 'u':
2694        case 'i':
2695        case 's':
2696        case 'w':
2697          break;
2698
2699        case 'E':
2700          if (XVEC (x, i) != NULL && XVECLEN (x, i) != 0)
2701            {
2702              int j;
2703              for (j = 0; j < XVECLEN (x, i); j++)
2704                subst_constants (&XVECEXP (x, i, j), insn, map);
2705            }
2706          break;
2707
2708        default:
2709          abort ();
2710        }
2711    }
2712
2713  /* If this is a commutative operation, move a constant to the second
2714     operand unless the second operand is already a CONST_INT.  */
2715  if ((GET_RTX_CLASS (code) == 'c' || code == NE || code == EQ)
2716      && CONSTANT_P (XEXP (x, 0)) && GET_CODE (XEXP (x, 1)) != CONST_INT)
2717    {
2718      rtx tem = XEXP (x, 0);
2719      validate_change (insn, &XEXP (x, 0), XEXP (x, 1), 1);
2720      validate_change (insn, &XEXP (x, 1), tem, 1);
2721    }
2722
2723  /* Simplify the expression in case we put in some constants.  */
2724  switch (GET_RTX_CLASS (code))
2725    {
2726    case '1':
2727      new = simplify_unary_operation (code, GET_MODE (x),
2728                                      XEXP (x, 0), op0_mode);
2729      break;
2730
2731    case '<':
2732      {
2733        enum machine_mode op_mode = GET_MODE (XEXP (x, 0));
2734        if (op_mode == VOIDmode)
2735          op_mode = GET_MODE (XEXP (x, 1));
2736        new = simplify_relational_operation (code, op_mode,
2737                                             XEXP (x, 0), XEXP (x, 1));
2738#ifdef FLOAT_STORE_FLAG_VALUE
2739        if (new != 0 && GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT)
2740          new = ((new == const0_rtx) ? CONST0_RTX (GET_MODE (x))
2741                 : CONST_DOUBLE_FROM_REAL_VALUE (FLOAT_STORE_FLAG_VALUE,
2742                                                 GET_MODE (x)));
2743#endif
2744        break;
2745      }
2746
2747    case '2':
2748    case 'c':
2749      new = simplify_binary_operation (code, GET_MODE (x),
2750                                       XEXP (x, 0), XEXP (x, 1));
2751      break;
2752
2753    case 'b':
2754    case '3':
2755      new = simplify_ternary_operation (code, GET_MODE (x), op0_mode,
2756                                        XEXP (x, 0), XEXP (x, 1), XEXP (x, 2));
2757      break;
2758    }
2759
2760  if (new)
2761    validate_change (insn, loc, new, 1);
2762}
2763
2764/* Show that the registers modified no longer contain known constants.  We are
2765   called from note_stores with parts of the new insn.  */
2766
2767void
2768mark_stores (dest, x)
2769     rtx dest;
2770     rtx x;
2771{
2772  int regno = -1;
2773  enum machine_mode mode;
2774
2775  /* DEST is always the innermost thing set, except in the case of
2776     SUBREGs of hard registers.  */
2777
2778  if (GET_CODE (dest) == REG)
2779    regno = REGNO (dest), mode = GET_MODE (dest);
2780  else if (GET_CODE (dest) == SUBREG && GET_CODE (SUBREG_REG (dest)) == REG)
2781    {
2782      regno = REGNO (SUBREG_REG (dest)) + SUBREG_WORD (dest);
2783      mode = GET_MODE (SUBREG_REG (dest));
2784    }
2785
2786  if (regno >= 0)
2787    {
2788      int last_reg = (regno >= FIRST_PSEUDO_REGISTER ? regno
2789                      : regno + HARD_REGNO_NREGS (regno, mode) - 1);
2790      int i;
2791
2792      for (i = regno; i <= last_reg; i++)
2793        if (i < global_const_equiv_map_size)
2794          global_const_equiv_map[i] = 0;
2795    }
2796}
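/* E.g. (illustrative, assuming a 64-bit mode spans two hard regs): a
   store to (subreg:SI (reg:DI 2) 0) invalidates the equivalences
   recorded for hard regs 2 and 3, since the mode of the underlying
   register covers both.  */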

/* If any CONST expressions with RTX_INTEGRATED_P are present in the rtx
   pointed to by PX, they represent constants in the constant pool.
   Replace these with a new memory reference obtained from force_const_mem.
   Similarly, ADDRESS expressions with RTX_INTEGRATED_P represent the
   address of a constant pool entry.  Replace them with the address of
   a new constant pool entry obtained from force_const_mem.  */

static void
restore_constants (px)
     rtx *px;
{
  rtx x = *px;
  int i, j;
  char *fmt;

  if (x == 0)
    return;

  if (GET_CODE (x) == CONST_DOUBLE)
    {
      /* We have to make a new CONST_DOUBLE to ensure that we account for
	 it correctly.  Using the old CONST_DOUBLE_MEM data is wrong.  */
      if (GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT)
	{
	  REAL_VALUE_TYPE d;

	  REAL_VALUE_FROM_CONST_DOUBLE (d, x);
	  *px = CONST_DOUBLE_FROM_REAL_VALUE (d, GET_MODE (x));
	}
      else
	*px = immed_double_const (CONST_DOUBLE_LOW (x), CONST_DOUBLE_HIGH (x),
				  VOIDmode);
    }

  else if (RTX_INTEGRATED_P (x) && GET_CODE (x) == CONST)
    {
      restore_constants (&XEXP (x, 0));
      *px = validize_mem (force_const_mem (GET_MODE (x), XEXP (x, 0)));
    }
  else if (RTX_INTEGRATED_P (x) && GET_CODE (x) == SUBREG)
    {
      /* This must be (subreg/i:M1 (const/i:M2 ...) 0).  */
      rtx new = XEXP (SUBREG_REG (x), 0);

      restore_constants (&new);
      new = force_const_mem (GET_MODE (SUBREG_REG (x)), new);
      PUT_MODE (new, GET_MODE (x));
      *px = validize_mem (new);
    }
  else if (RTX_INTEGRATED_P (x) && GET_CODE (x) == ADDRESS)
    {
      restore_constants (&XEXP (x, 0));
      *px = XEXP (force_const_mem (GET_MODE (x), XEXP (x, 0)), 0);
    }
  else
    {
      fmt = GET_RTX_FORMAT (GET_CODE (x));
      for (i = 0; i < GET_RTX_LENGTH (GET_CODE (x)); i++)
	{
	  switch (*fmt++)
	    {
	    case 'E':
	      for (j = 0; j < XVECLEN (x, i); j++)
		restore_constants (&XVECEXP (x, i, j));
	      break;

	    case 'e':
	      restore_constants (&XEXP (x, i));
	      break;
	    }
	}
    }
}
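
/* For illustration only: a caller typically applies restore_constants
   to each saved insn's body and REG_NOTES, exactly as
   output_inline_function does further on in this file.  A minimal
   sketch, for some insn chain starting at FIRST:  */
#if 0
  for (insn = first; insn; insn = NEXT_INSN (insn))
    if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
      {
	restore_constants (&PATTERN (insn));
	restore_constants (&REG_NOTES (insn));
      }
#endif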

/* Given a pointer to some BLOCK node, if the BLOCK_ABSTRACT_ORIGIN for the
   given BLOCK node is NULL, set the BLOCK_ABSTRACT_ORIGIN for the node so
   that it points to the node itself, thus indicating that the node is its
   own (abstract) origin.  Additionally, if the BLOCK_ABSTRACT_ORIGIN for
   the given node is NULL, recursively descend the decl/block tree of which
   it is the root, and for each other ..._DECL or BLOCK node contained
   therein whose DECL_ABSTRACT_ORIGINs or BLOCK_ABSTRACT_ORIGINs are also
   still NULL, set *their* DECL_ABSTRACT_ORIGIN or BLOCK_ABSTRACT_ORIGIN
   values to point to themselves.  */

static void
set_block_origin_self (stmt)
     register tree stmt;
{
  if (BLOCK_ABSTRACT_ORIGIN (stmt) == NULL_TREE)
    {
      BLOCK_ABSTRACT_ORIGIN (stmt) = stmt;

      {
	register tree local_decl;

	for (local_decl = BLOCK_VARS (stmt);
	     local_decl != NULL_TREE;
	     local_decl = TREE_CHAIN (local_decl))
	  set_decl_origin_self (local_decl);	/* Potential recursion.  */
      }

      {
	register tree subblock;

	for (subblock = BLOCK_SUBBLOCKS (stmt);
	     subblock != NULL_TREE;
	     subblock = BLOCK_CHAIN (subblock))
	  set_block_origin_self (subblock);	/* Recurse.  */
      }
    }
}

/* Given a pointer to some ..._DECL node, if the DECL_ABSTRACT_ORIGIN for
   the given ..._DECL node is NULL, set the DECL_ABSTRACT_ORIGIN for the
   node so that it points to the node itself, thus indicating that the
   node represents its own (abstract) origin.  Additionally, if the
   DECL_ABSTRACT_ORIGIN for the given node is NULL, recursively descend
   the decl/block tree of which the given node is the root, and for
   each other ..._DECL or BLOCK node contained therein whose
   DECL_ABSTRACT_ORIGINs or BLOCK_ABSTRACT_ORIGINs are also still NULL,
   set *their* DECL_ABSTRACT_ORIGIN or BLOCK_ABSTRACT_ORIGIN values to
   point to themselves.  */

static void
set_decl_origin_self (decl)
     register tree decl;
{
  if (DECL_ABSTRACT_ORIGIN (decl) == NULL_TREE)
    {
      DECL_ABSTRACT_ORIGIN (decl) = decl;
      if (TREE_CODE (decl) == FUNCTION_DECL)
	{
	  register tree arg;

	  for (arg = DECL_ARGUMENTS (decl); arg; arg = TREE_CHAIN (arg))
	    DECL_ABSTRACT_ORIGIN (arg) = arg;
	  if (DECL_INITIAL (decl) != NULL_TREE
	      && DECL_INITIAL (decl) != error_mark_node)
	    set_block_origin_self (DECL_INITIAL (decl));
	}
    }
}
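
/* For illustration only: after set_decl_origin_self runs on a
   FUNCTION_DECL (as output_inline_function does below), and assuming no
   origins had been set earlier, the function and each of its arguments
   become their own abstract origins.  A minimal sketch of the
   observable effect:  */
#if 0
  set_decl_origin_self (fndecl);
  if (DECL_ABSTRACT_ORIGIN (fndecl) != fndecl)
    abort ();	/* Cannot happen; the node now points to itself.  */
  for (arg = DECL_ARGUMENTS (fndecl); arg; arg = TREE_CHAIN (arg))
    if (DECL_ABSTRACT_ORIGIN (arg) != arg)
      abort ();	/* Likewise for every parameter.  */
#endif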

/* Given a pointer to some BLOCK node, and a boolean value to set the
   "abstract" flags to, set that value into the BLOCK_ABSTRACT flag for
   the given block, and for all local decls and all local sub-blocks
   (recursively) which are contained therein.  */

static void
set_block_abstract_flags (stmt, setting)
     register tree stmt;
     register int setting;
{
  BLOCK_ABSTRACT (stmt) = setting;

  {
    register tree local_decl;

    for (local_decl = BLOCK_VARS (stmt);
	 local_decl != NULL_TREE;
	 local_decl = TREE_CHAIN (local_decl))
      set_decl_abstract_flags (local_decl, setting);
  }

  {
    register tree subblock;

    for (subblock = BLOCK_SUBBLOCKS (stmt);
	 subblock != NULL_TREE;
	 subblock = BLOCK_CHAIN (subblock))
      set_block_abstract_flags (subblock, setting);
  }
}

/* Given a pointer to some ..._DECL node, and a boolean value to set the
   "abstract" flags to, set that value into the DECL_ABSTRACT flag for the
   given decl, and (in the case where the decl is a FUNCTION_DECL) also
   set the abstract flags for all of the parameters, local vars, local
   blocks and sub-blocks (recursively) to the same setting.  */

void
set_decl_abstract_flags (decl, setting)
     register tree decl;
     register int setting;
{
  DECL_ABSTRACT (decl) = setting;
  if (TREE_CODE (decl) == FUNCTION_DECL)
    {
      register tree arg;

      for (arg = DECL_ARGUMENTS (decl); arg; arg = TREE_CHAIN (arg))
	DECL_ABSTRACT (arg) = setting;
      if (DECL_INITIAL (decl) != NULL_TREE
	  && DECL_INITIAL (decl) != error_mark_node)
	set_block_abstract_flags (DECL_INITIAL (decl), setting);
    }
}
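
/* For illustration only: a debugging back end that emits the abstract
   instance of an inline function would typically bracket its output
   with a pair of calls like the following, so the nodes read as
   abstract only while the abstract information is being written.  The
   emit_abstract_instance function here is hypothetical; it stands in
   for whatever routine actually writes the debug records.  */
#if 0
  set_decl_abstract_flags (decl, 1);
  emit_abstract_instance (decl);	/* Hypothetical debug-info writer.  */
  set_decl_abstract_flags (decl, 0);
#endif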

/* Output the assembly language code for the function FNDECL
   from its DECL_SAVED_INSNS.  Used for inline functions that are output
   at end of compilation instead of where they came in the source.  */

void
output_inline_function (fndecl)
     tree fndecl;
{
  rtx head;
  rtx last;
  int save_flag_no_inline = flag_no_inline;

  if (output_bytecode)
    {
      warning ("`inline' ignored for bytecode output");
      return;
    }

  /* Things we allocate from here on are part of this function, not
     permanent.  */
  temporary_allocation ();

  head = DECL_SAVED_INSNS (fndecl);
  current_function_decl = fndecl;

  /* This call is only used to initialize global variables.  */
  init_function_start (fndecl, "lossage", 1);

  /* Redo parameter determinations in case the FUNCTION_...
     macros took machine-specific actions that need to be redone.  */
  assign_parms (fndecl, 1);

  /* Set stack frame size.  */
  assign_stack_local (BLKmode, DECL_FRAME_SIZE (fndecl), 0);

  restore_reg_data (FIRST_PARM_INSN (head));

  stack_slot_list = STACK_SLOT_LIST (head);
  forced_labels = FORCED_LABELS (head);

  if (FUNCTION_FLAGS (head) & FUNCTION_FLAGS_CALLS_ALLOCA)
    current_function_calls_alloca = 1;

  if (FUNCTION_FLAGS (head) & FUNCTION_FLAGS_CALLS_SETJMP)
    current_function_calls_setjmp = 1;

  if (FUNCTION_FLAGS (head) & FUNCTION_FLAGS_CALLS_LONGJMP)
    current_function_calls_longjmp = 1;

  if (FUNCTION_FLAGS (head) & FUNCTION_FLAGS_RETURNS_STRUCT)
    current_function_returns_struct = 1;

  if (FUNCTION_FLAGS (head) & FUNCTION_FLAGS_RETURNS_PCC_STRUCT)
    current_function_returns_pcc_struct = 1;

  if (FUNCTION_FLAGS (head) & FUNCTION_FLAGS_NEEDS_CONTEXT)
    current_function_needs_context = 1;

  if (FUNCTION_FLAGS (head) & FUNCTION_FLAGS_HAS_NONLOCAL_LABEL)
    current_function_has_nonlocal_label = 1;

  if (FUNCTION_FLAGS (head) & FUNCTION_FLAGS_RETURNS_POINTER)
    current_function_returns_pointer = 1;

  if (FUNCTION_FLAGS (head) & FUNCTION_FLAGS_USES_CONST_POOL)
    current_function_uses_const_pool = 1;

  if (FUNCTION_FLAGS (head) & FUNCTION_FLAGS_USES_PIC_OFFSET_TABLE)
    current_function_uses_pic_offset_table = 1;

  current_function_outgoing_args_size = OUTGOING_ARGS_SIZE (head);
  current_function_pops_args = POPS_ARGS (head);

  /* Decrementing immediate_size_expand is the only thing that the
     expand_function_end call that used to be here actually did, and
     that call can cause problems.  */
  immediate_size_expand--;

  /* Find last insn and rebuild the constant pool.  */
  for (last = FIRST_PARM_INSN (head);
       NEXT_INSN (last); last = NEXT_INSN (last))
    {
      if (GET_RTX_CLASS (GET_CODE (last)) == 'i')
	{
	  restore_constants (&PATTERN (last));
	  restore_constants (&REG_NOTES (last));
	}
    }

  set_new_first_and_last_insn (FIRST_PARM_INSN (head), last);
  set_new_first_and_last_label_num (FIRST_LABELNO (head), LAST_LABELNO (head));

  /* We must have already output DWARF debugging information for the
     original (abstract) inline function declaration/definition, so
     we want to make sure that the debugging information we generate
     for this special instance of the inline function refers back to
     the information we already generated.  To make sure that happens,
     we simply have to set the DECL_ABSTRACT_ORIGIN for the function
     node (and for all of the local ..._DECL nodes which are its children)
     so that they all point to themselves.  */

  set_decl_origin_self (fndecl);

  /* We're not deferring this any longer.  */
  DECL_DEFER_OUTPUT (fndecl) = 0;

  /* Integrating function calls isn't safe anymore, so turn on
     flag_no_inline.  */
  flag_no_inline = 1;

  /* Compile this function all the way down to assembly code.  */
  rest_of_compilation (fndecl);

  /* Reset flag_no_inline to its original value.  */
  flag_no_inline = save_flag_no_inline;

  current_function_decl = 0;
}
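
/* For illustration only: a hypothetical end-of-compilation driver for
   deferred inline functions.  The deferred_inlines list is an
   assumption, not something this file provides; the real bookkeeping
   lives in the front ends and toplev.c.  */
#if 0
  for (decl = deferred_inlines; decl; decl = TREE_CHAIN (decl))
    if (DECL_SAVED_INSNS (decl))
      output_inline_function (decl);	/* Defined above.  */
#endif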