source: trunk/third/gcc/calls.c @ 8834

Revision 8834, 101.5 KB checked in by ghudson, 28 years ago
This commit was generated by cvs2svn to compensate for changes in r8833, which included commits to RCS files with non-trunk default branches.
1/* Convert function calls to rtl insns, for GNU C compiler.
2   Copyright (C) 1989, 1992, 1993, 1994, 1995 Free Software Foundation, Inc.
3
4This file is part of GNU CC.
5
6GNU CC is free software; you can redistribute it and/or modify
7it under the terms of the GNU General Public License as published by
8the Free Software Foundation; either version 2, or (at your option)
9any later version.
10
11GNU CC is distributed in the hope that it will be useful,
12but WITHOUT ANY WARRANTY; without even the implied warranty of
13MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
14GNU General Public License for more details.
15
16You should have received a copy of the GNU General Public License
17along with GNU CC; see the file COPYING.  If not, write to
18the Free Software Foundation, 59 Temple Place - Suite 330,
19Boston, MA 02111-1307, USA.  */
20
21#include "config.h"
22#include "rtl.h"
23#include "tree.h"
24#include "flags.h"
25#include "expr.h"
26#ifdef __STDC__
27#include <stdarg.h>
28#else
29#include <varargs.h>
30#endif
31#include "insn-flags.h"
32
33/* Decide whether a function's arguments should be processed
34   from first to last or from last to first.
35
36   They should if the stack and args grow in opposite directions, but
37   only if we have push insns.  */
38
39#ifdef PUSH_ROUNDING
40
41#if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
42#define PUSH_ARGS_REVERSED      /* If it's last to first */
43#endif
44
45#endif
46
47/* Like STACK_BOUNDARY but in units of bytes, not bits.  */
48#define STACK_BYTES (STACK_BOUNDARY / BITS_PER_UNIT)
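
/* A minimal sketch of the rounding that STACK_BYTES serves: expand_call
   rounds the constant part of the argument-block size up to a multiple of
   it, as in the args_size.constant computation further down.  The helper
   name is hypothetical and the block is compiled out.  */
#if 0
static int
round_to_stack_bytes (size)
     int size;
{
  return ((size + (STACK_BYTES - 1)) / STACK_BYTES) * STACK_BYTES;
}
#endif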
49
50/* Data structure and subroutines used within expand_call.  */
51
52struct arg_data
53{
54  /* Tree node for this argument.  */
55  tree tree_value;
56  /* Mode for value; TYPE_MODE unless promoted.  */
57  enum machine_mode mode;
58  /* Current RTL value for argument, or 0 if it isn't precomputed.  */
59  rtx value;
60  /* Initially-computed RTL value for argument; only for const functions.  */
61  rtx initial_value;
62  /* Register to pass this argument in, 0 if passed on stack, or an
63     EXPR_LIST if the arg is to be copied into multiple different
64     registers.  */
65  rtx reg;
66  /* If REG was promoted from the actual mode of the argument expression,
67     indicates whether the promotion is sign- or zero-extended.  */
68  int unsignedp;
69  /* Number of registers to use.  0 means put the whole arg in registers.
70     Also 0 if not passed in registers.  */
71  int partial;
72  /* Non-zero if argument must be passed on stack.
73     Note that some arguments may be passed on the stack
74     even though pass_on_stack is zero, just because FUNCTION_ARG says so.
75     pass_on_stack identifies arguments that *cannot* go in registers.  */
76  int pass_on_stack;
77  /* Offset of this argument from beginning of stack-args.  */
78  struct args_size offset;
79  /* Similar, but offset to the start of the stack slot.  Different from
80     OFFSET if this arg pads downward.  */
81  struct args_size slot_offset;
82  /* Size of this argument on the stack, rounded up for any padding it gets;
83     parts of the argument passed in registers do not count.
84     If REG_PARM_STACK_SPACE is defined, then register parms
85     are counted here as well.  */
86  struct args_size size;
87  /* Location on the stack at which parameter should be stored.  The store
88     has already been done if STACK == VALUE.  */
89  rtx stack;
90  /* Location on the stack of the start of this argument slot.  This can
91     differ from STACK if this arg pads downward.  This location is known
92     to be aligned to FUNCTION_ARG_BOUNDARY.  */
93  rtx stack_slot;
94#ifdef ACCUMULATE_OUTGOING_ARGS
95  /* Place that this stack area has been saved, if needed.  */
96  rtx save_area;
97#endif
98#ifdef STRICT_ALIGNMENT
99  /* If an argument's alignment does not permit direct copying into registers,
100     copy in smaller-sized pieces into pseudos.  These are stored in a
101     block pointed to by this field.  The next field says how many
102     word-sized pseudos we made.  */
103  rtx *aligned_regs;
104  int n_aligned_regs;
105#endif
106};
107
108#ifdef ACCUMULATE_OUTGOING_ARGS
109/* A vector of one char per byte of stack space.  A byte is non-zero if
110   the corresponding stack location has been used.
111   This vector is used to prevent a function call within an argument from
112   clobbering any stack already set up.  */
113static char *stack_usage_map;
114
115/* Size of STACK_USAGE_MAP.  */
116static int highest_outgoing_arg_in_use;
117
118/* stack_arg_under_construction is nonzero when an argument may be
119   initialized with a constructor call (including a C function that
120   returns a BLKmode struct) and expand_call must take special action
121   to make sure the object being constructed does not overlap the
122   argument list for the constructor call.  */
123int stack_arg_under_construction;
124#endif
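
/* A minimal sketch, assuming ACCUMULATE_OUTGOING_ARGS, of how the map above
   can be consulted: a byte of the outgoing-argument area is free only while
   its entry in stack_usage_map is zero (compare the scan over
   reg_parm_stack_space in expand_call below).  The helper is hypothetical
   and compiled out.  */
#if 0
static int
stack_region_in_use (low, high)
     int low, high;
{
  int i;

  for (i = low; i <= high; i++)
    if (i < highest_outgoing_arg_in_use && stack_usage_map[i] != 0)
      return 1;
  return 0;
}
#endif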
125
126static int calls_function       PROTO((tree, int));
127static int calls_function_1     PROTO((tree, int));
128static void emit_call_1         PROTO((rtx, tree, tree, int, int, rtx, rtx,
129                                       int, rtx, int));
130static void store_one_arg       PROTO ((struct arg_data *, rtx, int, int,
131                                        tree, int));
132
133/* If WHICH is 1, return 1 if EXP contains a call to the built-in function
134   `alloca'.
135
136   If WHICH is 0, return 1 if EXP contains a call to any function.
137   Actually, we need only return 1 if evaluating EXP would require pushing
138   arguments on the stack, but that is too difficult to compute, so we just
139   assume any function call might require the stack.  */
140
141static tree calls_function_save_exprs;
142
143static int
144calls_function (exp, which)
145     tree exp;
146     int which;
147{
148  int val;
149  calls_function_save_exprs = 0;
150  val = calls_function_1 (exp, which);
151  calls_function_save_exprs = 0;
152  return val;
153}
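
/* A minimal usage sketch (hypothetical names; the real call sites are in
   expand_call below): WHICH == 0 asks whether evaluating the expression
   might call any function at all, WHICH == 1 asks only about alloca.  */
#if 0
  may_use_stack = calls_function (arg_tree, 0);
  may_call_alloca = calls_function (arg_tree, 1);
#endif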
154
155static int
156calls_function_1 (exp, which)
157     tree exp;
158     int which;
159{
160  register int i;
161  enum tree_code code = TREE_CODE (exp);
162  int type = TREE_CODE_CLASS (code);
163  int length = tree_code_length[(int) code];
164
165  /* If this code is language-specific, we don't know what it will do.  */
166  if ((int) code >= NUM_TREE_CODES)
167    return 1;
168
169  /* Only expressions and references can contain calls.  */
170  if (type != 'e' && type != '<' && type != '1' && type != '2' && type != 'r'
171      && type != 'b')
172    return 0;
173
174  switch (code)
175    {
176    case CALL_EXPR:
177      if (which == 0)
178        return 1;
179      else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
180               && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
181                   == FUNCTION_DECL))
182        {
183          tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
184
185          if ((DECL_BUILT_IN (fndecl)
186               && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_ALLOCA)
187              || (DECL_SAVED_INSNS (fndecl)
188                  && (FUNCTION_FLAGS (DECL_SAVED_INSNS (fndecl))
189                      & FUNCTION_FLAGS_CALLS_ALLOCA)))
190            return 1;
191        }
192
193      /* Third operand is RTL.  */
194      length = 2;
195      break;
196
197    case SAVE_EXPR:
198      if (SAVE_EXPR_RTL (exp) != 0)
199        return 0;
200      if (value_member (exp, calls_function_save_exprs))
201        return 0;
202      calls_function_save_exprs = tree_cons (NULL_TREE, exp,
203                                             calls_function_save_exprs);
204      return (TREE_OPERAND (exp, 0) != 0
205              && calls_function_1 (TREE_OPERAND (exp, 0), which));
206
207    case BLOCK:
208      {
209        register tree local;
210
211        for (local = BLOCK_VARS (exp); local; local = TREE_CHAIN (local))
212          if (DECL_INITIAL (local) != 0
213              && calls_function_1 (DECL_INITIAL (local), which))
214            return 1;
215      }
216      {
217        register tree subblock;
218
219        for (subblock = BLOCK_SUBBLOCKS (exp);
220             subblock;
221             subblock = TREE_CHAIN (subblock))
222          if (calls_function_1 (subblock, which))
223            return 1;
224      }
225      return 0;
226
227    case METHOD_CALL_EXPR:
228      length = 3;
229      break;
230
231    case WITH_CLEANUP_EXPR:
232      length = 1;
233      break;
234
235    case RTL_EXPR:
236      return 0;
237    }
238
239  for (i = 0; i < length; i++)
240    if (TREE_OPERAND (exp, i) != 0
241        && calls_function_1 (TREE_OPERAND (exp, i), which))
242      return 1;
243
244  return 0;
245}
246
247/* Force FUNEXP into a form suitable for the address of a CALL,
248   and return that as an rtx.  Also load the static chain register
249   if FNDECL is a nested function.
250
251   CALL_FUSAGE points to a variable holding the prospective
252   CALL_INSN_FUNCTION_USAGE information.  */
253
254rtx
255prepare_call_address (funexp, fndecl, call_fusage, reg_parm_seen)
256     rtx funexp;
257     tree fndecl;
258     rtx *call_fusage;
259     int reg_parm_seen;
260{
261  rtx static_chain_value = 0;
262
263  funexp = protect_from_queue (funexp, 0);
264
265  if (fndecl != 0)
266    /* Get possible static chain value for nested function in C. */
267    static_chain_value = lookup_static_chain (fndecl);
268
269  /* Make a valid memory address and copy constants thru pseudo-regs,
270     but not for a constant address if -fno-function-cse.  */
271  if (GET_CODE (funexp) != SYMBOL_REF)
272    funexp =
273#ifdef SMALL_REGISTER_CLASSES
274    /* If we are using registers for parameters, force the
275         function address into a register now.  */
276      reg_parm_seen ? force_not_mem (memory_address (FUNCTION_MODE, funexp))
277                    :
278#endif
279                      memory_address (FUNCTION_MODE, funexp);
280  else
281    {
282#ifndef NO_FUNCTION_CSE
283      if (optimize && ! flag_no_function_cse)
284#ifdef NO_RECURSIVE_FUNCTION_CSE
285        if (fndecl != current_function_decl)
286#endif
287          funexp = force_reg (Pmode, funexp);
288#endif
289    }
290
291  if (static_chain_value != 0)
292    {
293      emit_move_insn (static_chain_rtx, static_chain_value);
294
295      if (GET_CODE (static_chain_rtx) == REG)
296        use_reg (call_fusage, static_chain_rtx);
297    }
298
299  return funexp;
300}
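
/* A sketch of the intended calling pattern (hypothetical context; in this
   file the caller is expand_call): the result replaces FUNEXP, and
   CALL_FUSAGE accumulates a USE of the static chain register if one was
   loaded.  */
#if 0
  funexp = prepare_call_address (funexp, fndecl, &call_fusage, reg_parm_seen);
#endif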
301
302/* Generate instructions to call function FUNEXP,
303   and optionally pop the results.
304   The CALL_INSN is the first insn generated.
305
306   FNDECL is the declaration node of the function.  This is given to the
307   macro RETURN_POPS_ARGS to determine whether this function pops its own args.
308
309   FUNTYPE is the data type of the function, or, for a library call,
310   the identifier for the name of the call.  This is given to the
311   macro RETURN_POPS_ARGS to determine whether this function pops its own args.
312
313   STACK_SIZE is the number of bytes of arguments on the stack,
314   rounded up to STACK_BOUNDARY; zero if the size is variable.
315   This is both to put into the call insn and
316   to generate explicit popping code if necessary.
317
318   STRUCT_VALUE_SIZE is the number of bytes wanted in a structure value.
319   It is zero if this call doesn't want a structure value.
320
321   NEXT_ARG_REG is the rtx that results from executing
322     FUNCTION_ARG (args_so_far, VOIDmode, void_type_node, 1)
323   just after all the args have had their registers assigned.
324   This could be whatever you like, but normally it is the first
325   arg-register beyond those used for args in this call,
326   or 0 if all the arg-registers are used in this call.
327   It is passed on to `gen_call' so you can put this info in the call insn.
328
329   VALREG is a hard register in which a value is returned,
330   or 0 if the call does not return a value.
331
332   OLD_INHIBIT_DEFER_POP is the value that `inhibit_defer_pop' had before
333   the args to this call were processed.
334   We restore `inhibit_defer_pop' to that value.
335
336   CALL_FUSAGE is either empty or an EXPR_LIST of USE expressions that
337   denote registers used by the called function.
338
339   IS_CONST is true if this is a `const' call.  */
340
341static void
342emit_call_1 (funexp, fndecl, funtype, stack_size, struct_value_size,
343             next_arg_reg, valreg, old_inhibit_defer_pop, call_fusage,
344             is_const)
345     rtx funexp;
346     tree fndecl;
347     tree funtype;
348     int stack_size;
349     int struct_value_size;
350     rtx next_arg_reg;
351     rtx valreg;
352     int old_inhibit_defer_pop;
353     rtx call_fusage;
354     int is_const;
355{
356  rtx stack_size_rtx = GEN_INT (stack_size);
357  rtx struct_value_size_rtx = GEN_INT (struct_value_size);
358  rtx call_insn;
359  int already_popped = 0;
360
361  /* Ensure address is valid.  SYMBOL_REF is already valid, so no need,
362     and we don't want to load it into a register as an optimization,
363     because prepare_call_address already did it if it should be done.  */
364  if (GET_CODE (funexp) != SYMBOL_REF)
365    funexp = memory_address (FUNCTION_MODE, funexp);
366
367#ifndef ACCUMULATE_OUTGOING_ARGS
368#if defined (HAVE_call_pop) && defined (HAVE_call_value_pop)
369  if (HAVE_call_pop && HAVE_call_value_pop
370      && (RETURN_POPS_ARGS (fndecl, funtype, stack_size) > 0
371          || stack_size == 0))
372    {
373      rtx n_pop = GEN_INT (RETURN_POPS_ARGS (fndecl, funtype, stack_size));
374      rtx pat;
375
376      /* If this subroutine pops its own args, record that in the call insn
377         if possible, for the sake of frame pointer elimination.  */
378
379      if (valreg)
380        pat = gen_call_value_pop (valreg,
381                                  gen_rtx (MEM, FUNCTION_MODE, funexp),
382                                  stack_size_rtx, next_arg_reg, n_pop);
383      else
384        pat = gen_call_pop (gen_rtx (MEM, FUNCTION_MODE, funexp),
385                            stack_size_rtx, next_arg_reg, n_pop);
386
387      emit_call_insn (pat);
388      already_popped = 1;
389    }
390  else
391#endif
392#endif
393
394#if defined (HAVE_call) && defined (HAVE_call_value)
395  if (HAVE_call && HAVE_call_value)
396    {
397      if (valreg)
398        emit_call_insn (gen_call_value (valreg,
399                                        gen_rtx (MEM, FUNCTION_MODE, funexp),
400                                        stack_size_rtx, next_arg_reg,
401                                        NULL_RTX));
402      else
403        emit_call_insn (gen_call (gen_rtx (MEM, FUNCTION_MODE, funexp),
404                                  stack_size_rtx, next_arg_reg,
405                                  struct_value_size_rtx));
406    }
407  else
408#endif
409    abort ();
410
411  /* Find the CALL insn we just emitted.  */
412  for (call_insn = get_last_insn ();
413       call_insn && GET_CODE (call_insn) != CALL_INSN;
414       call_insn = PREV_INSN (call_insn))
415    ;
416
417  if (! call_insn)
418    abort ();
419
420  /* Put the register usage information on the CALL.  If there is already
421     some usage information, put ours at the end.  */
422  if (CALL_INSN_FUNCTION_USAGE (call_insn))
423    {
424      rtx link;
425
426      for (link = CALL_INSN_FUNCTION_USAGE (call_insn); XEXP (link, 1) != 0;
427           link = XEXP (link, 1))
428        ;
429
430      XEXP (link, 1) = call_fusage;
431    }
432  else
433    CALL_INSN_FUNCTION_USAGE (call_insn) = call_fusage;
434
435  /* If this is a const call, then set the insn's unchanging bit.  */
436  if (is_const)
437    CONST_CALL_P (call_insn) = 1;
438
439  /* Restore this now, so that we do defer pops for this call's args
440     if the context of the call as a whole permits.  */
441  inhibit_defer_pop = old_inhibit_defer_pop;
442
443#ifndef ACCUMULATE_OUTGOING_ARGS
444  /* If returning from the subroutine does not automatically pop the args,
445     we need an instruction to pop them sooner or later.
446     Perhaps do it now; perhaps just record how much space to pop later.
447
448     If returning from the subroutine does pop the args, indicate that the
449     stack pointer will be changed.  */
450
451  if (stack_size != 0 && RETURN_POPS_ARGS (fndecl, funtype, stack_size) > 0)
452    {
453      if (!already_popped)
454        CALL_INSN_FUNCTION_USAGE (call_insn) =
455           gen_rtx (EXPR_LIST, VOIDmode,
456                    gen_rtx (CLOBBER, VOIDmode, stack_pointer_rtx),
457                    CALL_INSN_FUNCTION_USAGE (call_insn));
458      stack_size -= RETURN_POPS_ARGS (fndecl, funtype, stack_size);
459      stack_size_rtx = GEN_INT (stack_size);
460    }
461
462  if (stack_size != 0)
463    {
464      if (flag_defer_pop && inhibit_defer_pop == 0 && !is_const)
465        pending_stack_adjust += stack_size;
466      else
467        adjust_stack (stack_size_rtx);
468    }
469#endif
470}
471
472/* Generate all the code for a function call
473   and return an rtx for its value.
474   Store the value in TARGET (specified as an rtx) if convenient.
475   If the value is stored in TARGET then TARGET is returned.
476   If IGNORE is nonzero, then we ignore the value of the function call.  */
477
478rtx
479expand_call (exp, target, ignore)
480     tree exp;
481     rtx target;
482     int ignore;
483{
484  /* List of actual parameters.  */
485  tree actparms = TREE_OPERAND (exp, 1);
486  /* RTX for the function to be called.  */
487  rtx funexp;
488  /* Tree node for the function to be called (not the address!).  */
489  tree funtree;
490  /* Data type of the function.  */
491  tree funtype;
492  /* Declaration of the function being called,
493     or 0 if the function is computed (not known by name).  */
494  tree fndecl = 0;
495  char *name = 0;
496
497  /* Register in which non-BLKmode value will be returned,
498     or 0 if no value or if value is BLKmode.  */
499  rtx valreg;
500  /* Address where we should return a BLKmode value;
501     0 if value not BLKmode.  */
502  rtx structure_value_addr = 0;
503  /* Nonzero if that address is being passed by treating it as
504     an extra, implicit first parameter.  Otherwise,
505     it is passed by being copied directly into struct_value_rtx.  */
506  int structure_value_addr_parm = 0;
507  /* Size of aggregate value wanted, or zero if none wanted
508     or if we are using the non-reentrant PCC calling convention
509     or expecting the value in registers.  */
510  int struct_value_size = 0;
511  /* Nonzero if called function returns an aggregate in memory PCC style,
512     by returning the address of where to find it.  */
513  int pcc_struct_value = 0;
514
515  /* Number of actual parameters in this call, including struct value addr.  */
516  int num_actuals;
517  /* Number of named args.  Args after this are anonymous ones
518     and they must all go on the stack.  */
519  int n_named_args;
520  /* Count arg position in order args appear.  */
521  int argpos;
522
523  /* Vector of information about each argument.
524     Arguments are numbered in the order they will be pushed,
525     not the order they are written.  */
526  struct arg_data *args;
527
528  /* Total size in bytes of all the stack-parms scanned so far.  */
529  struct args_size args_size;
530  /* Size of arguments before any adjustments (such as rounding).  */
531  struct args_size original_args_size;
532  /* Data on reg parms scanned so far.  */
533  CUMULATIVE_ARGS args_so_far;
534  /* Nonzero if a reg parm has been scanned.  */
535  int reg_parm_seen;
536  /* Nonzero if this is an indirect function call.  */
537  int current_call_is_indirect = 0;
538
539  /* Nonzero if we must avoid push-insns in the args for this call.
540     If stack space is allocated for register parameters, but not by the
541     caller, then it is preallocated in the fixed part of the stack frame.
542     So the entire argument block must then be preallocated (i.e., we
543     ignore PUSH_ROUNDING in that case).  */
544
545#if defined(REG_PARM_STACK_SPACE) && ! defined(OUTGOING_REG_PARM_STACK_SPACE)
546  int must_preallocate = 1;
547#else
548#ifdef PUSH_ROUNDING
549  int must_preallocate = 0;
550#else
551  int must_preallocate = 1;
552#endif
553#endif
554
555  /* Size of the stack reserved for parameter registers.  */
556  int reg_parm_stack_space = 0;
557
558  /* 1 if scanning parms front to back, -1 if scanning back to front.  */
559  int inc;
560  /* Address of space preallocated for stack parms
561     (on machines that lack push insns), or 0 if space not preallocated.  */
562  rtx argblock = 0;
563
564  /* Nonzero if it is plausible that this is a call to alloca.  */
565  int may_be_alloca;
566  /* Nonzero if this is a call to setjmp or a related function.  */
567  int returns_twice;
568  /* Nonzero if this is a call to `longjmp'.  */
569  int is_longjmp;
570  /* Nonzero if this is a call to an inline function.  */
571  int is_integrable = 0;
572  /* Nonzero if this is a call to a `const' function.
573     Note that only explicitly named functions are handled as `const' here.  */
574  int is_const = 0;
575  /* Nonzero if this is a call to a `volatile' function.  */
576  int is_volatile = 0;
577#if defined(ACCUMULATE_OUTGOING_ARGS) && defined(REG_PARM_STACK_SPACE)
578  /* Define the boundary of the register parm stack space that needs to be
579     saved, if any.  */
580  int low_to_save = -1, high_to_save;
581  rtx save_area = 0;            /* Place that it is saved */
582#endif
583
584#ifdef ACCUMULATE_OUTGOING_ARGS
585  int initial_highest_arg_in_use = highest_outgoing_arg_in_use;
586  char *initial_stack_usage_map = stack_usage_map;
587#endif
588
589  rtx old_stack_level = 0;
590  int old_pending_adj = 0;
591  int old_stack_arg_under_construction;
592  int old_inhibit_defer_pop = inhibit_defer_pop;
593  tree old_cleanups = cleanups_this_call;
594  rtx call_fusage = 0;
595  register tree p;
596  register int i, j;
597
598  /* See if we can find a DECL-node for the actual function.
599     As a result, decide whether this is a call to an integrable function.  */
600
601  p = TREE_OPERAND (exp, 0);
602  if (TREE_CODE (p) == ADDR_EXPR)
603    {
604      fndecl = TREE_OPERAND (p, 0);
605      if (TREE_CODE (fndecl) != FUNCTION_DECL)
606        fndecl = 0;
607      else
608        {
609          if (!flag_no_inline
610              && fndecl != current_function_decl
611              && DECL_INLINE (fndecl)
612              && DECL_SAVED_INSNS (fndecl))
613            is_integrable = 1;
614          else if (! TREE_ADDRESSABLE (fndecl))
615            {
616              /* In case this function later becomes inlinable,
617                 record that there was already a non-inline call to it.
618
619                 Use abstraction instead of setting TREE_ADDRESSABLE
620                 directly.  */
621              if (DECL_INLINE (fndecl) && warn_inline && !flag_no_inline)
622                {
623                  warning_with_decl (fndecl, "can't inline call to `%s'");
624                  warning ("called from here");
625                }
626              mark_addressable (fndecl);
627            }
628
629          if (TREE_READONLY (fndecl) && ! TREE_THIS_VOLATILE (fndecl)
630              && TYPE_MODE (TREE_TYPE (exp)) != VOIDmode)
631            is_const = 1;
632
633          if (TREE_THIS_VOLATILE (fndecl))
634            is_volatile = 1;
635        }
636    }
637
638  /* If we don't have a specific function to call, see if we have a
639     constant or `noreturn' function from the type.  */
640  if (fndecl == 0)
641    {
642      is_const = TREE_READONLY (TREE_TYPE (TREE_TYPE (p)));
643      is_volatile = TREE_THIS_VOLATILE (TREE_TYPE (TREE_TYPE (p)));
644    }
645
646#ifdef REG_PARM_STACK_SPACE
647#ifdef MAYBE_REG_PARM_STACK_SPACE
648  reg_parm_stack_space = MAYBE_REG_PARM_STACK_SPACE;
649#else
650  reg_parm_stack_space = REG_PARM_STACK_SPACE (fndecl);
651#endif
652#endif
653
654  /* Warn if this value is an aggregate type,
655     regardless of which calling convention we are using for it.  */
656  if (warn_aggregate_return && AGGREGATE_TYPE_P (TREE_TYPE (exp)))
657    warning ("function call has aggregate value");
658
659  /* Set up a place to return a structure.  */
660
661  /* Cater to broken compilers.  */
662  if (aggregate_value_p (exp))
663    {
664      /* This call returns a big structure.  */
665      is_const = 0;
666
667#ifdef PCC_STATIC_STRUCT_RETURN
668      {
669        pcc_struct_value = 1;
670        /* Easier than making that case work right.  */
671        if (is_integrable)
672          {
673            /* In case this is a static function, note that it has been
674               used.  */
675            if (! TREE_ADDRESSABLE (fndecl))
676              mark_addressable (fndecl);
677            is_integrable = 0;
678          }
679      }
680#else /* not PCC_STATIC_STRUCT_RETURN */
681      {
682        struct_value_size = int_size_in_bytes (TREE_TYPE (exp));
683
684        if (target && GET_CODE (target) == MEM)
685          structure_value_addr = XEXP (target, 0);
686        else
687          {
688            /* Assign a temporary on the stack to hold the value.  */
689
690            /* For variable-sized objects, we must be called with a target
691               specified.  If we were to allocate space on the stack here,
692               we would have no way of knowing when to free it.  */
693
694            if (struct_value_size < 0)
695              abort ();
696
697            structure_value_addr
698              = XEXP (assign_stack_temp (BLKmode, struct_value_size, 1), 0);
699            MEM_IN_STRUCT_P (structure_value_addr)
700              = AGGREGATE_TYPE_P (TREE_TYPE (exp));
701            target = 0;
702          }
703      }
704#endif /* not PCC_STATIC_STRUCT_RETURN */
705    }
706
707  /* If called function is inline, try to integrate it.  */
708
709  if (is_integrable)
710    {
711      rtx temp;
712      rtx before_call = get_last_insn ();
713
714      temp = expand_inline_function (fndecl, actparms, target,
715                                     ignore, TREE_TYPE (exp),
716                                     structure_value_addr);
717
718      /* If inlining succeeded, return.  */
719      if ((HOST_WIDE_INT) temp != -1)
720        {
721          if (flag_short_temps)
722            {
723              /* Perform all cleanups needed for the arguments of this
724                 call (i.e. destructors in C++).  It is ok if these
725                 destructors clobber RETURN_VALUE_REG, because the
726                 only time we care about this is when TARGET is that
727                 register.  But in C++, we take care to never return
728                 that register directly.  */
729              expand_cleanups_to (old_cleanups);
730            }
731
732#ifdef ACCUMULATE_OUTGOING_ARGS
733          /* If the outgoing argument list must be preserved, push
734             the stack before executing the inlined function if it
735             makes any calls.  */
736
737          for (i = reg_parm_stack_space - 1; i >= 0; i--)
738            if (i < highest_outgoing_arg_in_use && stack_usage_map[i] != 0)
739              break;
740
741          if (stack_arg_under_construction || i >= 0)
742            {
743              rtx insn = NEXT_INSN (before_call), seq;
744
745              /* Look for a call in the inline function code.
746                 If OUTGOING_ARGS_SIZE (DECL_SAVED_INSNS (fndecl)) is
747                 nonzero then there is a call and it is not necessary
748                 to scan the insns.  */
749
750              if (OUTGOING_ARGS_SIZE (DECL_SAVED_INSNS (fndecl)) == 0)
751                for (; insn; insn = NEXT_INSN (insn))
752                  if (GET_CODE (insn) == CALL_INSN)
753                    break;
754
755              if (insn)
756                {
757                  /* Reserve enough stack space so that the largest
758                     argument list of any function call in the inline
759                     function does not overlap the argument list being
760                     evaluated.  This is usually an overestimate because
761                     allocate_dynamic_stack_space reserves space for an
762                     outgoing argument list in addition to the requested
763                     space, but there is no way to ask for stack space such
764                     that an argument list of a certain length can be
765                     safely constructed.  */
766
767                  int adjust = OUTGOING_ARGS_SIZE (DECL_SAVED_INSNS (fndecl));
768#ifdef REG_PARM_STACK_SPACE
769                  /* Add the stack space reserved for register arguments
770                     in the inline function.  What is really needed is the
771                     largest value of reg_parm_stack_space in the inline
772                     function, but that is not available.  Using the current
773                     value of reg_parm_stack_space is wrong, but gives
774                     correct results on all supported machines.  */
775                  adjust += reg_parm_stack_space;
776#endif
777                  start_sequence ();
778                  emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX);
779                  allocate_dynamic_stack_space (GEN_INT (adjust),
780                                                NULL_RTX, BITS_PER_UNIT);
781                  seq = get_insns ();
782                  end_sequence ();
783                  emit_insns_before (seq, NEXT_INSN (before_call));
784                  emit_stack_restore (SAVE_BLOCK, old_stack_level, NULL_RTX);
785                }
786            }
787#endif
788
789          /* If the result is equivalent to TARGET, return TARGET to simplify
790             checks in store_expr.  They can be equivalent but not equal in the
791             case of a function that returns BLKmode.  */
792          if (temp != target && rtx_equal_p (temp, target))
793            return target;
794          return temp;
795        }
796
797      /* If inlining failed, mark FNDECL as needing to be compiled
798         separately after all.  If function was declared inline,
799         give a warning.  */
800      if (DECL_INLINE (fndecl) && warn_inline && !flag_no_inline
801          && ! TREE_ADDRESSABLE (fndecl))
802        {
803          warning_with_decl (fndecl, "inlining failed in call to `%s'");
804          warning ("called from here");
805        }
806      mark_addressable (fndecl);
807    }
808
809  /* When calling a const function, we must pop the stack args right away,
810     so that the pop is deleted or moved with the call.  */
811  if (is_const)
812    NO_DEFER_POP;
813
814  function_call_count++;
815
816  if (fndecl && DECL_NAME (fndecl))
817    name = IDENTIFIER_POINTER (DECL_NAME (fndecl));
818
819  /* On some machines (such as the PA) indirect calls have a different
820     calling convention than normal calls.  FUNCTION_ARG in the target
821     description can look at current_call_is_indirect to determine which
822     calling convention to use.  */
823  current_call_is_indirect = (fndecl == 0);
824#if 0
825    = TREE_CODE (TREE_OPERAND (exp, 0)) == NON_LVALUE_EXPR ? 1 : 0;
826#endif
827
828#if 0
829  /* Unless it's a call to a specific function that isn't alloca,
830     if it has one argument, we must assume it might be alloca.  */
831
832  may_be_alloca =
833    (!(fndecl != 0 && strcmp (name, "alloca"))
834     && actparms != 0
835     && TREE_CHAIN (actparms) == 0);
836#else
837  /* We assume that alloca will always be called by name.  It
838     makes no sense to pass it as a pointer-to-function to
839     anything that does not understand its behavior.  */
840  may_be_alloca =
841    (name && ((IDENTIFIER_LENGTH (DECL_NAME (fndecl)) == 6
842                 && name[0] == 'a'
843                 && ! strcmp (name, "alloca"))
844                || (IDENTIFIER_LENGTH (DECL_NAME (fndecl)) == 16
845                    && name[0] == '_'
846                    && ! strcmp (name, "__builtin_alloca"))));
847#endif
848
849  /* See if this is a call to a function that can return more than once
850     or a call to longjmp.  */
851
852  returns_twice = 0;
853  is_longjmp = 0;
854
855  if (name != 0 && IDENTIFIER_LENGTH (DECL_NAME (fndecl)) <= 15)
856    {
857      char *tname = name;
858
859      /* Disregard prefix _, __ or __x.  */
860      if (name[0] == '_')
861        {
862          if (name[1] == '_' && name[2] == 'x')
863            tname += 3;
864          else if (name[1] == '_')
865            tname += 2;
866          else
867            tname += 1;
868        }
869
870      if (tname[0] == 's')
871        {
872          returns_twice
873            = ((tname[1] == 'e'
874                && (! strcmp (tname, "setjmp")
875                    || ! strcmp (tname, "setjmp_syscall")))
876               || (tname[1] == 'i'
877                   && ! strcmp (tname, "sigsetjmp"))
878               || (tname[1] == 'a'
879                   && ! strcmp (tname, "savectx")));
880          if (tname[1] == 'i'
881              && ! strcmp (tname, "siglongjmp"))
882            is_longjmp = 1;
883        }
884      else if ((tname[0] == 'q' && tname[1] == 's'
885                && ! strcmp (tname, "qsetjmp"))
886               || (tname[0] == 'v' && tname[1] == 'f'
887                   && ! strcmp (tname, "vfork")))
888        returns_twice = 1;
889
890      else if (tname[0] == 'l' && tname[1] == 'o'
891               && ! strcmp (tname, "longjmp"))
892        is_longjmp = 1;
893    }
894
895  if (may_be_alloca)
896    current_function_calls_alloca = 1;
897
898  /* Don't let pending stack adjusts add up to too much.
899     Also, do all pending adjustments now
900     if there is any chance this might be a call to alloca.  */
901
902  if (pending_stack_adjust >= 32
903      || (pending_stack_adjust > 0 && may_be_alloca))
904    do_pending_stack_adjust ();
905
906  /* Operand 0 is a pointer-to-function; get the type of the function.  */
907  funtype = TREE_TYPE (TREE_OPERAND (exp, 0));
908  if (TREE_CODE (funtype) != POINTER_TYPE)
909    abort ();
910  funtype = TREE_TYPE (funtype);
911
912  /* Push the temporary stack slot level so that we can free any temporaries
913     we make.  */
914  push_temp_slots ();
915
916  /* Start updating where the next arg would go.  */
917  INIT_CUMULATIVE_ARGS (args_so_far, funtype, NULL_RTX);
918
919  /* If struct_value_rtx is 0, it means pass the address
920     as if it were an extra parameter.  */
921  if (structure_value_addr && struct_value_rtx == 0)
922    {
923      /* If structure_value_addr is a REG other than
924         virtual_outgoing_args_rtx, we can always use it.  If it
925         is not a REG, we must always copy it into a register.
926         If it is virtual_outgoing_args_rtx, we must copy it to another
927         register in some cases.  */
928      rtx temp = (GET_CODE (structure_value_addr) != REG
929#ifdef ACCUMULATE_OUTGOING_ARGS
930                  || (stack_arg_under_construction
931                      && structure_value_addr == virtual_outgoing_args_rtx)
932#endif
933                  ? copy_addr_to_reg (structure_value_addr)
934                  : structure_value_addr);
935
936      actparms
937        = tree_cons (error_mark_node,
938                     make_tree (build_pointer_type (TREE_TYPE (funtype)),
939                                temp),
940                     actparms);
941      structure_value_addr_parm = 1;
942    }
943
944  /* Count the arguments and set NUM_ACTUALS.  */
945  for (p = actparms, i = 0; p; p = TREE_CHAIN (p)) i++;
946  num_actuals = i;
947
948  /* Compute number of named args.
949     Normally, don't include the last named arg if anonymous args follow.
950     We do include the last named arg if STRICT_ARGUMENT_NAMING is defined.
951     (If no anonymous args follow, the result of list_length is actually
952     one too large.  This is harmless.)
953
954     If SETUP_INCOMING_VARARGS is defined and STRICT_ARGUMENT_NAMING is not,
955     this machine will be able to place unnamed args that were passed in
956     registers into the stack.  So treat all args as named.  This allows the
957     insns emitting for a specific argument list to be independent of the
958     function declaration.
959
960     If SETUP_INCOMING_VARARGS is not defined, we do not have any reliable
961     way to pass unnamed args in registers, so we must force them into
962     memory.  */
963#if !defined(SETUP_INCOMING_VARARGS) || defined(STRICT_ARGUMENT_NAMING)
964  if (TYPE_ARG_TYPES (funtype) != 0)
965    n_named_args
966      = (list_length (TYPE_ARG_TYPES (funtype))
967#ifndef STRICT_ARGUMENT_NAMING
968         /* Don't include the last named arg.  */
969         - 1
970#endif
971         /* Count the struct value address, if it is passed as a parm.  */
972         + structure_value_addr_parm);
973  else
974#endif
975    /* If we know nothing, treat all args as named.  */
976    n_named_args = num_actuals;
977
978  /* Make a vector to hold all the information about each arg.  */
979  args = (struct arg_data *) alloca (num_actuals * sizeof (struct arg_data));
980  bzero ((char *) args, num_actuals * sizeof (struct arg_data));
981
982  args_size.constant = 0;
983  args_size.var = 0;
984
985  /* In this loop, we consider args in the order they are written.
986     We fill up ARGS from the front or from the back if necessary
987     so that in any case the first arg to be pushed ends up at the front.  */
988
989#ifdef PUSH_ARGS_REVERSED
990  i = num_actuals - 1, inc = -1;
991  /* In this case, must reverse order of args
992     so that we compute and push the last arg first.  */
993#else
994  i = 0, inc = 1;
995#endif
996
997  /* I counts args in order (to be) pushed; ARGPOS counts in order written.  */
998  for (p = actparms, argpos = 0; p; p = TREE_CHAIN (p), i += inc, argpos++)
999    {
1000      tree type = TREE_TYPE (TREE_VALUE (p));
1001      int unsignedp;
1002      enum machine_mode mode;
1003
1004      args[i].tree_value = TREE_VALUE (p);
1005
1006      /* Replace erroneous argument with constant zero.  */
1007      if (type == error_mark_node || TYPE_SIZE (type) == 0)
1008        args[i].tree_value = integer_zero_node, type = integer_type_node;
1009
1010      /* If TYPE is a transparent union, pass things the way we would
1011         pass the first field of the union.  We have already verified that
1012         the modes are the same.  */
1013      if (TYPE_TRANSPARENT_UNION (type))
1014        type = TREE_TYPE (TYPE_FIELDS (type));
1015
1016      /* Decide where to pass this arg.
1017
1018         args[i].reg is nonzero if all or part is passed in registers.
1019
1020         args[i].partial is nonzero if part but not all is passed in registers,
1021         and the exact value says how many words are passed in registers.
1022
1023         args[i].pass_on_stack is nonzero if the argument must at least be
1024         computed on the stack.  It may then be loaded back into registers
1025         if args[i].reg is nonzero.
1026
1027         These decisions are driven by the FUNCTION_... macros and must agree
1028         with those made by function.c.  */
1029
1030      /* See if this argument should be passed by invisible reference.  */
1031      if ((TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST
1032           && contains_placeholder_p (TYPE_SIZE (type)))
1033          || TREE_ADDRESSABLE (type)
1034#ifdef FUNCTION_ARG_PASS_BY_REFERENCE
1035          || FUNCTION_ARG_PASS_BY_REFERENCE (args_so_far, TYPE_MODE (type),
1036                                             type, argpos < n_named_args)
1037#endif
1038          )
1039        {
1040#ifdef FUNCTION_ARG_CALLEE_COPIES
1041          if (FUNCTION_ARG_CALLEE_COPIES (args_so_far, TYPE_MODE (type), type,
1042                                          argpos < n_named_args)
1043              /* If it's in a register, we must make a copy of it too.  */
1044              /* ??? Is this a sufficient test?  Is there a better one? */
1045              && !(TREE_CODE (args[i].tree_value) == VAR_DECL
1046                   && REG_P (DECL_RTL (args[i].tree_value)))
1047              && ! TREE_ADDRESSABLE (type))
1048            {
1049              args[i].tree_value = build1 (ADDR_EXPR,
1050                                           build_pointer_type (type),
1051                                           args[i].tree_value);
1052              type = build_pointer_type (type);
1053            }
1054          else
1055#endif
1056            {
1057              /* We make a copy of the object and pass the address to the
1058                 function being called.  */
1059              rtx copy;
1060
1061              if (TYPE_SIZE (type) == 0
1062                  || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
1063                {
1064                  /* This is a variable-sized object.  Make space on the stack
1065                     for it.  */
1066                  rtx size_rtx = expr_size (TREE_VALUE (p));
1067
1068                  if (old_stack_level == 0)
1069                    {
1070                      emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX);
1071                      old_pending_adj = pending_stack_adjust;
1072                      pending_stack_adjust = 0;
1073                    }
1074
1075                  copy = gen_rtx (MEM, BLKmode,
1076                                  allocate_dynamic_stack_space (size_rtx,
1077                                                                NULL_RTX,
1078                                                                TYPE_ALIGN (type)));
1079                }
1080              else
1081                {
1082                  int size = int_size_in_bytes (type);
1083                  copy = assign_stack_temp (TYPE_MODE (type), size, 0);
1084                }
1085
1086              MEM_IN_STRUCT_P (copy) = AGGREGATE_TYPE_P (type);
1087
1088              store_expr (args[i].tree_value, copy, 0);
1089
1090              args[i].tree_value = build1 (ADDR_EXPR,
1091                                           build_pointer_type (type),
1092                                           make_tree (type, copy));
1093              type = build_pointer_type (type);
1094            }
1095        }
1096
1097      mode = TYPE_MODE (type);
1098      unsignedp = TREE_UNSIGNED (type);
1099
1100#ifdef PROMOTE_FUNCTION_ARGS
1101      mode = promote_mode (type, mode, &unsignedp, 1);
1102#endif
1103
1104      args[i].unsignedp = unsignedp;
1105      args[i].mode = mode;
1106      args[i].reg = FUNCTION_ARG (args_so_far, mode, type,
1107                                  argpos < n_named_args);
1108#ifdef FUNCTION_ARG_PARTIAL_NREGS
1109      if (args[i].reg)
1110        args[i].partial
1111          = FUNCTION_ARG_PARTIAL_NREGS (args_so_far, mode, type,
1112                                        argpos < n_named_args);
1113#endif
1114
1115      args[i].pass_on_stack = MUST_PASS_IN_STACK (mode, type);
1116
1117      /* If FUNCTION_ARG returned an (expr_list (nil) FOO), it means that
1118         we are to pass this arg in the register(s) designated by FOO, but
1119         also to pass it in the stack.  */
1120      if (args[i].reg && GET_CODE (args[i].reg) == EXPR_LIST
1121          && XEXP (args[i].reg, 0) == 0)
1122        args[i].pass_on_stack = 1, args[i].reg = XEXP (args[i].reg, 1);
1123
1124      /* If this is an addressable type, we must preallocate the stack
1125         since we must evaluate the object into its final location.
1126
1127         If this is to be passed in both registers and the stack, it is simpler
1128         to preallocate.  */
1129      if (TREE_ADDRESSABLE (type)
1130          || (args[i].pass_on_stack && args[i].reg != 0))
1131        must_preallocate = 1;
1132
1133      /* If this is an addressable type, we cannot pre-evaluate it.  Thus,
1134         we cannot consider this function call constant.  */
1135      if (TREE_ADDRESSABLE (type))
1136        is_const = 0;
1137
1138      /* Compute the stack-size of this argument.  */
1139      if (args[i].reg == 0 || args[i].partial != 0
1140#ifdef REG_PARM_STACK_SPACE
1141          || reg_parm_stack_space > 0
1142#endif
1143          || args[i].pass_on_stack)
1144        locate_and_pad_parm (mode, type,
1145#ifdef STACK_PARMS_IN_REG_PARM_AREA
1146                             1,
1147#else
1148                             args[i].reg != 0,
1149#endif
1150                             fndecl, &args_size, &args[i].offset,
1151                             &args[i].size);
1152
1153#ifndef ARGS_GROW_DOWNWARD
1154      args[i].slot_offset = args_size;
1155#endif
1156
1157#ifndef REG_PARM_STACK_SPACE
1158      /* If a part of the arg was put into registers,
1159         don't include that part in the amount pushed.  */
1160      if (! args[i].pass_on_stack)
1161        args[i].size.constant -= ((args[i].partial * UNITS_PER_WORD)
1162                                  / (PARM_BOUNDARY / BITS_PER_UNIT)
1163                                  * (PARM_BOUNDARY / BITS_PER_UNIT));
1164#endif
1165     
1166      /* Update ARGS_SIZE, the total stack space for args so far.  */
1167
1168      args_size.constant += args[i].size.constant;
1169      if (args[i].size.var)
1170        {
1171          ADD_PARM_SIZE (args_size, args[i].size.var);
1172        }
1173
1174      /* Since the slot offset points to the bottom of the slot,
1175         we must record it after incrementing if the args grow down.  */
1176#ifdef ARGS_GROW_DOWNWARD
1177      args[i].slot_offset = args_size;
1178
1179      args[i].slot_offset.constant = -args_size.constant;
1180      if (args_size.var)
1181        {
1182          SUB_PARM_SIZE (args[i].slot_offset, args_size.var);
1183        }
1184#endif
1185
1186      /* Increment ARGS_SO_FAR, which has info about which arg-registers
1187         have been used, etc.  */
1188
1189      FUNCTION_ARG_ADVANCE (args_so_far, TYPE_MODE (type), type,
1190                            argpos < n_named_args);
1191    }
1192
1193#ifdef FINAL_REG_PARM_STACK_SPACE
1194  reg_parm_stack_space = FINAL_REG_PARM_STACK_SPACE (args_size.constant,
1195                                                     args_size.var);
1196#endif
1197     
1198  /* Compute the actual size of the argument block required.  The variable
1199     and constant sizes must be combined, the size may have to be rounded,
1200     and there may be a minimum required size.  */
1201
1202  original_args_size = args_size;
1203  if (args_size.var)
1204    {
1205      /* If this function requires a variable-sized argument list, don't try to
1206         make a cse'able block for this call.  We may be able to do this
1207         eventually, but it is too complicated to keep track of what insns go
1208         in the cse'able block and which don't.  */
1209
1210      is_const = 0;
1211      must_preallocate = 1;
1212
1213      args_size.var = ARGS_SIZE_TREE (args_size);
1214      args_size.constant = 0;
1215
1216#ifdef STACK_BOUNDARY
1217      if (STACK_BOUNDARY != BITS_PER_UNIT)
1218        args_size.var = round_up (args_size.var, STACK_BYTES);
1219#endif
1220
1221#ifdef REG_PARM_STACK_SPACE
1222      if (reg_parm_stack_space > 0)
1223        {
1224          args_size.var
1225            = size_binop (MAX_EXPR, args_size.var,
1226                          size_int (REG_PARM_STACK_SPACE (fndecl)));
1227
1228#ifndef OUTGOING_REG_PARM_STACK_SPACE
1229          /* The area corresponding to register parameters is not to count in
1230             the size of the block we need.  So make the adjustment.  */
1231          args_size.var
1232            = size_binop (MINUS_EXPR, args_size.var,
1233                          size_int (reg_parm_stack_space));
1234#endif
1235        }
1236#endif
1237    }
1238  else
1239    {
1240#ifdef STACK_BOUNDARY
1241      args_size.constant = (((args_size.constant + (STACK_BYTES - 1))
1242                             / STACK_BYTES) * STACK_BYTES);
1243#endif
1244
1245#ifdef REG_PARM_STACK_SPACE
1246      args_size.constant = MAX (args_size.constant,
1247                                reg_parm_stack_space);
1248#ifdef MAYBE_REG_PARM_STACK_SPACE
1249      if (reg_parm_stack_space == 0)
1250        args_size.constant = 0;
1251#endif
1252#ifndef OUTGOING_REG_PARM_STACK_SPACE
1253      args_size.constant -= reg_parm_stack_space;
1254#endif
1255#endif
1256    }
1257
1258  /* See if we have or want to preallocate stack space.
1259
1260     If we would have to push a partially-in-regs parm
1261     before other stack parms, preallocate stack space instead.
1262
1263     If the size of some parm is not a multiple of the required stack
1264     alignment, we must preallocate.
1265
1266     If the total size of arguments that would otherwise create a copy in
1267     a temporary (such as a CALL) is more than half the total argument list
1268     size, preallocation is faster.
1269
1270     Another reason to preallocate is if we have a machine (like the m88k)
1271     where stack alignment is required to be maintained between every
1272     pair of insns, not just when the call is made.  However, we assume here
1273     that such machines either do not have push insns (and hence preallocation
1274     would occur anyway) or the problem is taken care of with
1275     PUSH_ROUNDING.  */
1276
1277  if (! must_preallocate)
1278    {
1279      int partial_seen = 0;
1280      int copy_to_evaluate_size = 0;
1281
1282      for (i = 0; i < num_actuals && ! must_preallocate; i++)
1283        {
1284          if (args[i].partial > 0 && ! args[i].pass_on_stack)
1285            partial_seen = 1;
1286          else if (partial_seen && args[i].reg == 0)
1287            must_preallocate = 1;
1288
1289          if (TYPE_MODE (TREE_TYPE (args[i].tree_value)) == BLKmode
1290              && (TREE_CODE (args[i].tree_value) == CALL_EXPR
1291                  || TREE_CODE (args[i].tree_value) == TARGET_EXPR
1292                  || TREE_CODE (args[i].tree_value) == COND_EXPR
1293                  || TREE_ADDRESSABLE (TREE_TYPE (args[i].tree_value))))
1294            copy_to_evaluate_size
1295              += int_size_in_bytes (TREE_TYPE (args[i].tree_value));
1296        }
1297
1298      if (copy_to_evaluate_size * 2 >= args_size.constant
1299          && args_size.constant > 0)
1300        must_preallocate = 1;
1301    }
1302
1303  /* If the structure value address will reference the stack pointer, we must
1304     stabilize it.  We don't need to do this if we know that we are not going
1305     to adjust the stack pointer in processing this call.  */
1306
1307  if (structure_value_addr
1308      && (reg_mentioned_p (virtual_stack_dynamic_rtx, structure_value_addr)
1309       || reg_mentioned_p (virtual_outgoing_args_rtx, structure_value_addr))
1310      && (args_size.var
1311#ifndef ACCUMULATE_OUTGOING_ARGS
1312          || args_size.constant
1313#endif
1314          ))
1315    structure_value_addr = copy_to_reg (structure_value_addr);
1316
1317  /* If this function call is cse'able, precompute all the parameters.
1318     Note that if the parameter is constructed into a temporary, this will
1319     cause an additional copy because the parameter will be constructed
1320     into a temporary location and then copied into the outgoing arguments.
1321     If a parameter contains a call to alloca and this function uses the
1322     stack, precompute the parameter.  */
1323
1324  /* If we preallocated the stack space, and some arguments must be passed
1325     on the stack, then we must precompute any parameter which contains a
1326     function call which will store arguments on the stack.
1327     Otherwise, evaluating the parameter may clobber previous parameters
1328     which have already been stored into the stack.  */
1329
1330  for (i = 0; i < num_actuals; i++)
1331    if (is_const
1332        || ((args_size.var != 0 || args_size.constant != 0)
1333            && calls_function (args[i].tree_value, 1))
1334        || (must_preallocate && (args_size.var != 0 || args_size.constant != 0)
1335            && calls_function (args[i].tree_value, 0)))
1336      {
1337        /* If this is an addressable type, we cannot pre-evaluate it.  */
1338        if (TREE_ADDRESSABLE (TREE_TYPE (args[i].tree_value)))
1339          abort ();
1340
1341        push_temp_slots ();
1342
1343        args[i].initial_value = args[i].value
1344          = expand_expr (args[i].tree_value, NULL_RTX, VOIDmode, 0);
1345
1346        preserve_temp_slots (args[i].value);
1347        pop_temp_slots ();
1348
1349        /* ANSI doesn't require a sequence point here,
1350           but PCC has one, so this will avoid some problems.  */
1351        emit_queue ();
1352
1353        args[i].initial_value = args[i].value
1354          = protect_from_queue (args[i].initial_value, 0);
1355
1356        if (TYPE_MODE (TREE_TYPE (args[i].tree_value)) != args[i].mode)
1357          args[i].value
1358            = convert_modes (args[i].mode,
1359                             TYPE_MODE (TREE_TYPE (args[i].tree_value)),
1360                             args[i].value, args[i].unsignedp);
1361      }
1362
1363  /* Now we are about to start emitting insns that can be deleted
1364     if a libcall is deleted.  */
1365  if (is_const)
1366    start_sequence ();
1367
1368  /* If we have no actual push instructions, or shouldn't use them,
1369     make space for all args right now.  */
1370
1371  if (args_size.var != 0)
1372    {
1373      if (old_stack_level == 0)
1374        {
1375          emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX);
1376          old_pending_adj = pending_stack_adjust;
1377          pending_stack_adjust = 0;
1378#ifdef ACCUMULATE_OUTGOING_ARGS
1379          /* stack_arg_under_construction says whether a stack arg is
1380             being constructed at the old stack level.  Pushing the stack
1381             gets a clean outgoing argument block.  */
1382          old_stack_arg_under_construction = stack_arg_under_construction;
1383          stack_arg_under_construction = 0;
1384#endif
1385        }
1386      argblock = push_block (ARGS_SIZE_RTX (args_size), 0, 0);
1387    }
1388  else
1389    {
1390      /* Note that we must go through the motions of allocating an argument
1391         block even if the size is zero because we may be storing args
1392         in the area reserved for register arguments, which may be part of
1393         the stack frame.  */
1394
1395      int needed = args_size.constant;
1396
1397      /* Store the maximum argument space used.  It will be pushed by the
1398         prologue (if ACCUMULATE_OUTGOING_ARGS, or stack overflow checking). */
1399
1400      if (needed > current_function_outgoing_args_size)
1401        current_function_outgoing_args_size = needed;
1402
1403      if (must_preallocate)
1404        {
1405#ifdef ACCUMULATE_OUTGOING_ARGS
1406          /* Since the stack pointer will never be pushed, it is possible for
1407             the evaluation of a parm to clobber something we have already
1408             written to the stack.  Since most function calls on RISC machines
1409             do not use the stack, this is uncommon, but must work correctly.
1410
1411             Therefore, we save any area of the stack that was already written
1412             and that we are using.  Here we set up to do this by making a new
1413             stack usage map from the old one.  The actual save will be done
1414             by store_one_arg.
1415
1416             Another approach might be to try to reorder the argument
1417             evaluations to avoid this conflicting stack usage.  */
1418
1419#if defined(REG_PARM_STACK_SPACE) && ! defined(OUTGOING_REG_PARM_STACK_SPACE)
1420          /* Since we will be writing into the entire argument area, the
1421             map must be allocated for its entire size, not just the part that
1422             is the responsibility of the caller.  */
1423          needed += reg_parm_stack_space;
1424#endif
1425
1426#ifdef ARGS_GROW_DOWNWARD
1427          highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use,
1428                                             needed + 1);
1429#else
1430          highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use,
1431                                             needed);
1432#endif
1433          stack_usage_map = (char *) alloca (highest_outgoing_arg_in_use);
1434
1435          if (initial_highest_arg_in_use)
1436            bcopy (initial_stack_usage_map, stack_usage_map,
1437                   initial_highest_arg_in_use);
1438
1439          if (initial_highest_arg_in_use != highest_outgoing_arg_in_use)
1440            bzero (&stack_usage_map[initial_highest_arg_in_use],
1441                   highest_outgoing_arg_in_use - initial_highest_arg_in_use);
1442          needed = 0;
1443
1444          /* The address of the outgoing argument list must not be copied to a
1445             register here, because argblock would be left pointing to the
1446             wrong place after the call to allocate_dynamic_stack_space below.
1447             */
1448
1449          argblock = virtual_outgoing_args_rtx;
1450
1451#else /* not ACCUMULATE_OUTGOING_ARGS */
1452          if (inhibit_defer_pop == 0)
1453            {
1454              /* Try to reuse some or all of the pending_stack_adjust
1455                 to get this space.  Maybe we can avoid any pushing.  */
1456              if (needed > pending_stack_adjust)
1457                {
1458                  needed -= pending_stack_adjust;
1459                  pending_stack_adjust = 0;
1460                }
1461              else
1462                {
1463                  pending_stack_adjust -= needed;
1464                  needed = 0;
1465                }
1466            }
1467          /* Special case this because overhead of `push_block' in this
1468             case is non-trivial.  */
1469          if (needed == 0)
1470            argblock = virtual_outgoing_args_rtx;
1471          else
1472            argblock = push_block (GEN_INT (needed), 0, 0);
1473
1474          /* We only really need to call `copy_to_reg' in the case where push
1475             insns are going to be used to pass ARGBLOCK to a function
1476             call in ARGS.  In that case, the stack pointer changes value
1477             from the allocation point to the call point, and hence
1478             the value of VIRTUAL_OUTGOING_ARGS_RTX changes as well.
1479             But we might as well always do it.  */
1480          argblock = copy_to_reg (argblock);
1481#endif /* not ACCUMULATE_OUTGOING_ARGS */
1482        }
1483    }
1484
1485#ifdef ACCUMULATE_OUTGOING_ARGS
1486  /* The save/restore code in store_one_arg handles all cases except one:
1487     a constructor call (including a C function returning a BLKmode struct)
1488     to initialize an argument.  */
1489  if (stack_arg_under_construction)
1490    {
1491#if defined(REG_PARM_STACK_SPACE) && ! defined(OUTGOING_REG_PARM_STACK_SPACE)
1492      rtx push_size = GEN_INT (reg_parm_stack_space + args_size.constant);
1493#else
1494      rtx push_size = GEN_INT (args_size.constant);
1495#endif
1496      if (old_stack_level == 0)
1497        {
1498          emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX);
1499          old_pending_adj = pending_stack_adjust;
1500          pending_stack_adjust = 0;
1501          /* stack_arg_under_construction says whether a stack arg is
1502             being constructed at the old stack level.  Pushing the stack
1503             gets a clean outgoing argument block.  */
1504          old_stack_arg_under_construction = stack_arg_under_construction;
1505          stack_arg_under_construction = 0;
1506          /* Make a new map for the new argument list.  */
1507          stack_usage_map = (char *)alloca (highest_outgoing_arg_in_use);
1508          bzero (stack_usage_map, highest_outgoing_arg_in_use);
1509          highest_outgoing_arg_in_use = 0;
1510        }
1511      allocate_dynamic_stack_space (push_size, NULL_RTX, BITS_PER_UNIT);
1512    }
1513  /* If argument evaluation might modify the stack pointer, copy the
1514     address of the argument list to a register.  */
1515  for (i = 0; i < num_actuals; i++)
1516    if (args[i].pass_on_stack)
1517      {
1518        argblock = copy_addr_to_reg (argblock);
1519        break;
1520      }
1521#endif
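  /* At this point ARGBLOCK, if nonzero, addresses the base of the
     preallocated outgoing argument area: virtual_outgoing_args_rtx, a pseudo
     copy of it, or the block obtained from push_block.  If it is still zero,
     the arguments are simply pushed one by one below.  */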
1522
1523
1524  /* If we preallocated stack space, compute the address of each argument.
1525     We need not ensure it is a valid memory address here; it will be
1526     validized when it is used.  */
1527  if (argblock)
1528    {
1529      rtx arg_reg = argblock;
1530      int arg_offset = 0;
1531
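      /* If ARGBLOCK is itself of the form (plus REG CONST), address the
         argument slots relative to REG and fold the constant into each
         argument's offset instead of nesting one PLUS inside another.  */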
1532      if (GET_CODE (argblock) == PLUS)
1533        arg_reg = XEXP (argblock, 0), arg_offset = INTVAL (XEXP (argblock, 1));
1534
1535      for (i = 0; i < num_actuals; i++)
1536        {
1537          rtx offset = ARGS_SIZE_RTX (args[i].offset);
1538          rtx slot_offset = ARGS_SIZE_RTX (args[i].slot_offset);
1539          rtx addr;
1540
1541          /* Skip this parm if it will not be passed on the stack.  */
1542          if (! args[i].pass_on_stack && args[i].reg != 0)
1543            continue;
1544
1545          if (GET_CODE (offset) == CONST_INT)
1546            addr = plus_constant (arg_reg, INTVAL (offset));
1547          else
1548            addr = gen_rtx (PLUS, Pmode, arg_reg, offset);
1549
1550          addr = plus_constant (addr, arg_offset);
1551          args[i].stack = gen_rtx (MEM, args[i].mode, addr);
1552          MEM_IN_STRUCT_P (args[i].stack)
1553            = AGGREGATE_TYPE_P (TREE_TYPE (args[i].tree_value));
1554
1555          if (GET_CODE (slot_offset) == CONST_INT)
1556            addr = plus_constant (arg_reg, INTVAL (slot_offset));
1557          else
1558            addr = gen_rtx (PLUS, Pmode, arg_reg, slot_offset);
1559
1560          addr = plus_constant (addr, arg_offset);
1561          args[i].stack_slot = gen_rtx (MEM, args[i].mode, addr);
1562        }
1563    }
1564                                               
1565#ifdef PUSH_ARGS_REVERSED
1566#ifdef STACK_BOUNDARY
1567  /* If we push args individually in reverse order, perform stack alignment
1568     before the first push (the last arg).  */
1569  if (argblock == 0)
1570    anti_adjust_stack (GEN_INT (args_size.constant
1571                                - original_args_size.constant));
1572#endif
1573#endif
1574
1575  /* Don't try to defer pops if preallocating, not even from the first arg,
1576     since ARGBLOCK probably refers to the SP.  */
1577  if (argblock)
1578    NO_DEFER_POP;
1579
1580  /* Get the function to call, in the form of RTL.  */
1581  if (fndecl)
1582    {
1583      /* If this is the first use of the function, see if we need to
1584         make an external definition for it.  */
1585      if (! TREE_USED (fndecl))
1586        {
1587          assemble_external (fndecl);
1588          TREE_USED (fndecl) = 1;
1589        }
1590
1591      /* Get a SYMBOL_REF rtx for the function address.  */
1592      funexp = XEXP (DECL_RTL (fndecl), 0);
1593    }
1594  else
1595    /* Generate an rtx (probably a pseudo-register) for the address.  */
1596    {
1597      push_temp_slots ();
1598      funexp = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
1599      pop_temp_slots ();        /* FUNEXP can't be BLKmode */
1600      emit_queue ();
1601    }
1602
1603  /* Figure out the register where the value, if any, will come back.  */
1604  valreg = 0;
1605  if (TYPE_MODE (TREE_TYPE (exp)) != VOIDmode
1606      && ! structure_value_addr)
1607    {
1608      if (pcc_struct_value)
1609        valreg = hard_function_value (build_pointer_type (TREE_TYPE (exp)),
1610                                      fndecl);
1611      else
1612        valreg = hard_function_value (TREE_TYPE (exp), fndecl);
1613    }
1614
1615  /* Precompute all register parameters.  It isn't safe to compute anything
1616     once we have started filling any specific hard regs. */
1617  reg_parm_seen = 0;
1618  for (i = 0; i < num_actuals; i++)
1619    if (args[i].reg != 0 && ! args[i].pass_on_stack)
1620      {
1621        reg_parm_seen = 1;
1622
1623        if (args[i].value == 0)
1624          {
1625            push_temp_slots ();
1626            args[i].value = expand_expr (args[i].tree_value, NULL_RTX,
1627                                         VOIDmode, 0);
1628            preserve_temp_slots (args[i].value);
1629            pop_temp_slots ();
1630
1631            /* ANSI doesn't require a sequence point here,
1632               but PCC has one, so this will avoid some problems.  */
1633            emit_queue ();
1634          }
1635
1636        /* If we are to promote the function arg to a wider mode,
1637           do it now.  */
1638
1639        if (args[i].mode != TYPE_MODE (TREE_TYPE (args[i].tree_value)))
1640          args[i].value
1641            = convert_modes (args[i].mode,
1642                             TYPE_MODE (TREE_TYPE (args[i].tree_value)),
1643                             args[i].value, args[i].unsignedp);
1644
1645        /* If the value is expensive, and we are inside an appropriately
1646           short loop, put the value into a pseudo and then put the pseudo
1647           into the hard reg.
1648
1649           For small register classes, also do this if this call uses
1650           register parameters.  This is to avoid reload conflicts while
1651           loading the parameter registers.  */
1652
1653        if ((! (GET_CODE (args[i].value) == REG
1654                || (GET_CODE (args[i].value) == SUBREG
1655                    && GET_CODE (SUBREG_REG (args[i].value)) == REG)))
1656            && args[i].mode != BLKmode
1657            && rtx_cost (args[i].value, SET) > 2
1658#ifdef SMALL_REGISTER_CLASSES
1659            && (reg_parm_seen || preserve_subexpressions_p ())
1660#else
1661            && preserve_subexpressions_p ()
1662#endif
1663            )
1664          args[i].value = copy_to_mode_reg (args[i].mode, args[i].value);
1665      }
1666
1667#if defined(ACCUMULATE_OUTGOING_ARGS) && defined(REG_PARM_STACK_SPACE)
1668  /* The argument list is the property of the called routine, which
1669     may clobber it.  If the fixed area has been used for previous
1670     parameters, we must save and restore it.
1671
1672     Here we compute the boundary of the area that needs to be saved, if any.  */
1673
1674#ifdef ARGS_GROW_DOWNWARD
1675  for (i = 0; i < reg_parm_stack_space + 1; i++)
1676#else
1677  for (i = 0; i < reg_parm_stack_space; i++)
1678#endif
1679    {
1680      if (i >=  highest_outgoing_arg_in_use
1681          || stack_usage_map[i] == 0)
1682        continue;
1683
1684      if (low_to_save == -1)
1685        low_to_save = i;
1686
1687      high_to_save = i;
1688    }
1689
1690  if (low_to_save >= 0)
1691    {
1692      int num_to_save = high_to_save - low_to_save + 1;
1693      enum machine_mode save_mode
1694        = mode_for_size (num_to_save * BITS_PER_UNIT, MODE_INT, 1);
1695      rtx stack_area;
1696
1697      /* If we don't have the required alignment, must do this in BLKmode.  */
1698      if ((low_to_save & (MIN (GET_MODE_SIZE (save_mode),
1699                               BIGGEST_ALIGNMENT / UNITS_PER_WORD) - 1)))
1700        save_mode = BLKmode;
1701
1702      stack_area = gen_rtx (MEM, save_mode,
1703                            memory_address (save_mode,
1704                                           
1705#ifdef ARGS_GROW_DOWNWARD
1706                                            plus_constant (argblock,
1707                                                           - high_to_save)
1708#else
1709                                            plus_constant (argblock,
1710                                                           low_to_save)
1711#endif
1712                                            ));
1713      if (save_mode == BLKmode)
1714        {
1715          save_area = assign_stack_temp (BLKmode, num_to_save, 0);
1716          MEM_IN_STRUCT_P (save_area) = 0;
1717          emit_block_move (validize_mem (save_area), stack_area,
1718                           GEN_INT (num_to_save),
1719                           PARM_BOUNDARY / BITS_PER_UNIT);
1720        }
1721      else
1722        {
1723          save_area = gen_reg_rtx (save_mode);
1724          emit_move_insn (save_area, stack_area);
1725        }
1726    }
1727#endif
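  /* If any of the fixed register-parameter area was already in use,
     SAVE_AREA now holds a copy of it (in a pseudo when a single integer
     mode suffices, otherwise in a BLKmode stack temporary); it is copied
     back after the call, below.  */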
1728         
1729
1730  /* Now store (and compute if necessary) all non-register parms.
1731     These come before register parms, since they can require block-moves,
1732     which could clobber the registers used for register parms.
1733     Parms which have partial registers are not stored here,
1734     but we do preallocate space here if they want that.  */
1735
1736  for (i = 0; i < num_actuals; i++)
1737    if (args[i].reg == 0 || args[i].pass_on_stack)
1738      store_one_arg (&args[i], argblock, may_be_alloca,
1739                     args_size.var != 0, fndecl, reg_parm_stack_space);
1740
1741#ifdef STRICT_ALIGNMENT
1742  /* If we have a parm that is passed in registers but not in memory
1743     and whose alignment does not permit a direct copy into registers,
1744     make a group of pseudos that correspond to each register that we
1745     will later fill.  */
1746
1747  for (i = 0; i < num_actuals; i++)
1748    if (args[i].reg != 0 && ! args[i].pass_on_stack
1749        && args[i].mode == BLKmode
1750        && (TYPE_ALIGN (TREE_TYPE (args[i].tree_value))
1751            < MIN (BIGGEST_ALIGNMENT, BITS_PER_WORD)))
1752      {
1753        int bytes = int_size_in_bytes (TREE_TYPE (args[i].tree_value));
1754        int big_endian_correction = 0;
1755
1756        args[i].n_aligned_regs
1757          = args[i].partial ? args[i].partial
1758            : (bytes + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
1759
1760        args[i].aligned_regs = (rtx *) alloca (sizeof (rtx)
1761                                               * args[i].n_aligned_regs);
1762
1763        /* Structures smaller than a word are aligned to the least significant
1764           byte (to the right).  On a BYTES_BIG_ENDIAN machine, this means we
1765           must skip the empty high order bytes when calculating the bit
1766           offset.  */
1767        if (BYTES_BIG_ENDIAN && bytes < UNITS_PER_WORD)
1768          big_endian_correction = (BITS_PER_WORD  - (bytes * BITS_PER_UNIT));
1769
1770        for (j = 0; j < args[i].n_aligned_regs; j++)
1771          {
1772            rtx reg = gen_reg_rtx (word_mode);
1773            rtx word = operand_subword_force (args[i].value, j, BLKmode);
1774            int bitsize = TYPE_ALIGN (TREE_TYPE (args[i].tree_value));
1775            int bitpos;
1776
1777            args[i].aligned_regs[j] = reg;
1778
1779            /* Clobber REG and move each partword into it.  Ensure we don't
1780               go past the end of the structure.  Note that the loop below
1781               works because we've already verified that padding
1782               and endianness are compatible.  */
1783
1784            emit_insn (gen_rtx (CLOBBER, VOIDmode, reg));
1785
1786            for (bitpos = 0;
1787                 bitpos < BITS_PER_WORD && bytes > 0;
1788                 bitpos += bitsize, bytes -= bitsize / BITS_PER_UNIT)
1789              {
1790                int xbitpos = bitpos + big_endian_correction;
1791
1792                store_bit_field (reg, bitsize, xbitpos, word_mode,
1793                                 extract_bit_field (word, bitsize, bitpos, 1,
1794                                                    NULL_RTX, word_mode,
1795                                                    word_mode,
1796                                                    bitsize / BITS_PER_UNIT,
1797                                                    BITS_PER_WORD),
1798                                 bitsize / BITS_PER_UNIT, BITS_PER_WORD);
1799              }
1800          }
1801      }
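  /* As a concrete illustration of the correction above (assuming a machine
     with 32-bit words and 8-bit structure alignment): a 3-byte BLKmode
     argument gets big_endian_correction = 32 - 24 = 8, so its bytes land at
     bit positions 8, 16 and 24 of the word-mode pseudo, leaving the most
     significant byte as padding; the value ends up right-justified in the
     register.  */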
1802#endif
1803
1804  /* Now store any partially-in-registers parm.
1805     This is the last place a block-move can happen.  */
1806  if (reg_parm_seen)
1807    for (i = 0; i < num_actuals; i++)
1808      if (args[i].partial != 0 && ! args[i].pass_on_stack)
1809        store_one_arg (&args[i], argblock, may_be_alloca,
1810                       args_size.var != 0, fndecl, reg_parm_stack_space);
1811
1812#ifndef PUSH_ARGS_REVERSED
1813#ifdef STACK_BOUNDARY
1814  /* If we pushed args in forward order, perform stack alignment
1815     after pushing the last arg.  */
1816  if (argblock == 0)
1817    anti_adjust_stack (GEN_INT (args_size.constant
1818                                - original_args_size.constant));
1819#endif
1820#endif
1821
1822  /* If register arguments require space on the stack and stack space
1823     was not preallocated, allocate stack space here for arguments
1824     passed in registers.  */
1825#if ! defined(ACCUMULATE_OUTGOING_ARGS) && defined(OUTGOING_REG_PARM_STACK_SPACE)
1826  if (must_preallocate == 0 && reg_parm_stack_space > 0)
1827    anti_adjust_stack (GEN_INT (reg_parm_stack_space));
1828#endif
1829
1830  /* Pass the function the address in which to return a structure value.  */
1831  if (structure_value_addr && ! structure_value_addr_parm)
1832    {
1833      emit_move_insn (struct_value_rtx,
1834                      force_reg (Pmode,
1835                                 force_operand (structure_value_addr,
1836                                                NULL_RTX)));
1837      if (GET_CODE (struct_value_rtx) == REG)
1838          use_reg (&call_fusage, struct_value_rtx);
1839    }
1840
1841  funexp = prepare_call_address (funexp, fndecl, &call_fusage, reg_parm_seen);
1842
1843  /* Now do the register loads required for any wholly-register parms or any
1844     parms which are passed both on the stack and in a register.  Their
1845     expressions were already evaluated.
1846
1847     Mark all register-parms as living through the call, putting these USE
1848     insns in the CALL_INSN_FUNCTION_USAGE field.  */
1849
1850  for (i = 0; i < num_actuals; i++)
1851    {
1852      rtx list = args[i].reg;
1853      int partial = args[i].partial;
1854
1855      while (list)
1856        {
1857          rtx reg;
1858          int nregs;
1859
1860          /* Process each register that needs to get this arg.  */
1861          if (GET_CODE (list) == EXPR_LIST)
1862            reg = XEXP (list, 0), list = XEXP (list, 1);
1863          else
1864            reg = list, list = 0;
1865
1866          /* Set to non-negative if we must move a word at a time, even if
1867             just one word (e.g., partial == 1 && mode == DFmode).  Set to -1 if
1868             we just use a normal move insn.  This value can be zero if the
1869             argument is a zero size structure with no fields.  */
1870          nregs = (partial ? partial
1871                   : (TYPE_MODE (TREE_TYPE (args[i].tree_value)) == BLKmode
1872                      ? ((int_size_in_bytes (TREE_TYPE (args[i].tree_value))
1873                          + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)
1874                      : -1));
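          /* For instance, assuming 4-byte words, a 10-byte BLKmode argument
             that is not partial gives nregs == 3 and is normally loaded into
             three consecutive hard registers by move_block_to_reg below.  */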
1875
1876          /* If simple case, just do move.  If normal partial, store_one_arg
1877             has already loaded the register for us.  In all other cases,
1878             load the register(s) from memory.  */
1879
1880          if (nregs == -1)
1881            emit_move_insn (reg, args[i].value);
1882
1883#ifdef STRICT_ALIGNMENT
1884          /* If we have pre-computed the values to put in the registers in
1885             the case of non-aligned structures, copy them in now.  */
1886
1887          else if (args[i].n_aligned_regs != 0)
1888            for (j = 0; j < args[i].n_aligned_regs; j++)
1889              emit_move_insn (gen_rtx (REG, word_mode, REGNO (reg) + j),
1890                              args[i].aligned_regs[j]);
1891#endif
1892
1893          else if (args[i].partial == 0 || args[i].pass_on_stack)
1894            move_block_to_reg (REGNO (reg),
1895                               validize_mem (args[i].value), nregs,
1896                               args[i].mode);
1897
1898          if (nregs == -1)
1899            use_reg (&call_fusage, reg);
1900          else
1901            use_regs (&call_fusage, REGNO (reg), nregs == 0 ? 1 : nregs);
1902
1903          /* PARTIAL referred only to the first register, so clear it for the
1904             next time.  */
1905          partial = 0;
1906        }
1907    }
1908
1909  /* Perform postincrements before actually calling the function.  */
1910  emit_queue ();
1911
1912  /* All arguments and registers used for the call must be set up by now!  */
1913
1914  /* Generate the actual call instruction.  */
1915  emit_call_1 (funexp, fndecl, funtype, args_size.constant, struct_value_size,
1916               FUNCTION_ARG (args_so_far, VOIDmode, void_type_node, 1),
1917               valreg, old_inhibit_defer_pop, call_fusage, is_const);
1918
1919  /* If call is cse'able, make appropriate pair of reg-notes around it.
1920     Test valreg so we don't crash; may safely ignore `const'
1921     if return type is void.  */
1922  if (is_const && valreg != 0)
1923    {
1924      rtx note = 0;
1925      rtx temp = gen_reg_rtx (GET_MODE (valreg));
1926      rtx insns;
1927
1928      /* Construct an "equal form" for the value which mentions all the
1929         arguments in order as well as the function name.  */
1930#ifdef PUSH_ARGS_REVERSED
1931      for (i = 0; i < num_actuals; i++)
1932        note = gen_rtx (EXPR_LIST, VOIDmode, args[i].initial_value, note);
1933#else
1934      for (i = num_actuals - 1; i >= 0; i--)
1935        note = gen_rtx (EXPR_LIST, VOIDmode, args[i].initial_value, note);
1936#endif
1937      note = gen_rtx (EXPR_LIST, VOIDmode, funexp, note);
1938
1939      insns = get_insns ();
1940      end_sequence ();
1941
1942      emit_libcall_block (insns, temp, valreg, note);
1943
1944      valreg = temp;
1945    }
1946  else if (is_const)
1947    {
1948      /* Otherwise, just write out the sequence without a note.  */
1949      rtx insns = get_insns ();
1950
1951      end_sequence ();
1952      emit_insns (insns);
1953    }
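  /* Either way, if IS_CONST the sequence opened by start_sequence above has
     now been closed and emitted; in the first case emit_libcall_block
     brackets it with REG_LIBCALL/REG_RETVAL notes so that CSE can later
     delete a duplicated call altogether.  */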
1954
1955  /* For calls to `setjmp', etc., inform flow.c it should complain
1956     if nonvolatile values are live.  */
1957
1958  if (returns_twice)
1959    {
1960      emit_note (name, NOTE_INSN_SETJMP);
1961      current_function_calls_setjmp = 1;
1962    }
1963
1964  if (is_longjmp)
1965    current_function_calls_longjmp = 1;
1966
1967  /* Notice functions that cannot return.
1968     If optimizing, insns emitted below will be dead.
1969     If not optimizing, they will exist, which is useful
1970     if the user uses the `return' command in the debugger.  */
1971
1972  if (is_volatile || is_longjmp)
1973    emit_barrier ();
1974
1975  /* If value type not void, return an rtx for the value.  */
1976
1977  /* If there are cleanups to be called, don't use a hard reg as target.  */
1978  if (cleanups_this_call != old_cleanups
1979      && target && REG_P (target)
1980      && REGNO (target) < FIRST_PSEUDO_REGISTER)
1981    target = 0;
1982
1983  if (TYPE_MODE (TREE_TYPE (exp)) == VOIDmode
1984      || ignore)
1985    {
1986      target = const0_rtx;
1987    }
1988  else if (structure_value_addr)
1989    {
1990      if (target == 0 || GET_CODE (target) != MEM)
1991        {
1992          target = gen_rtx (MEM, TYPE_MODE (TREE_TYPE (exp)),
1993                            memory_address (TYPE_MODE (TREE_TYPE (exp)),
1994                                            structure_value_addr));
1995          MEM_IN_STRUCT_P (target) = AGGREGATE_TYPE_P (TREE_TYPE (exp));
1996        }
1997    }
1998  else if (pcc_struct_value)
1999    {
2000      if (target == 0)
2001        {
2002          /* We used to leave the value in the location that it is
2003             returned in, but that causes problems if it is used more
2004             than once in one expression.  Rather than trying to track
2005             when a copy is required, we always copy when TARGET is
2006             not specified.  This calling sequence is only used on
2007             a few machines and TARGET is usually nonzero.  */
2008          if (TYPE_MODE (TREE_TYPE (exp)) == BLKmode)
2009            {
2010              target = assign_stack_temp (BLKmode,
2011                                          int_size_in_bytes (TREE_TYPE (exp)),
2012                                          0);
2013
2014              MEM_IN_STRUCT_P (target) = AGGREGATE_TYPE_P (TREE_TYPE (exp));
2015
2016              /* Save this temp slot around the pop below.  */
2017              preserve_temp_slots (target);
2018            }
2019          else
2020            target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
2021        }
2022
2023      if (TYPE_MODE (TREE_TYPE (exp)) != BLKmode)
2024        emit_move_insn (target, gen_rtx (MEM, TYPE_MODE (TREE_TYPE (exp)),
2025                                         copy_to_reg (valreg)));
2026      else
2027        emit_block_move (target, gen_rtx (MEM, BLKmode, copy_to_reg (valreg)),
2028                         expr_size (exp),
2029                         TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
2030    }
2031  else if (target && GET_MODE (target) == TYPE_MODE (TREE_TYPE (exp))
2032           && GET_MODE (target) == GET_MODE (valreg))
2033    /* TARGET and VALREG cannot be equal at this point because the latter
2034       would not have REG_FUNCTION_VALUE_P true, while the former would if
2035       it were referring to the same register.
2036
2037       If they refer to the same register, this move will be a no-op, except
2038       when function inlining is being done.  */
2039    emit_move_insn (target, valreg);
2040  else if (TYPE_MODE (TREE_TYPE (exp)) == BLKmode)
2041    {
2042      /* Some machines (the PA for example) want to return all small
2043         structures in registers regardless of the structure's alignment.
2044         
2045         Deal with them explicitly by copying from the return registers
2046         into the target MEM locations.  */
2047      int bytes = int_size_in_bytes (TREE_TYPE (exp));
2048      int n_regs = (bytes + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
2049      int i;
2050      enum machine_mode tmpmode;
2051      rtx src, dst;
2052      int bitsize = MIN (TYPE_ALIGN (TREE_TYPE (exp)), BITS_PER_WORD);
2053      int bitpos, xbitpos, big_endian_correction = 0;
2054     
2055      if (target == 0)
2056        {
2057          target = assign_stack_temp (BLKmode, bytes, 0);
2058          MEM_IN_STRUCT_P (target) = AGGREGATE_TYPE_P (TREE_TYPE (exp));
2059          preserve_temp_slots (target);
2060        }
2061
2062      /* This code assumes valreg is at least a full word.  If it isn't,
2063         copy it into a new pseudo which is a full word.  */
2064      if (GET_MODE (valreg) != BLKmode
2065          && GET_MODE_SIZE (GET_MODE (valreg)) < UNITS_PER_WORD)
2066        valreg = convert_to_mode (SImode, valreg,
2067                                  TREE_UNSIGNED (TREE_TYPE (exp)));
2068
2069      /* Structures whose size is not a multiple of a word are aligned
2070         to the least significant byte (to the right).  On a BYTES_BIG_ENDIAN
2071         machine, this means we must skip the empty high order bytes when
2072         calculating the bit offset.  */
2073      if (BYTES_BIG_ENDIAN && bytes % UNITS_PER_WORD)
2074        big_endian_correction = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD)
2075                                                  * BITS_PER_UNIT));
2076
2077      /* Copy the structure BITSIZE bits at a time.
2078
2079         We could probably emit more efficient code for machines
2080         which do not use strict alignment, but it doesn't seem
2081         worth the effort at the current time.  */
2082      for (bitpos = 0, xbitpos = big_endian_correction;
2083           bitpos < bytes * BITS_PER_UNIT;
2084           bitpos += bitsize, xbitpos += bitsize)
2085        {
2086
2087          /* We need a new source operand each time xbitpos is on a
2088             word boundary and when xbitpos == big_endian_correction
2089             (the first time through).  */
2090          if (xbitpos % BITS_PER_WORD == 0
2091              || xbitpos == big_endian_correction)
2092            src = operand_subword_force (valreg,
2093                                         xbitpos / BITS_PER_WORD,
2094                                         BLKmode);
2095
2096          /* We need a new destination operand each time bitpos is on
2097             a word boundary.  */
2098          if (bitpos % BITS_PER_WORD == 0)
2099            dst = operand_subword (target, bitpos / BITS_PER_WORD, 1, BLKmode);
2100             
2101          /* Use xbitpos for the source extraction (right justified) and
2102             bitpos for the destination store (left justified).  */
2103          store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, word_mode,
2104                           extract_bit_field (src, bitsize,
2105                                              xbitpos % BITS_PER_WORD, 1,
2106                                              NULL_RTX, word_mode,
2107                                              word_mode,
2108                                              bitsize / BITS_PER_UNIT,
2109                                              BITS_PER_WORD),
2110                           bitsize / BITS_PER_UNIT, BITS_PER_WORD);
2111        }
2112    }
2113  else
2114    target = copy_to_reg (valreg);
2115
2116#ifdef PROMOTE_FUNCTION_RETURN
2117  /* If we promoted this return value, make the proper SUBREG.  TARGET
2118     might be const0_rtx here, so be careful.  */
2119  if (GET_CODE (target) == REG
2120      && TYPE_MODE (TREE_TYPE (exp)) != BLKmode
2121      && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
2122    {
2123      tree type = TREE_TYPE (exp);
2124      int unsignedp = TREE_UNSIGNED (type);
2125
2126      /* If we don't promote as expected, something is wrong.  */
2127      if (GET_MODE (target)
2128          != promote_mode (type, TYPE_MODE (type), &unsignedp, 1))
2129        abort ();
2130
2131      target = gen_rtx (SUBREG, TYPE_MODE (type), target, 0);
2132      SUBREG_PROMOTED_VAR_P (target) = 1;
2133      SUBREG_PROMOTED_UNSIGNED_P (target) = unsignedp;
2134    }
2135#endif
2136
2137  if (flag_short_temps)
2138    {
2139      /* Perform all cleanups needed for the arguments of this call
2140         (i.e. destructors in C++).  */
2141      expand_cleanups_to (old_cleanups);
2142    }
2143
2144  /* If size of args is variable or this was a constructor call for a stack
2145     argument, restore saved stack-pointer value.  */
2146
2147  if (old_stack_level)
2148    {
2149      emit_stack_restore (SAVE_BLOCK, old_stack_level, NULL_RTX);
2150      pending_stack_adjust = old_pending_adj;
2151#ifdef ACCUMULATE_OUTGOING_ARGS
2152      stack_arg_under_construction = old_stack_arg_under_construction;
2153      highest_outgoing_arg_in_use = initial_highest_arg_in_use;
2154      stack_usage_map = initial_stack_usage_map;
2155#endif
2156    }
2157#ifdef ACCUMULATE_OUTGOING_ARGS
2158  else
2159    {
2160#ifdef REG_PARM_STACK_SPACE
2161      if (save_area)
2162        {
2163          enum machine_mode save_mode = GET_MODE (save_area);
2164          rtx stack_area
2165            = gen_rtx (MEM, save_mode,
2166                       memory_address (save_mode,
2167#ifdef ARGS_GROW_DOWNWARD
2168                                       plus_constant (argblock, - high_to_save)
2169#else
2170                                       plus_constant (argblock, low_to_save)
2171#endif
2172                                       ));
2173
2174          if (save_mode != BLKmode)
2175            emit_move_insn (stack_area, save_area);
2176          else
2177            emit_block_move (stack_area, validize_mem (save_area),
2178                             GEN_INT (high_to_save - low_to_save + 1),
2179                             PARM_BOUNDARY / BITS_PER_UNIT);
2180        }
2181#endif
2182         
2183      /* If we saved any argument areas, restore them.  */
2184      for (i = 0; i < num_actuals; i++)
2185        if (args[i].save_area)
2186          {
2187            enum machine_mode save_mode = GET_MODE (args[i].save_area);
2188            rtx stack_area
2189              = gen_rtx (MEM, save_mode,
2190                         memory_address (save_mode,
2191                                         XEXP (args[i].stack_slot, 0)));
2192
2193            if (save_mode != BLKmode)
2194              emit_move_insn (stack_area, args[i].save_area);
2195            else
2196              emit_block_move (stack_area, validize_mem (args[i].save_area),
2197                               GEN_INT (args[i].size.constant),
2198                               PARM_BOUNDARY / BITS_PER_UNIT);
2199          }
2200
2201      highest_outgoing_arg_in_use = initial_highest_arg_in_use;
2202      stack_usage_map = initial_stack_usage_map;
2203    }
2204#endif
2205
2206  /* If this was alloca, record the new stack level for nonlocal gotos. 
2207     Check for the handler slots since we might not have a save area
2208     for non-local gotos. */
2209
2210  if (may_be_alloca && nonlocal_goto_handler_slot != 0)
2211    emit_stack_save (SAVE_NONLOCAL, &nonlocal_goto_stack_level, NULL_RTX);
2212
2213  pop_temp_slots ();
2214
2215  return target;
2216}
2217
2218/* Output a library call to function FUN (a SYMBOL_REF rtx)
2219   (emitting the queue unless NO_QUEUE is nonzero),
2220   for a value of mode OUTMODE,
2221   with NARGS different arguments, passed as alternating rtx values
2222   and machine_modes to convert them to.
2223   The rtx values should have been passed through protect_from_queue already.
2224
2225   NO_QUEUE will be true if and only if the library call is a `const' call
2226   which will be enclosed in REG_LIBCALL/REG_RETVAL notes; it is equivalent
2227   to the variable is_const in expand_call.
2228
2229   NO_QUEUE must be true for const calls, because if it isn't, then
2230   any pending increment will be emitted between REG_LIBCALL/REG_RETVAL notes,
2231   and will be lost if the libcall sequence is optimized away.
2232
2233   NO_QUEUE must be false for non-const calls, because if it isn't, the
2234   call insn will have its CONST_CALL_P bit set, and it will be incorrectly
2235   optimized.  For instance, the instruction scheduler may incorrectly
2236   move memory references across the non-const call.  */
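
/* A minimal illustration of the calling convention above (the SYMBOL_REF
   name and the operand rtxes are hypothetical, not taken from a real
   caller):

       emit_library_call (gen_rtx (SYMBOL_REF, Pmode, "__example2"),
                          1, SImode, 2,
                          op0, SImode,
                          op1, SImode);

   emits a const call to __example2 with two SImode arguments; a caller that
   wants the result reads it from hard_libcall_value (SImode).  */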
2237
2238void
2239emit_library_call VPROTO((rtx orgfun, int no_queue, enum machine_mode outmode,
2240                          int nargs, ...))
2241{
2242#ifndef __STDC__
2243  rtx orgfun;
2244  int no_queue;
2245  enum machine_mode outmode;
2246  int nargs;
2247#endif
2248  va_list p;
2249  /* Total size in bytes of all the stack-parms scanned so far.  */
2250  struct args_size args_size;
2251  /* Size of arguments before any adjustments (such as rounding).  */
2252  struct args_size original_args_size;
2253  register int argnum;
2254  rtx fun;
2255  int inc;
2256  int count;
2257  rtx argblock = 0;
2258  CUMULATIVE_ARGS args_so_far;
2259  struct arg { rtx value; enum machine_mode mode; rtx reg; int partial;
2260               struct args_size offset; struct args_size size; };
2261  struct arg *argvec;
2262  int old_inhibit_defer_pop = inhibit_defer_pop;
2263  rtx call_fusage = 0;
2264  /* library calls are never indirect calls.  */
2265  int current_call_is_indirect = 0;
2266
2267  VA_START (p, nargs);
2268
2269#ifndef __STDC__
2270  orgfun = va_arg (p, rtx);
2271  no_queue = va_arg (p, int);
2272  outmode = va_arg (p, enum machine_mode);
2273  nargs = va_arg (p, int);
2274#endif
2275
2276  fun = orgfun;
2277
2278  /* Copy all the libcall-arguments out of the varargs data
2279     and into a vector ARGVEC.
2280
2281     Compute how to pass each argument.  We only support a very small subset
2282     of the full argument passing conventions to limit complexity here since
2283     library functions shouldn't have many args.  */
2284
2285  argvec = (struct arg *) alloca (nargs * sizeof (struct arg));
2286
2287  INIT_CUMULATIVE_ARGS (args_so_far, NULL_TREE, fun);
2288
2289  args_size.constant = 0;
2290  args_size.var = 0;
2291
2292  push_temp_slots ();
2293
2294  for (count = 0; count < nargs; count++)
2295    {
2296      rtx val = va_arg (p, rtx);
2297      enum machine_mode mode = va_arg (p, enum machine_mode);
2298
2299      /* We cannot convert the arg value to the mode the library wants here;
2300         must do it earlier where we know the signedness of the arg.  */
2301      if (mode == BLKmode
2302          || (GET_MODE (val) != mode && GET_MODE (val) != VOIDmode))
2303        abort ();
2304
2305      /* On some machines, there's no way to pass a float to a library fcn.
2306         Pass it as a double instead.  */
2307#ifdef LIBGCC_NEEDS_DOUBLE
2308      if (LIBGCC_NEEDS_DOUBLE && mode == SFmode)
2309        val = convert_modes (DFmode, SFmode, val, 0), mode = DFmode;
2310#endif
2311
2312      /* There's no need to call protect_from_queue, because
2313         either emit_move_insn or emit_push_insn will do that.  */
2314
2315      /* Make sure it is a reasonable operand for a move or push insn.  */
2316      if (GET_CODE (val) != REG && GET_CODE (val) != MEM
2317          && ! (CONSTANT_P (val) && LEGITIMATE_CONSTANT_P (val)))
2318        val = force_operand (val, NULL_RTX);
2319
2320#ifdef FUNCTION_ARG_PASS_BY_REFERENCE
2321      if (FUNCTION_ARG_PASS_BY_REFERENCE (args_so_far, mode, NULL_TREE, 1))
2322        {
2323          /* We do not support FUNCTION_ARG_CALLEE_COPIES here since it can
2324             be viewed as just an efficiency improvement.  */
2325          rtx slot = assign_stack_temp (mode, GET_MODE_SIZE (mode), 0);
2326          emit_move_insn (slot, val);
2327          val = force_operand (XEXP (slot, 0), NULL_RTX);
2328          mode = Pmode;
2329        }
2330#endif
2331
2332      argvec[count].value = val;
2333      argvec[count].mode = mode;
2334
2335      argvec[count].reg = FUNCTION_ARG (args_so_far, mode, NULL_TREE, 1);
2336      if (argvec[count].reg && GET_CODE (argvec[count].reg) == EXPR_LIST)
2337        abort ();
2338#ifdef FUNCTION_ARG_PARTIAL_NREGS
2339      argvec[count].partial
2340        = FUNCTION_ARG_PARTIAL_NREGS (args_so_far, mode, NULL_TREE, 1);
2341#else
2342      argvec[count].partial = 0;
2343#endif
2344
2345      locate_and_pad_parm (mode, NULL_TREE,
2346                           argvec[count].reg && argvec[count].partial == 0,
2347                           NULL_TREE, &args_size, &argvec[count].offset,
2348                           &argvec[count].size);
2349
2350      if (argvec[count].size.var)
2351        abort ();
2352
2353#ifndef REG_PARM_STACK_SPACE
2354      if (argvec[count].partial)
2355        argvec[count].size.constant -= argvec[count].partial * UNITS_PER_WORD;
2356#endif
2357
2358      if (argvec[count].reg == 0 || argvec[count].partial != 0
2359#ifdef REG_PARM_STACK_SPACE
2360          || 1
2361#endif
2362          )
2363        args_size.constant += argvec[count].size.constant;
2364
2365#ifdef ACCUMULATE_OUTGOING_ARGS
2366      /* If this arg is actually passed on the stack, it might be
2367         clobbering something we already put there (this library call might
2368         be inside the evaluation of an argument to a function whose call
2369         requires the stack).  This will only occur when the library call
2370         has sufficient args to run out of argument registers.  Abort in
2371         this case; if this ever occurs, code must be added to save and
2372         restore the arg slot.  */
2373
2374      if (argvec[count].reg == 0 || argvec[count].partial != 0)
2375        abort ();
2376#endif
2377
2378      FUNCTION_ARG_ADVANCE (args_so_far, mode, (tree)0, 1);
2379    }
2380  va_end (p);
2381
2382  /* If this machine requires an external definition for library
2383     functions, write one out.  */
2384  assemble_external_libcall (fun);
2385
2386  original_args_size = args_size;
2387#ifdef STACK_BOUNDARY
2388  args_size.constant = (((args_size.constant + (STACK_BYTES - 1))
2389                         / STACK_BYTES) * STACK_BYTES);
2390#endif
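  /* For example, with STACK_BYTES == 8 an args_size.constant of 10 rounds up
     to 16 here; the difference from original_args_size.constant is what the
     anti_adjust_stack calls below push to keep the stack aligned.  */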
2391
2392#ifdef REG_PARM_STACK_SPACE
2393  args_size.constant = MAX (args_size.constant,
2394                            REG_PARM_STACK_SPACE (NULL_TREE));
2395#ifndef OUTGOING_REG_PARM_STACK_SPACE
2396  args_size.constant -= REG_PARM_STACK_SPACE (NULL_TREE);
2397#endif
2398#endif
2399
2400  if (args_size.constant > current_function_outgoing_args_size)
2401    current_function_outgoing_args_size = args_size.constant;
2402
2403#ifdef ACCUMULATE_OUTGOING_ARGS
2404  args_size.constant = 0;
2405#endif
2406
2407#ifndef PUSH_ROUNDING
2408  argblock = push_block (GEN_INT (args_size.constant), 0, 0);
2409#endif
2410
2411#ifdef PUSH_ARGS_REVERSED
2412#ifdef STACK_BOUNDARY
2413  /* If we push args individually in reverse order, perform stack alignment
2414     before the first push (the last arg).  */
2415  if (argblock == 0)
2416    anti_adjust_stack (GEN_INT (args_size.constant
2417                                - original_args_size.constant));
2418#endif
2419#endif
2420
2421#ifdef PUSH_ARGS_REVERSED
2422  inc = -1;
2423  argnum = nargs - 1;
2424#else
2425  inc = 1;
2426  argnum = 0;
2427#endif
2428
2429  /* Push the args that need to be pushed.  */
2430
2431  for (count = 0; count < nargs; count++, argnum += inc)
2432    {
2433      register enum machine_mode mode = argvec[argnum].mode;
2434      register rtx val = argvec[argnum].value;
2435      rtx reg = argvec[argnum].reg;
2436      int partial = argvec[argnum].partial;
2437
2438      if (! (reg != 0 && partial == 0))
2439        emit_push_insn (val, mode, NULL_TREE, NULL_RTX, 0, partial, reg, 0,
2440                        argblock, GEN_INT (argvec[argnum].offset.constant));
2441      NO_DEFER_POP;
2442    }
2443
2444#ifndef PUSH_ARGS_REVERSED
2445#ifdef STACK_BOUNDARY
2446  /* If we pushed args in forward order, perform stack alignment
2447     after pushing the last arg.  */
2448  if (argblock == 0)
2449    anti_adjust_stack (GEN_INT (args_size.constant
2450                                - original_args_size.constant));
2451#endif
2452#endif
2453
2454#ifdef PUSH_ARGS_REVERSED
2455  argnum = nargs - 1;
2456#else
2457  argnum = 0;
2458#endif
2459
2460  fun = prepare_call_address (fun, NULL_TREE, &call_fusage, 0);
2461
2462  /* Now load any reg parms into their regs.  */
2463
2464  for (count = 0; count < nargs; count++, argnum += inc)
2465    {
2466      register enum machine_mode mode = argvec[argnum].mode;
2467      register rtx val = argvec[argnum].value;
2468      rtx reg = argvec[argnum].reg;
2469      int partial = argvec[argnum].partial;
2470
2471      if (reg != 0 && partial == 0)
2472        emit_move_insn (reg, val);
2473      NO_DEFER_POP;
2474    }
2475
2476  /* For version 1.37, try deleting this entirely.  */
2477  if (! no_queue)
2478    emit_queue ();
2479
2480  /* Any regs containing parms remain in use through the call.  */
2481  for (count = 0; count < nargs; count++)
2482    if (argvec[count].reg != 0)
2483       use_reg (&call_fusage, argvec[count].reg);
2484
2485  /* Don't allow popping to be deferred, since then
2486     cse'ing of library calls could delete a call and leave the pop.  */
2487  NO_DEFER_POP;
2488
2489  /* We pass the old value of inhibit_defer_pop + 1 to emit_call_1, which
2490     will set inhibit_defer_pop to that value.  */
2491
2492  emit_call_1 (fun,
2493               get_identifier (XSTR (orgfun, 0)),
2494               get_identifier (XSTR (orgfun, 0)), args_size.constant, 0,
2495               FUNCTION_ARG (args_so_far, VOIDmode, void_type_node, 1),
2496               outmode != VOIDmode ? hard_libcall_value (outmode) : NULL_RTX,
2497               old_inhibit_defer_pop + 1, call_fusage, no_queue);
2498
2499  pop_temp_slots ();
2500
2501  /* Now restore inhibit_defer_pop to its actual original value.  */
2502  OK_DEFER_POP;
2503}
2504
2505/* Like emit_library_call except that an extra argument, VALUE,
2506   comes second and says where to store the result.
2507   (If VALUE is zero, this function chooses a convenient way
2508   to return the value.)
2509
2510   This function returns an rtx for where the value is to be found.
2511   If VALUE is nonzero, VALUE is returned.  */
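
/* For instance, with a hypothetical libfunc rtx FUN and operand rtxes
   OP0 and OP1:

       result = emit_library_call_value (fun, target, 1, DFmode, 2,
                                         op0, DFmode, op1, DFmode);

   emits the call and returns an rtx holding the DFmode result, reusing
   TARGET when one was supplied.  */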
2512
2513rtx
2514emit_library_call_value VPROTO((rtx orgfun, rtx value, int no_queue,
2515                                enum machine_mode outmode, int nargs, ...))
2516{
2517#ifndef __STDC__
2518  rtx orgfun;
2519  rtx value;
2520  int no_queue;
2521  enum machine_mode outmode;
2522  int nargs;
2523#endif
2524  va_list p;
2525  /* Total size in bytes of all the stack-parms scanned so far.  */
2526  struct args_size args_size;
2527  /* Size of arguments before any adjustments (such as rounding).  */
2528  struct args_size original_args_size;
2529  register int argnum;
2530  rtx fun;
2531  int inc;
2532  int count;
2533  rtx argblock = 0;
2534  CUMULATIVE_ARGS args_so_far;
2535  struct arg { rtx value; enum machine_mode mode; rtx reg; int partial;
2536               struct args_size offset; struct args_size size; };
2537  struct arg *argvec;
2538  int old_inhibit_defer_pop = inhibit_defer_pop;
2539  rtx call_fusage = 0;
2540  rtx mem_value = 0;
2541  int pcc_struct_value = 0;
2542  int struct_value_size = 0;
2543  /* library calls are never indirect calls.  */
2544  int current_call_is_indirect = 0;
2545  int is_const;
2546
2547  VA_START (p, nargs);
2548
2549#ifndef __STDC__
2550  orgfun = va_arg (p, rtx);
2551  value = va_arg (p, rtx);
2552  no_queue = va_arg (p, int);
2553  outmode = va_arg (p, enum machine_mode);
2554  nargs = va_arg (p, int);
2555#endif
2556
2557  is_const = no_queue;
2558  fun = orgfun;
2559
2560  /* If this kind of value comes back in memory,
2561     decide where in memory it should come back.  */
2562  if (aggregate_value_p (type_for_mode (outmode, 0)))
2563    {
2564#ifdef PCC_STATIC_STRUCT_RETURN
2565      rtx pointer_reg
2566        = hard_function_value (build_pointer_type (type_for_mode (outmode, 0)),
2567                               0);
2568      mem_value = gen_rtx (MEM, outmode, pointer_reg);
2569      pcc_struct_value = 1;
2570      if (value == 0)
2571        value = gen_reg_rtx (outmode);
2572#else /* not PCC_STATIC_STRUCT_RETURN */
2573      struct_value_size = GET_MODE_SIZE (outmode);
2574      if (value != 0 && GET_CODE (value) == MEM)
2575        mem_value = value;
2576      else
2577        mem_value = assign_stack_temp (outmode, GET_MODE_SIZE (outmode), 0);
2578#endif
2579
2580      /* This call returns a big structure.  */
2581      is_const = 0;
2582    }
2583
2584  /* ??? Unfinished: must pass the memory address as an argument.  */
2585
2586  /* Copy all the libcall-arguments out of the varargs data
2587     and into a vector ARGVEC.
2588
2589     Compute how to pass each argument.  We only support a very small subset
2590     of the full argument passing conventions to limit complexity here since
2591     library functions shouldn't have many args.  */
2592
2593  argvec = (struct arg *) alloca ((nargs + 1) * sizeof (struct arg));
2594
2595  INIT_CUMULATIVE_ARGS (args_so_far, NULL_TREE, fun);
2596
2597  args_size.constant = 0;
2598  args_size.var = 0;
2599
2600  count = 0;
2601
2602  push_temp_slots ();
2603
2604  /* If there's a structure value address to be passed,
2605     either pass it in the special place, or pass it as an extra argument.  */
2606  if (mem_value && struct_value_rtx == 0 && ! pcc_struct_value)
2607    {
2608      rtx addr = XEXP (mem_value, 0);
2609      nargs++;
2610
2611      /* Make sure it is a reasonable operand for a move or push insn.  */
2612      if (GET_CODE (addr) != REG && GET_CODE (addr) != MEM
2613          && ! (CONSTANT_P (addr) && LEGITIMATE_CONSTANT_P (addr)))
2614        addr = force_operand (addr, NULL_RTX);
2615
2616      argvec[count].value = addr;
2617      argvec[count].mode = Pmode;
2618      argvec[count].partial = 0;
2619
2620      argvec[count].reg = FUNCTION_ARG (args_so_far, Pmode, NULL_TREE, 1);
2621#ifdef FUNCTION_ARG_PARTIAL_NREGS
2622      if (FUNCTION_ARG_PARTIAL_NREGS (args_so_far, Pmode, NULL_TREE, 1))
2623        abort ();
2624#endif
2625
2626      locate_and_pad_parm (Pmode, NULL_TREE,
2627                           argvec[count].reg && argvec[count].partial == 0,
2628                           NULL_TREE, &args_size, &argvec[count].offset,
2629                           &argvec[count].size);
2630
2631
2632      if (argvec[count].reg == 0 || argvec[count].partial != 0
2633#ifdef REG_PARM_STACK_SPACE
2634          || 1
2635#endif
2636          )
2637        args_size.constant += argvec[count].size.constant;
2638
2639      FUNCTION_ARG_ADVANCE (args_so_far, Pmode, (tree)0, 1);
2640
2641      count++;
2642    }
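  /* When the address was passed as a leading argument above, COUNT is now 1
     and NARGS was incremented, so the loop below still consumes exactly the
     argument pairs supplied by the caller.  */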
2643
2644  for (; count < nargs; count++)
2645    {
2646      rtx val = va_arg (p, rtx);
2647      enum machine_mode mode = va_arg (p, enum machine_mode);
2648
2649      /* We cannot convert the arg value to the mode the library wants here;
2650         must do it earlier where we know the signedness of the arg.  */
2651      if (mode == BLKmode
2652          || (GET_MODE (val) != mode && GET_MODE (val) != VOIDmode))
2653        abort ();
2654
2655      /* On some machines, there's no way to pass a float to a library fcn.
2656         Pass it as a double instead.  */
2657#ifdef LIBGCC_NEEDS_DOUBLE
2658      if (LIBGCC_NEEDS_DOUBLE && mode == SFmode)
2659        val = convert_modes (DFmode, SFmode, val, 0), mode = DFmode;
2660#endif
2661
2662      /* There's no need to call protect_from_queue, because
2663         either emit_move_insn or emit_push_insn will do that.  */
2664
2665      /* Make sure it is a reasonable operand for a move or push insn.  */
2666      if (GET_CODE (val) != REG && GET_CODE (val) != MEM
2667          && ! (CONSTANT_P (val) && LEGITIMATE_CONSTANT_P (val)))
2668        val = force_operand (val, NULL_RTX);
2669
2670#ifdef FUNCTION_ARG_PASS_BY_REFERENCE
2671      if (FUNCTION_ARG_PASS_BY_REFERENCE (args_so_far, mode, NULL_TREE, 1))
2672        {
2673          /* We do not support FUNCTION_ARG_CALLEE_COPIES here since it can
2674             be viewed as just an efficiency improvement.  */
2675          rtx slot = assign_stack_temp (mode, GET_MODE_SIZE (mode), 0);
2676          emit_move_insn (slot, val);
2677          val = XEXP (slot, 0);
2678          mode = Pmode;
2679        }
2680#endif
2681
2682      argvec[count].value = val;
2683      argvec[count].mode = mode;
2684
2685      argvec[count].reg = FUNCTION_ARG (args_so_far, mode, NULL_TREE, 1);
2686      if (argvec[count].reg && GET_CODE (argvec[count].reg) == EXPR_LIST)
2687        abort ();
2688#ifdef FUNCTION_ARG_PARTIAL_NREGS
2689      argvec[count].partial
2690        = FUNCTION_ARG_PARTIAL_NREGS (args_so_far, mode, NULL_TREE, 1);
2691#else
2692      argvec[count].partial = 0;
2693#endif
2694
2695      locate_and_pad_parm (mode, NULL_TREE,
2696                           argvec[count].reg && argvec[count].partial == 0,
2697                           NULL_TREE, &args_size, &argvec[count].offset,
2698                           &argvec[count].size);
2699
2700      if (argvec[count].size.var)
2701        abort ();
2702
2703#ifndef REG_PARM_STACK_SPACE
2704      if (argvec[count].partial)
2705        argvec[count].size.constant -= argvec[count].partial * UNITS_PER_WORD;
2706#endif
2707
2708      if (argvec[count].reg == 0 || argvec[count].partial != 0
2709#ifdef REG_PARM_STACK_SPACE
2710          || 1
2711#endif
2712          )
2713        args_size.constant += argvec[count].size.constant;
2714
2715#ifdef ACCUMULATE_OUTGOING_ARGS
2716      /* If this arg is actually passed on the stack, it might be
2717         clobbering something we already put there (this library call might
2718         be inside the evaluation of an argument to a function whose call
2719         requires the stack).  This will only occur when the library call
2720         has sufficient args to run out of argument registers.  Abort in
2721         this case; if this ever occurs, code must be added to save and
2722         restore the arg slot.  */
2723
2724      if (argvec[count].reg == 0 || argvec[count].partial != 0)
2725        abort ();
2726#endif
2727
2728      FUNCTION_ARG_ADVANCE (args_so_far, mode, (tree)0, 1);
2729    }
2730  va_end (p);
2731
2732  /* If this machine requires an external definition for library
2733     functions, write one out.  */
2734  assemble_external_libcall (fun);
2735
2736  original_args_size = args_size;
2737#ifdef STACK_BOUNDARY
2738  args_size.constant = (((args_size.constant + (STACK_BYTES - 1))
2739                         / STACK_BYTES) * STACK_BYTES);
2740#endif
2741
2742#ifdef REG_PARM_STACK_SPACE
2743  args_size.constant = MAX (args_size.constant,
2744                            REG_PARM_STACK_SPACE (NULL_TREE));
2745#ifndef OUTGOING_REG_PARM_STACK_SPACE
2746  args_size.constant -= REG_PARM_STACK_SPACE (NULL_TREE);
2747#endif
2748#endif
2749
2750  if (args_size.constant > current_function_outgoing_args_size)
2751    current_function_outgoing_args_size = args_size.constant;
2752
2753#ifdef ACCUMULATE_OUTGOING_ARGS
2754  args_size.constant = 0;
2755#endif
2756
2757#ifndef PUSH_ROUNDING
2758  argblock = push_block (GEN_INT (args_size.constant), 0, 0);
2759#endif
2760
2761#ifdef PUSH_ARGS_REVERSED
2762#ifdef STACK_BOUNDARY
2763  /* If we push args individually in reverse order, perform stack alignment
2764     before the first push (the last arg).  */
2765  if (argblock == 0)
2766    anti_adjust_stack (GEN_INT (args_size.constant
2767                                - original_args_size.constant));
2768#endif
2769#endif
2770
2771#ifdef PUSH_ARGS_REVERSED
2772  inc = -1;
2773  argnum = nargs - 1;
2774#else
2775  inc = 1;
2776  argnum = 0;
2777#endif
2778
2779  /* Push the args that need to be pushed.  */
2780
2781  for (count = 0; count < nargs; count++, argnum += inc)
2782    {
2783      register enum machine_mode mode = argvec[argnum].mode;
2784      register rtx val = argvec[argnum].value;
2785      rtx reg = argvec[argnum].reg;
2786      int partial = argvec[argnum].partial;
2787
2788      if (! (reg != 0 && partial == 0))
2789        emit_push_insn (val, mode, NULL_TREE, NULL_RTX, 0, partial, reg, 0,
2790                        argblock, GEN_INT (argvec[argnum].offset.constant));
2791      NO_DEFER_POP;
2792    }
2793
2794#ifndef PUSH_ARGS_REVERSED
2795#ifdef STACK_BOUNDARY
2796  /* If we pushed args in forward order, perform stack alignment
2797     after pushing the last arg.  */
2798  if (argblock == 0)
2799    anti_adjust_stack (GEN_INT (args_size.constant
2800                                - original_args_size.constant));
2801#endif
2802#endif
2803
2804#ifdef PUSH_ARGS_REVERSED
2805  argnum = nargs - 1;
2806#else
2807  argnum = 0;
2808#endif
2809
2810  fun = prepare_call_address (fun, NULL_TREE, &call_fusage, 0);
2811
2812  /* Now load any reg parms into their regs.  */
2813
2814  for (count = 0; count < nargs; count++, argnum += inc)
2815    {
2816      register enum machine_mode mode = argvec[argnum].mode;
2817      register rtx val = argvec[argnum].value;
2818      rtx reg = argvec[argnum].reg;
2819      int partial = argvec[argnum].partial;
2820
2821      if (reg != 0 && partial == 0)
2822        emit_move_insn (reg, val);
2823      NO_DEFER_POP;
2824    }
2825
2826#if 0
2827  /* For version 1.37, try deleting this entirely.  */
2828  if (! no_queue)
2829    emit_queue ();
2830#endif
2831
2832  /* Any regs containing parms remain in use through the call.  */
2833  for (count = 0; count < nargs; count++)
2834    if (argvec[count].reg != 0)
2835       use_reg (&call_fusage, argvec[count].reg);
2836
2837  /* Pass the function the address in which to return a structure value.  */
2838  if (mem_value != 0 && struct_value_rtx != 0 && ! pcc_struct_value)
2839    {
2840      emit_move_insn (struct_value_rtx,
2841                      force_reg (Pmode,
2842                                 force_operand (XEXP (mem_value, 0),
2843                                                NULL_RTX)));
2844      if (GET_CODE (struct_value_rtx) == REG)
2845          use_reg (&call_fusage, struct_value_rtx);
2846    }
2847
2848  /* Don't allow popping to be deferred, since then
2849     cse'ing of library calls could delete a call and leave the pop.  */
2850  NO_DEFER_POP;
2851
2852  /* We pass the old value of inhibit_defer_pop + 1 to emit_call_1, which
2853     will set inhibit_defer_pop to that value.  */
2854
2855  emit_call_1 (fun,
2856               get_identifier (XSTR (orgfun, 0)),
2857               get_identifier (XSTR (orgfun, 0)), args_size.constant,
2858               struct_value_size,
2859               FUNCTION_ARG (args_so_far, VOIDmode, void_type_node, 1),
2860               (outmode != VOIDmode && mem_value == 0
2861                ? hard_libcall_value (outmode) : NULL_RTX),
2862               old_inhibit_defer_pop + 1, call_fusage, is_const);
2863
2864  /* Now restore inhibit_defer_pop to its actual original value.  */
2865  OK_DEFER_POP;
2866
2867  pop_temp_slots ();
2868
2869  /* Copy the value to the right place.  */
2870  if (outmode != VOIDmode)
2871    {
2872      if (mem_value)
2873        {
2874          if (value == 0)
2875            value = mem_value;
2876          if (value != mem_value)
2877            emit_move_insn (value, mem_value);
2878        }
2879      else if (value != 0)
2880        emit_move_insn (value, hard_libcall_value (outmode));
2881      else
2882        value = hard_libcall_value (outmode);
2883    }
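  /* A rough usage sketch (hypothetical call site, not taken from this file),
     assuming this is the value-returning libcall helper:

       result = emit_library_call_value (libfunc, NULL_RTX, 1, SImode,
                                         2, op0, SImode, op1, SImode);

     Here VALUE is 0 and OUTMODE is SImode, so on a target whose libcalls
     return scalars in a hard register the result ends up in
     hard_libcall_value (SImode), which is what is returned below.  */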
2884
2885  return value;
2886}
2887
2888#if 0
2889/* Return an rtx which represents a suitable home on the stack
2890   given TYPE, the type of the argument looking for a home.
2891   This is called only for BLKmode arguments.
2892
2893   SIZE is the size needed for this target.
2894   ARGS_ADDR is the address of the bottom of the argument block for this call.
2895   OFFSET describes this parameter's offset into ARGS_ADDR.  It is meaningless
2896   if this machine uses push insns.  */
2897
2898static rtx
2899target_for_arg (type, size, args_addr, offset)
2900     tree type;
2901     rtx size;
2902     rtx args_addr;
2903     struct args_size offset;
2904{
2905  rtx target;
2906  rtx offset_rtx = ARGS_SIZE_RTX (offset);
2907
2908  /* We avoid calling memory_address when possible,
2909     because we want to address as close to the stack
2910     as possible.  For non-variable-sized arguments,
2911     this will be stack-pointer-relative addressing.  */
2912  if (GET_CODE (offset_rtx) == CONST_INT)
2913    target = plus_constant (args_addr, INTVAL (offset_rtx));
2914  else
2915    {
2916      /* I have no idea how to guarantee that this
2917         will work in the presence of register parameters.  */
2918      target = gen_rtx (PLUS, Pmode, args_addr, offset_rtx);
2919      target = memory_address (QImode, target);
2920    }
2921
2922  return gen_rtx (MEM, BLKmode, target);
2923}
2924#endif
2925
2926/* Store a single argument for a function call
2927   into the register or memory area where it must be passed.
2928   *ARG describes the argument value and where to pass it.
2929
2930   ARGBLOCK is the address of the stack-block for all the arguments,
2931   or 0 on a machine where arguments are pushed individually.
2932
2933   MAY_BE_ALLOCA nonzero says this could be a call to `alloca'
2934   so must be careful about how the stack is used.
2935
2936   VARIABLE_SIZE nonzero says that this was a variable-sized outgoing
2937   argument stack.  This is used, when ACCUMULATE_OUTGOING_ARGS is defined,
2938   to indicate that we need not worry about saving and restoring the stack.
2939
2940   FNDECL is the declaration of the function we are calling.

   REG_PARM_STACK_SPACE is the size, in bytes, of the fixed area at the start
   of the argument block reserved for arguments that are also passed in
   registers; that area has already been saved, so it is not saved again
   here.  */
2941
2942static void
2943store_one_arg (arg, argblock, may_be_alloca, variable_size, fndecl,
2944               reg_parm_stack_space)
2945     struct arg_data *arg;
2946     rtx argblock;
2947     int may_be_alloca;
2948     int variable_size;
2949     tree fndecl;
2950     int reg_parm_stack_space;
2951{
2952  register tree pval = arg->tree_value;
2953  rtx reg = 0;
2954  int partial = 0;
2955  int used = 0;
2956  int i, lower_bound, upper_bound;
2957
2958  if (TREE_CODE (pval) == ERROR_MARK)
2959    return;
2960
2961  /* Push a new temporary level for any temporaries we make for
2962     this argument.  */
2963  push_temp_slots ();
2964
2965#ifdef ACCUMULATE_OUTGOING_ARGS
2966  /* If this is being stored into a pre-allocated, fixed-size, stack area,
2967     save any previous data at that location.  */
2968  if (argblock && ! variable_size && arg->stack)
2969    {
2970#ifdef ARGS_GROW_DOWNWARD
2971      /* stack_slot is negative, but we want to index stack_usage_map
2972         with positive values.  */
2973      if (GET_CODE (XEXP (arg->stack_slot, 0)) == PLUS)
2974        upper_bound = -INTVAL (XEXP (XEXP (arg->stack_slot, 0), 1)) + 1;
2975      else
2976        abort ();
2977
2978      lower_bound = upper_bound - arg->size.constant;
2979#else
2980      if (GET_CODE (XEXP (arg->stack_slot, 0)) == PLUS)
2981        lower_bound = INTVAL (XEXP (XEXP (arg->stack_slot, 0), 1));
2982      else
2983        lower_bound = 0;
2984
2985      upper_bound = lower_bound + arg->size.constant;
2986#endif
2987
2988      for (i = lower_bound; i < upper_bound; i++)
2989        if (stack_usage_map[i]
2990#ifdef REG_PARM_STACK_SPACE
2991            /* Don't store things in the fixed argument area at this point;
2992               it has already been saved.  */
2993            && i > reg_parm_stack_space
2994#endif
2995            )
2996          break;
2997
2998      if (i != upper_bound)
2999        {
3000          /* We need to make a save area.  See what mode we can make it.  */
3001          enum machine_mode save_mode
3002            = mode_for_size (arg->size.constant * BITS_PER_UNIT, MODE_INT, 1);
3003          rtx stack_area
3004            = gen_rtx (MEM, save_mode,
3005                       memory_address (save_mode, XEXP (arg->stack_slot, 0)));
3006
3007          if (save_mode == BLKmode)
3008            {
3009              arg->save_area = assign_stack_temp (BLKmode,
3010                                                  arg->size.constant, 0);
3011              MEM_IN_STRUCT_P (arg->save_area)
3012                = AGGREGATE_TYPE_P (TREE_TYPE (arg->tree_value));
3013              preserve_temp_slots (arg->save_area);
3014              emit_block_move (validize_mem (arg->save_area), stack_area,
3015                               GEN_INT (arg->size.constant),
3016                               PARM_BOUNDARY / BITS_PER_UNIT);
3017            }
3018          else
3019            {
3020              arg->save_area = gen_reg_rtx (save_mode);
3021              emit_move_insn (arg->save_area, stack_area);
3022            }
3023        }
3024    }
3025#endif
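  /* To illustrate the bookkeeping above (illustrative offsets only): if this
     argument's stack slot is at (plus ARGBLOCK 16) and its size is 8 bytes,
     then bytes 16..23 of stack_usage_map are inspected; if any of them is
     already in use, the old contents are copied either into a pseudo
     register (when a suitable integer mode exists) or into a BLKmode stack
     temporary, so they can be restored later.  */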
3026
3027  /* If this isn't going to be placed on both the stack and in registers,
3028     set up the register and number of words.  */
3029  if (! arg->pass_on_stack)
3030    reg = arg->reg, partial = arg->partial;
3031
3032  if (reg != 0 && partial == 0)
3033    /* Being passed entirely in a register.  We shouldn't be called in
3034       this case.   */
3035    abort ();
3036
3037#ifdef STRICT_ALIGNMENT
3038  /* If this arg needs special alignment, don't load the registers
3039     here.  */
3040  if (arg->n_aligned_regs != 0)
3041    reg = 0;
3042#endif
3043 
3044  /* If this is being partially passed in a register, but multiple locations
3045     are specified, we assume that the one partially used is the one that is
3046     listed first.  */
3047  if (reg && GET_CODE (reg) == EXPR_LIST)
3048    reg = XEXP (reg, 0);
3049
3050  /* If this is being passed partially in a register, we can't evaluate
3051     it directly into its stack slot.  Otherwise, we can.  */
3052  if (arg->value == 0)
3053    {
3054#ifdef ACCUMULATE_OUTGOING_ARGS
3055      /* stack_arg_under_construction is nonzero if a function argument is
3056         being evaluated directly into the outgoing argument list and
3057         expand_call must take special action to preserve the argument list
3058         if it is called recursively.
3059
3060         For scalar function arguments stack_usage_map is sufficient to
3061         determine which stack slots must be saved and restored.  Scalar
3062         arguments in general have pass_on_stack == 0.
3063
3064         If this argument is initialized by a function which takes the
3065         address of the argument (a C++ constructor or a C function
3066         returning a BLKmode structure), then stack_usage_map is
3067         insufficient and expand_call must push the stack around the
3068         function call.  Such arguments have pass_on_stack == 1.
3069
3070         Note that it is always safe to set stack_arg_under_construction,
3071         but this generates suboptimal code if set when not needed.  */
3072
3073      if (arg->pass_on_stack)
3074        stack_arg_under_construction++;
3075#endif
3076      arg->value = expand_expr (pval,
3077                                (partial
3078                                 || TYPE_MODE (TREE_TYPE (pval)) != arg->mode)
3079                                ? NULL_RTX : arg->stack,
3080                                VOIDmode, 0);
3081
3082      /* If we are promoting the object, or if for any other reason the
3083         mode doesn't agree, convert the value to the mode it is passed in.  */
3084
3085      if (arg->mode != TYPE_MODE (TREE_TYPE (pval)))
3086        arg->value = convert_modes (arg->mode, TYPE_MODE (TREE_TYPE (pval)),
3087                                    arg->value, arg->unsignedp);
3088
3089#ifdef ACCUMULATE_OUTGOING_ARGS
3090      if (arg->pass_on_stack)
3091        stack_arg_under_construction--;
3092#endif
3093    }
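  /* A note on the mode conversion above: on targets that define
     PROMOTE_FUNCTION_ARGS, a narrow argument such as a `short' may be passed
     with arg->mode == SImode even though TYPE_MODE of its type is HImode;
     in that case convert_modes widens the computed value using the
     signedness recorded in arg->unsignedp.  */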
3094
3095  /* Don't allow anything left on stack from computation
3096     of argument to alloca.  */
3097  if (may_be_alloca)
3098    do_pending_stack_adjust ();
3099
3100  if (arg->value == arg->stack)
3101    /* If the value is already in the stack slot, we are done.  */
3102    ;
3103  else if (arg->mode != BLKmode)
3104    {
3105      register int size;
3106
3107      /* Argument is a scalar, not entirely passed in registers.
3108         (If part is passed in registers, arg->partial says how much
3109         and emit_push_insn will take care of putting it there.)
3110         
3111         Push it, and if its size is less than the
3112         amount of space allocated to it,
3113         also bump stack pointer by the additional space.
3114         Note that in C the default argument promotions
3115         will prevent such mismatches.  */
3116
3117      size = GET_MODE_SIZE (arg->mode);
3118      /* Compute how much space the push instruction will push.
3119         On many machines, pushing a byte will advance the stack
3120         pointer by a halfword.  */
3121#ifdef PUSH_ROUNDING
3122      size = PUSH_ROUNDING (size);
3123#endif
3124      used = size;
3125
3126      /* Compute how much space the argument should get:
3127         round up to a multiple of the alignment for arguments.  */
3128      if (none != FUNCTION_ARG_PADDING (arg->mode, TREE_TYPE (pval)))
3129        used = (((size + PARM_BOUNDARY / BITS_PER_UNIT - 1)
3130                 / (PARM_BOUNDARY / BITS_PER_UNIT))
3131                * (PARM_BOUNDARY / BITS_PER_UNIT));
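      /* Illustrative numbers only: a 2-byte HImode argument with a
         PARM_BOUNDARY of 32 bits gives used == 4, so emit_push_insn below is
         told about used - size == 2 extra bytes and bumps the stack pointer
         past that padding.  */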
3132
3133      /* This isn't already where we want it on the stack, so put it there.
3134         This can either be done with push or copy insns.  */
3135      emit_push_insn (arg->value, arg->mode, TREE_TYPE (pval), NULL_RTX,
3136                      0, partial, reg, used - size,
3137                      argblock, ARGS_SIZE_RTX (arg->offset));
3138    }
3139  else
3140    {
3141      /* BLKmode, at least partly to be pushed.  */
3142
3143      register int excess;
3144      rtx size_rtx;
3145
3146      /* Pushing a nonscalar.
3147         If part is passed in registers, PARTIAL says how much
3148         and emit_push_insn will take care of putting it there.  */
3149
3150      /* Round its size up to a multiple
3151         of the allocation unit for arguments.  */
3152
3153      if (arg->size.var != 0)
3154        {
3155          excess = 0;
3156          size_rtx = ARGS_SIZE_RTX (arg->size);
3157        }
3158      else
3159        {
3160          /* PUSH_ROUNDING has no effect on us, because
3161             emit_push_insn for BLKmode is careful to avoid it.  */
3162          excess = (arg->size.constant - int_size_in_bytes (TREE_TYPE (pval))
3163                    + partial * UNITS_PER_WORD);
3164          size_rtx = expr_size (pval);
3165        }
3166
3167      emit_push_insn (arg->value, arg->mode, TREE_TYPE (pval), size_rtx,
3168                      TYPE_ALIGN (TREE_TYPE (pval)) / BITS_PER_UNIT, partial,
3169                      reg, excess, argblock, ARGS_SIZE_RTX (arg->offset));
3170    }
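  /* Illustrative numbers only: for a 10-byte struct whose stack size was
     rounded up to arg->size.constant == 12 and which is not partially in
     registers, EXCESS is 2, and emit_push_insn accounts for those two bytes
     of padding beyond the value itself.  */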
3171
3172
3173  /* Unless this is a partially-in-register argument, the argument is now
3174     in the stack.
3175
3176     ??? Note that this can change arg->value from arg->stack to
3177     arg->stack_slot and it matters when they are not the same.
3178     It isn't totally clear that this is correct in all cases.  */
3179  if (partial == 0)
3180    arg->value = arg->stack_slot;
3181
3182  /* Once we have pushed something, pops can't safely
3183     be deferred during the rest of the arguments.  */
3184  NO_DEFER_POP;
3185
3186  /* ANSI doesn't require a sequence point here,
3187     but PCC has one, so this will avoid some problems.  */
3188  emit_queue ();
3189
3190  /* Free any temporary slots made in processing this argument.  Show
3191     that we might have taken the address of something and pushed that
3192     as an operand.  */
3193  preserve_temp_slots (NULL_RTX);
3194  free_temp_slots ();
3195  pop_temp_slots ();
3196
3197#ifdef ACCUMULATE_OUTGOING_ARGS
3198  /* Now mark the segment we just used.  */
3199  if (argblock && ! variable_size && arg->stack)
3200    for (i = lower_bound; i < upper_bound; i++)
3201      stack_usage_map[i] = 1;
3202#endif
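  /* The bytes marked here are the ones examined at the top of this function,
     so that a later argument (or a nested call expanded while constructing
     one) knows it must save this slot before reusing it.  */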
3203}