source: trunk/third/gcc/profile.c @ 11288

1/* Calculate branch probabilities, and basic block execution counts.
2   Copyright (C) 1990, 91-94, 96, 97, 1998 Free Software Foundation, Inc.
3   Contributed by James E. Wilson, UC Berkeley/Cygnus Support;
4   based on some ideas from Dain Samples of UC Berkeley.
5   Further mangling by Bob Manson, Cygnus Support.
6
7This file is part of GNU CC.
8
9GNU CC is free software; you can redistribute it and/or modify
10it under the terms of the GNU General Public License as published by
11the Free Software Foundation; either version 2, or (at your option)
12any later version.
13
14GNU CC is distributed in the hope that it will be useful,
15but WITHOUT ANY WARRANTY; without even the implied warranty of
16MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
17GNU General Public License for more details.
18
19You should have received a copy of the GNU General Public License
20along with GNU CC; see the file COPYING.  If not, write to
21the Free Software Foundation, 675 Mass Ave, Cambridge, MA 02139, USA.  */
22
23/* ??? Really should not put insns inside of LIBCALL sequences.  When putting
24   insns after a call, should look for the insn setting the retval, and
25   insert the insns after that one.  */
26
27/* ??? Register allocation should use basic block execution counts to
28   give preference to the most commonly executed blocks.  */
29
30/* ??? The .da files are not safe.  Changing the program after creating .da
31   files or using different options when compiling with -fbranch-probabilities
32   can result in the arc data not matching the program.  Maybe add instrumented
33   arc count to the .bbg file?  Maybe check whether the PFG matches the .bbg file?  */
34
35/* ??? Should calculate branch probabilities before instrumenting code, since
36   then we can use arc counts to help decide which arcs to instrument.  */
37
38/* ??? Rearrange code so that the most frequently executed arcs become from
39   one block to the next block (i.e. a fall through), move seldom executed
40   code outside of loops even at the expense of adding a few branches to
41   achieve this; see Dain Samples' UC Berkeley thesis.  */
42
43#include "config.h"
44#include <stdio.h>
45#include "rtl.h"
46#include "flags.h"
47#include "insn-flags.h"
48#include "insn-config.h"
49#include "output.h"
50#include "regs.h"
51#include "tree.h"
52#include "output.h"
53#include "gcov-io.h"
54
55extern char * xmalloc ();
56extern void free ();
57
58/* One of these is dynamically created whenever we identify an arc in the
59   function.  */
60
61struct adj_list
62{
63  int source;
64  int target;
65  int arc_count;
66  unsigned int count_valid : 1;
67  unsigned int on_tree : 1;
68  unsigned int fake : 1;
69  unsigned int fall_through : 1;
70  rtx branch_insn;
71  struct adj_list *pred_next;
72  struct adj_list *succ_next;
73};
74
75#define ARC_TARGET(ARCPTR) (ARCPTR->target)
76#define ARC_SOURCE(ARCPTR) (ARCPTR->source)
77#define ARC_COUNT(ARCPTR)  (ARCPTR->arc_count)
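/* Each adj_list node is threaded onto two singly linked lists at once:
   the succ list of its source block (via succ_next) and the pred list of
   its target block (via pred_next); see init_arc below.  */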
78
79/* Count the number of basic blocks, and create an array of these structures,
80   one for each bb in the function.  */
81
82struct bb_info
83{
84  struct adj_list *succ;
85  struct adj_list *pred;
86  int succ_count;
87  int pred_count;
88  int exec_count;
89  unsigned int count_valid : 1;
90  unsigned int on_tree : 1;
91  rtx first_insn;
92};
93
94/* Indexed by label number, gives the basic block number containing that
95   label.  */
96
97static int *label_to_bb;
98
99/* Number of valid entries in the label_to_bb array.  */
100
101static int label_to_bb_size;
102
103/* Indexed by block index, holds the basic block graph.  */
104
105static struct bb_info *bb_graph;
106
107/* Name and file pointer of the output file for the basic block graph.  */
108
109static char *bbg_file_name;
110static FILE *bbg_file;
111
112/* Name and file pointer of the input file for the arc count data.  */
113
114static char *da_file_name;
115static FILE *da_file;
116
117/* Pointer to the output file for the basic block/line number map.  */
118static FILE *bb_file;
119
120/* Last source file name written to bb_file. */
121
122static char *last_bb_file_name;
123
124/* Indicates whether the next line number note should be output to
125   bb_file or not.  Used to eliminate a redundant note after an
126   expanded inline function call.  */
127
128static int ignore_next_note;
129
130/* Used by final, for allocating the proper amount of storage for the
131   instrumented arc execution counts.  */
132
133int count_instrumented_arcs;
134
135/* Number of executions for the return label.  */
136
137int return_label_execution_count;
138
139/* Collect statistics on the performance of this pass for the entire source
140   file.  */
141
142static int total_num_blocks;
143static int total_num_arcs;
144static int total_num_arcs_instrumented;
145static int total_num_blocks_created;
146static int total_num_passes;
147static int total_num_times_called;
148static int total_hist_br_prob[20];
149static int total_num_never_executed;
150static int total_num_branches;
151
152/* Forward declarations.  */
153static void init_arc PROTO((struct adj_list *, int, int, rtx));
154static void find_spanning_tree PROTO((int));
155static void expand_spanning_tree PROTO((int));
156static void fill_spanning_tree PROTO((int));
157static void init_arc_profiler PROTO((void));
158static void output_arc_profiler PROTO((int, rtx));
159
160#ifndef LONG_TYPE_SIZE
161#define LONG_TYPE_SIZE BITS_PER_WORD
162#endif
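/* Each arc counter is one long; output_arc_profiler below addresses
   counter N at byte offset N * (LONG_TYPE_SIZE / BITS_PER_UNIT) from the
   shared profiler label.  */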
163
164/* If non-zero, we need to output a constructor to set up the
165   per-object-file data. */
166static int need_func_profiler = 0;
167
168
169/* Add arc instrumentation code to the entire insn chain.
170
171   F is the first insn of the chain.
172   NUM_BLOCKS is the number of basic blocks found in F.
173   DUMP_FILE, if nonzero, is an rtl dump file we can write to.  */
174
175static void
176instrument_arcs (f, num_blocks, dump_file)
177     rtx f;
178     int num_blocks;
179     FILE *dump_file;
180{
181  register int i;
182  register struct adj_list *arcptr, *backptr;
183  int num_arcs = 0;
184  int num_instr_arcs = 0;
185  rtx insn;
186
187  int neg_one = -1;
188  int zero = 0;
189  int inverted;
190  rtx note;
191
192  /* Instrument the function entry.  */
193  /* Handle block 0 specially, since it will always be instrumented,
194     but it doesn't have a valid first_insn or branch_insn.  We must
195     put the instructions before the NOTE_INSN_FUNCTION_BEG note, so
196     that they don't clobber any of the parameters of the current
197     function.  */
198  for (insn = f; insn; insn = NEXT_INSN (insn))
199    if (GET_CODE (insn) == NOTE
200        && NOTE_LINE_NUMBER (insn) == NOTE_INSN_FUNCTION_BEG)
201      break;
202  insn = PREV_INSN (insn);
203  need_func_profiler = 1;
204  output_arc_profiler (total_num_arcs_instrumented + num_instr_arcs++, insn);
205
206  for (i = 1; i < num_blocks; i++)
207    for (arcptr = bb_graph[i].succ; arcptr; arcptr = arcptr->succ_next)
208      if (! arcptr->on_tree)
209        {
210          if (dump_file)
211            fprintf (dump_file, "Arc %d to %d instrumented\n", i,
212                     ARC_TARGET (arcptr));
213
214          /* Check to see if this arc is the only exit from its source block,
215             or the only entrance to its target block.  In either case,
216             we don't need to create a new block to instrument the arc.  */
217         
218          if (bb_graph[i].succ == arcptr && arcptr->succ_next == 0)
219            {
220              /* Instrument the source block.  */
221              output_arc_profiler (total_num_arcs_instrumented
222                                   + num_instr_arcs++,
223                                   PREV_INSN (bb_graph[i].first_insn));
224            }
225          else if (arcptr == bb_graph[ARC_TARGET (arcptr)].pred
226                   && arcptr->pred_next == 0)
227            {
228              /* Instrument the target block.  */
229              output_arc_profiler (total_num_arcs_instrumented
230                                   + num_instr_arcs++,
231                                   PREV_INSN (bb_graph[ARC_TARGET (arcptr)].first_insn));
232            }
233          else if (arcptr->fall_through)
234            {
235              /* This is a fall-through; put the instrumentation code after
236                 the branch that ends this block.  */
237             
238              for (backptr = bb_graph[i].succ; backptr;
239                   backptr = backptr->succ_next)
240                if (backptr != arcptr)
241                  break;
242             
243              output_arc_profiler (total_num_arcs_instrumented
244                                   + num_instr_arcs++,
245                                   backptr->branch_insn);
246            }
247          else
248            {
249              /* Must emit a new basic block to hold the arc counting code.  */
250              enum rtx_code code = GET_CODE (PATTERN (arcptr->branch_insn));
251
252              if (code == SET)
253                {
254                  /* Create the new basic block right after the branch.
255                     Invert the branch so that it jumps past the end of the new
256                     block.  The new block will consist of the instrumentation
257                     code, and a jump to the target of this arc.  */
258                  int this_is_simplejump = simplejump_p (arcptr->branch_insn);
259                  rtx new_label = gen_label_rtx ();
260                  rtx old_label, set_src;
261                  rtx after = arcptr->branch_insn;
262                 
263                  /* Simplejumps can't reach here.  */
264                  if (this_is_simplejump)
265                    abort ();
266
267                  /* We can't use JUMP_LABEL, because it won't be set if we
268                     are compiling without optimization.  */
269
270                  set_src = SET_SRC (single_set (arcptr->branch_insn));
271                  if (GET_CODE (set_src) == LABEL_REF)
272                    old_label = set_src;
273                  else if (GET_CODE (set_src) != IF_THEN_ELSE)
274                    abort ();
275                  else if (XEXP (set_src, 1) == pc_rtx)
276                    old_label = XEXP (XEXP (set_src, 2), 0);
277                  else
278                    old_label = XEXP (XEXP (set_src, 1), 0);
279
280                  /* Set the JUMP_LABEL so that redirect_jump will work.  */
281                  JUMP_LABEL (arcptr->branch_insn) = old_label;
282
283                  /* Add a use for OLD_LABEL that will be needed when we emit
284                     the JUMP_INSN below.  If we don't do this here,
285                     `invert_jump' might delete it for us.  We must add two
286                     when not optimizing, because the NUSES is zero now,
287                     but must be at least two to prevent the label from being
288                     deleted.  */
289                  LABEL_NUSES (old_label) += 2;
290                 
291                  /* Emit the insns for the new block in reverse order,
292                     since that is most convenient.  */
293
294                  if (this_is_simplejump)
295                    {
296                      after = NEXT_INSN (arcptr->branch_insn);
297                      if (! redirect_jump (arcptr->branch_insn, new_label))
298                        /* Don't know what to do if this branch won't
299                           redirect.  */
300                        abort ();
301                    }
302                  else
303                    {
304                      if (! invert_jump (arcptr->branch_insn, new_label))
305                        /* Don't know what to do if this branch won't invert.  */
306                        abort ();
307
308                      emit_label_after (new_label, after);
309                      LABEL_NUSES (new_label)++;
310                    }
311                  emit_barrier_after (after);
312                  emit_jump_insn_after (gen_jump (old_label), after);
313                  JUMP_LABEL (NEXT_INSN (after)) = old_label;
314                 
315                  /* Instrument the source arc.  */
316                  output_arc_profiler (total_num_arcs_instrumented
317                                       + num_instr_arcs++,
318                                       after);
319                  if (this_is_simplejump)
320                    {
321                      emit_label_after (new_label, after);
322                      LABEL_NUSES (new_label)++;
323                    }
324                }
325              else if (code == ADDR_VEC || code == ADDR_DIFF_VEC)
326                {
327                  /* A table jump.  Create a new basic block immediately
328                     after the table, by emitting a barrier, a label, a
329                     counting note, and a jump to the old label.  Put the
330                     new label in the table.  */
331                 
332                  rtx new_label = gen_label_rtx ();
333                  rtx old_lref, new_lref;
334                  int index;
335                 
336                  /* Must determine the old_label reference, do this
337                     by counting the arcs after this one, which will
338                     give the index of our label in the table.  */
339                 
340                  index = 0;
341                  for (backptr = arcptr->succ_next; backptr;
342                       backptr = backptr->succ_next)
343                    index++;
344                 
345                  old_lref = XVECEXP (PATTERN (arcptr->branch_insn),
346                                      (code == ADDR_DIFF_VEC), index);
347                 
348                  /* Emit the insns for the new block in reverse order,
349                     since that is most convenient.  */
350                  emit_jump_insn_after (gen_jump (XEXP (old_lref, 0)),
351                                        arcptr->branch_insn);
352                  JUMP_LABEL (NEXT_INSN (arcptr->branch_insn))
353                    = XEXP (old_lref, 0);
354
355                  /* Instrument the source arc.  */
356                  output_arc_profiler (total_num_arcs_instrumented
357                                       + num_instr_arcs++,
358                                       arcptr->branch_insn);
359
360                  emit_label_after (new_label, arcptr->branch_insn);
361                  LABEL_NUSES (NEXT_INSN (arcptr->branch_insn))++;
362                  emit_barrier_after (arcptr->branch_insn);
363                 
364                  /* Fix up the table jump.  */
365                  new_lref = gen_rtx (LABEL_REF, Pmode, new_label);
366                  XVECEXP (PATTERN (arcptr->branch_insn),
367                           (code == ADDR_DIFF_VEC), index) = new_lref;
368                }
369              else
370                abort ();
371
372              num_arcs += 1;
373              if (dump_file)
374                fprintf (dump_file,
375                         "Arc %d to %d needed new basic block\n", i,
376                         ARC_TARGET (arcptr));
377            }
378        }
379 
380  total_num_arcs_instrumented += num_instr_arcs;
381  count_instrumented_arcs = total_num_arcs_instrumented;
382
383  total_num_blocks_created += num_arcs;
384  if (dump_file)
385    {
386      fprintf (dump_file, "%d arcs instrumented\n", num_instr_arcs);
387      fprintf (dump_file, "%d extra basic blocks created\n", num_arcs);
388    }
389}
390
391/* Output STRING to bb_file, surrounded by DELIMITER.  */
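/* For example, output_gcov_string ("foo.c", -1) writes the long -1, then
   the six bytes of "foo.c" including its terminating null, then two zero
   bytes of padding to reach a 4 byte boundary, and finally the long -1
   again so gcov can find the end of the string.  */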
392
393static void
394output_gcov_string (string, delimiter)
395     char *string;
396     long delimiter;
397{
398  long temp;
399                       
400  /* Write a delimiter to indicate that a string follows.  */
401  __write_long (delimiter, bb_file, 4);
402
403  /* Write the string.  */
404  temp = strlen (string) + 1;
405  fwrite (string, temp, 1, bb_file);
406
407  /* Append a few zeros, to align the output to a 4 byte boundary.  */
408  temp = temp & 0x3;
409  if (temp)
410    {
411      char c[4];
412
413      c[0] = c[1] = c[2] = c[3] = 0;
414      fwrite (c, sizeof (char), 4 - temp, bb_file);
415    }
416
417  /* Store another delimiter in the .bb file, just to make it easy to find the
418     end of the string.  */
419  __write_long (delimiter, bb_file, 4);
420}
421
422/* Instrument and/or analyze program behavior based on program flow graph.
423   In either case, this function builds a flow graph for the function being
424   compiled.  The flow graph is stored in BB_GRAPH.
425
426   When FLAG_PROFILE_ARCS is nonzero, this function instruments the arcs in
427   the flow graph that are needed to reconstruct the dynamic behavior of the
428   flow graph.
429
430   When FLAG_BRANCH_PROBABILITIES is nonzero, this function reads auxiliary
431   information from a data file containing arc count information from previous
432   executions of the function being compiled.  In this case, the flow graph is
433   annotated with actual execution counts, which are later propagated into the
434   rtl for optimization purposes.
435
436   Main entry point of this file.  */
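/* Note that the .bb and .bbg files are written only when
   flag_test_coverage is set, and the .da file of arc counts is read
   only when flag_branch_probabilities is set; see init_branch_prob.  */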
437
438void
439branch_prob (f, dump_file)
440     rtx f;
441     FILE *dump_file;
442{
443  int i, num_blocks;
444  int dest;
445  rtx insn;
446  struct adj_list *arcptr;
447  int num_arcs, changes, passes;
448  int total, prob;
449  int hist_br_prob[20], num_never_executed, num_branches;
450  /* Set to non-zero if we got bad count information.  */
451  int bad_counts = 0;
452
453  /* At the start of a function, write the function name to the .bb file.  */
454  if (flag_test_coverage)
455    output_gcov_string (current_function_name, (long) -2);
456
457  /* Execute this only if doing arc profiling or branch probabilities.  */
458  if (! profile_arc_flag && ! flag_branch_probabilities
459      && ! flag_test_coverage)
460    abort ();
461
462  total_num_times_called++;
463
464  /* Create an array label_to_bb of ints of size max_label_num.  */
465  label_to_bb_size = max_label_num ();
466  label_to_bb = (int *) oballoc (label_to_bb_size * sizeof (int));
467  bzero ((char *) label_to_bb, label_to_bb_size * sizeof (int));
468
469  /* Scan the insns in the function, count the number of basic blocks
470     present.  When a code label is passed, set label_to_bb[label] = bb
471     number.  */
472
473  /* The first block found will be block 1, so that function entry can be
474     block 0.  */
475
476  {
477    register RTX_CODE prev_code = JUMP_INSN;
478    register RTX_CODE code;
479    register rtx insn;
480    register int i;
481    int block_separator_emitted = 0;
482
483    ignore_next_note = 0;
484
485    for (insn = NEXT_INSN (f), i = 0; insn; insn = NEXT_INSN (insn))
486      {
487        code = GET_CODE (insn);
488
489        if (code == BARRIER)
490          ;
491        else if (code == CODE_LABEL)
492          /* This label is part of the next block, but we can't increment
493             block number yet since there might be multiple labels.  */
494          label_to_bb[CODE_LABEL_NUMBER (insn)] = i + 1;
495        /* We make NOTE_INSN_SETJMP notes into a block of their own, so that
496           they can be the target of the fake arc for the setjmp call.
497           This avoids creating cycles of fake arcs, which would happen if
498           the block after the setjmp call contained a call insn.  */
499        else if ((prev_code == JUMP_INSN || prev_code == CALL_INSN
500                  || prev_code == CODE_LABEL || prev_code == BARRIER)
501                 && (GET_RTX_CLASS (code) == 'i'
502                     || (code == NOTE
503                         && NOTE_LINE_NUMBER (insn) == NOTE_INSN_SETJMP)))
504          {
505            i += 1;
506
507            /* Emit the block separator if it hasn't already been emitted.  */
508            if (flag_test_coverage && ! block_separator_emitted)
509              {
510                /* Output a zero to the .bb file to indicate that a new
511                   block list is starting.  */
512                __write_long (0, bb_file, 4);
513              }
514            block_separator_emitted = 0;
515          }
516        /* If flag_test_coverage is true, then we must add an entry to the
517           .bb file for every note.  */
518        else if (code == NOTE && flag_test_coverage)
519          {
520            /* Must ignore the line number notes that immediately follow the
521               end of an inline function to avoid counting it twice.  There
522               is a note before the call, and one after the call.  */
523            if (NOTE_LINE_NUMBER (insn) == NOTE_REPEATED_LINE_NUMBER)
524              ignore_next_note = 1;
525            else if (NOTE_LINE_NUMBER (insn) > 0)
526              {
527                if (ignore_next_note)
528                  ignore_next_note = 0;
529                else
530                  {
531                    /* Emit a block separator here to ensure that a NOTE
532                       immediately following a JUMP_INSN or CALL_INSN will end
533                       up in the right basic block list.  */
534                    if ((prev_code == JUMP_INSN || prev_code == CALL_INSN
535                         || prev_code == CODE_LABEL || prev_code == BARRIER)
536                        && ! block_separator_emitted)
537                      {
538                        /* Output a zero to the .bb file to indicate that
539                           a new block list is starting.  */
540                        __write_long (0, bb_file, 4);
541
542                        block_separator_emitted = 1;
543                      }
544                   
545                    /* If this is a new source file, then output the file's
546                       name to the .bb file.  */
547                    if (! last_bb_file_name
548                        || strcmp (NOTE_SOURCE_FILE (insn),
549                                   last_bb_file_name))
550                      {
551                        if (last_bb_file_name)
552                          free (last_bb_file_name);
553                        last_bb_file_name
554                          = xmalloc (strlen (NOTE_SOURCE_FILE (insn)) + 1);
555                        strcpy (last_bb_file_name, NOTE_SOURCE_FILE (insn));
556                        output_gcov_string (NOTE_SOURCE_FILE (insn), (long)-1);
557                      }
558
559                    /* Output the line number to the .bb file.  Must be done
560                       after the output_bb_profile_data() call, and after the
561                       file name is written, to ensure that it is correctly
562                       handled by gcov.  */
563                    __write_long (NOTE_LINE_NUMBER (insn), bb_file, 4);
564                  }
565              }
566          }
567
568        if (code != NOTE)
569          prev_code = code;
570        else if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_SETJMP)
571          prev_code = CALL_INSN;
572      }
573
574    /* Allocate last `normal' entry for bb_graph.  */
575
576    /* The last insn was a jump, call, or label.  In that case we have
577       a block at the end of the function with no insns.  */
578    if (prev_code == JUMP_INSN || prev_code == CALL_INSN
579        || prev_code == CODE_LABEL || prev_code == BARRIER)
580      {
581        i++;
582
583        /* Emit the block separator if it hasn't already been emitted.  */
584        if (flag_test_coverage && ! block_separator_emitted)
585          {
586            /* Output a zero to the .bb file to indicate that a new
587               block list is starting.  */
588            __write_long (0, bb_file, 4);
589          }
590      }
591
592    /* Create another block to stand for EXIT, and make all return insns, and
593       the last basic block point here.  Add one more to account for block
594       zero.  */
595    num_blocks = i + 2;
596  }
597
598  total_num_blocks += num_blocks;
599  if (dump_file)
600    fprintf (dump_file, "%d basic blocks\n", num_blocks);
601
602  /* If we are only doing test coverage here, then return now.  */
603  if (! profile_arc_flag && ! flag_branch_probabilities)
604    return;
605
606  /* Create and initialize the arrays that will hold bb_graph
607     and execution count info.  */
608
609  bb_graph = (struct bb_info *) alloca (num_blocks * sizeof (struct bb_info));
610  bzero ((char *) bb_graph, (sizeof (struct bb_info) * num_blocks));
611
612  {
613    /* Scan the insns again:
614       - at the entry to each basic block, increment the predecessor count
615       (and successor of previous block) if it is a fall through entry,
616       create adj_list entries for this and the previous block
617       - at each jump insn, increment predecessor/successor counts for
618       target/source basic blocks, add this insn to pred/succ lists.
619
620       This also cannot be broken out as a separate subroutine
621       because it uses `alloca'.  */
622
623    register RTX_CODE prev_code = JUMP_INSN;
624    register RTX_CODE code;
625    register rtx insn;
626    register int i;
627    int fall_through = 0;
628    struct adj_list *arcptr;
629    int dest;
630
631    /* Block 0 always falls through to block 1.  */
632    num_arcs = 0;
633    arcptr = (struct adj_list *) alloca (sizeof (struct adj_list));
634    init_arc (arcptr, 0, 1, 0);
635    arcptr->fall_through = 1;
636    num_arcs++;
637
638    /* Add a fake fall through arc from the last block to block 0, to make the
639       graph complete.  */
640    arcptr = (struct adj_list *) alloca (sizeof (struct adj_list));
641    init_arc (arcptr, num_blocks - 1, 0, 0);
642    arcptr->fake = 1;
643    num_arcs++;
644
645    /* Exit must be one node of the graph, and all exits from the function
646       must point there.  When see a return branch, must point the arc to the
647       exit node.  */
648
649    /* Must start scan with second insn in function as above.  */
650    for (insn = NEXT_INSN (f), i = 0; insn; insn = NEXT_INSN (insn))
651      {
652        code = GET_CODE (insn);
653
654        if (code == BARRIER)
655          fall_through = 0;
656        else if (code == CODE_LABEL)
657          ;
658        /* We make NOTE_INSN_SETJMP notes into a block of their own, so that
659           they can be the target of the fake arc for the setjmp call.
660           This avoids creating cycles of fake arcs, which would happen if
661           the block after the setjmp call ended with a call.  */
662        else if ((prev_code == JUMP_INSN || prev_code == CALL_INSN
663                  || prev_code == CODE_LABEL || prev_code == BARRIER)
664                 && (GET_RTX_CLASS (code) == 'i'
665                     || (code == NOTE
666                         && NOTE_LINE_NUMBER (insn) == NOTE_INSN_SETJMP)))
667          {
668            /* This is the first insn of the block.  */
669            i += 1;
670            if (fall_through)
671              {
672                arcptr = (struct adj_list *) alloca (sizeof (struct adj_list));
673                init_arc (arcptr, i - 1, i, 0);
674                arcptr->fall_through = 1;
675
676                num_arcs++;
677              }
678            fall_through = 1;
679            bb_graph[i].first_insn = insn;
680          }
681        else if (code == NOTE)
682          ;
683
684        if (code == CALL_INSN)
685          {
686            /* In the normal case, the call returns, and this is just like
687               a branch fall through.  */
688            fall_through = 1;
689
690            /* Setjmp may return more times than called, so to make the graph
691               solvable, add a fake arc from the function entrance to the
692               next block.
693
694               All other functions may return fewer times than called (if
695               a descendant calls longjmp or exit), so to make the graph
696               solvable, add a fake arc to the function exit from the
697               current block.
698
699               Distinguish the cases by checking for a NOTE_INSN_SETJMP note.
700               A call_insn can be the last insn of a function, so must check
701               to see if next insn actually exists.  */
702            arcptr = (struct adj_list *) alloca (sizeof (struct adj_list));
703            if (NEXT_INSN (insn)
704                && GET_CODE (NEXT_INSN (insn)) == NOTE
705                && NOTE_LINE_NUMBER (NEXT_INSN (insn)) == NOTE_INSN_SETJMP)
706              init_arc (arcptr, 0, i+1, insn);
707            else
708              init_arc (arcptr, i, num_blocks-1, insn);
709            arcptr->fake = 1;
710            num_arcs++;
711          }
712        else if (code == JUMP_INSN)
713          {
714            rtx tem, pattern = PATTERN (insn);
715            rtx tablejump = 0;
716
717            /* If running without optimization, then jump label won't be valid,
718               so we must search for the destination label in that case.
719               We have to handle tablejumps and returns specially anyways, so
720               we don't check the JUMP_LABEL at all here.  */
721
722            if (GET_CODE (pattern) == PARALLEL)
723              {
724                /* This assumes that PARALLEL jumps are tablejump entry
725                   jumps.  */
726                /* Make an arc from this jump to the label of the
727                   jump table.  This will instrument the number of
728                   times the switch statement is executed.  */
729                if (GET_CODE (XVECEXP (pattern, 0, 1)) == USE)
730                  {
731                    tem = XEXP (XVECEXP (pattern, 0, 1), 0);
732                    if (GET_CODE (tem) != LABEL_REF)
733                      abort ();
734                    dest = label_to_bb[CODE_LABEL_NUMBER (XEXP (tem, 0))];
735                  }
736                else if (GET_CODE (XVECEXP (pattern, 0, 0)) == SET
737                         && SET_DEST (XVECEXP (pattern, 0, 0)) == pc_rtx)
738                  {
739                    tem = SET_SRC (XVECEXP (pattern, 0, 0));
740                    if (GET_CODE (tem) == PLUS
741                        && GET_CODE (XEXP (tem, 1)) == LABEL_REF)
742                      {
743                        tem = XEXP (tem, 1);
744                        dest = label_to_bb [CODE_LABEL_NUMBER (XEXP (tem, 0))];
745                      }
746                  }
747                else
748                  abort ();
749              }
750            else if (GET_CODE (pattern) == ADDR_VEC
751                     || GET_CODE (pattern) == ADDR_DIFF_VEC)
752              tablejump = pattern;
753            else if (GET_CODE (pattern) == RETURN)
754              dest = num_blocks - 1;
755            else if ((tem = SET_SRC (pattern))
756                     && GET_CODE (tem) == LABEL_REF)
757              dest = label_to_bb[CODE_LABEL_NUMBER (XEXP (tem, 0))];
758            else
759              {
760                rtx label_ref;
761
762                /* Must be an IF_THEN_ELSE branch.  */
763                if (GET_CODE (tem) != IF_THEN_ELSE)
764                  abort ();
765                if (XEXP (tem, 1) != pc_rtx)
766                  label_ref = XEXP (tem, 1);
767                else
768                  label_ref = XEXP (tem, 2);
769                dest = label_to_bb[CODE_LABEL_NUMBER (XEXP (label_ref, 0))];
770              }
771
772            if (tablejump)
773              {
774                int diff_vec_p = GET_CODE (tablejump) == ADDR_DIFF_VEC;
775                int len = XVECLEN (tablejump, diff_vec_p);
776                int k;
777
778                for (k = 0; k < len; k++)
779                  {
780                    rtx tem = XEXP (XVECEXP (tablejump, diff_vec_p, k), 0);
781                    dest = label_to_bb[CODE_LABEL_NUMBER (tem)];
782
783                    arcptr = (struct adj_list *) alloca (sizeof(struct adj_list));
784                    init_arc (arcptr, i, dest, insn);
785
786                    num_arcs++;
787                  }
788              }
789            else
790              {
791                arcptr = (struct adj_list *) alloca (sizeof (struct adj_list));
792                init_arc (arcptr, i, dest, insn);
793
794                num_arcs++;
795              }
796
797            /* Determine whether or not this jump will fall through.
798               Unconditional jumps and returns are not always followed by
799               barriers.  */
800            pattern = PATTERN (insn);
801            if (GET_CODE (pattern) == PARALLEL
802                || GET_CODE (pattern) == RETURN)
803              fall_through = 0;
804            else if (GET_CODE (pattern) == ADDR_VEC
805                     || GET_CODE (pattern) == ADDR_DIFF_VEC)
806              /* These aren't actually jump insns, but they never fall
807                 through, so...  */
808              fall_through = 0;
809            else
810              {
811                if (GET_CODE (pattern) != SET || SET_DEST (pattern) != pc_rtx)
812                  abort ();
813                if (GET_CODE (SET_SRC (pattern)) != IF_THEN_ELSE)
814                  fall_through = 0;
815              }
816          }
817
818        if (code != NOTE)
819          prev_code = code;
820        else if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_SETJMP)
821          prev_code = CALL_INSN;
822      }
823
824    /* If the code at the end of the function would give a new block, then
825       do the following.  */
826
827    if (prev_code == JUMP_INSN || prev_code == CALL_INSN
828        || prev_code == CODE_LABEL || prev_code == BARRIER)
829      {
830        if (fall_through)
831          {
832            arcptr = (struct adj_list *) alloca (sizeof (struct adj_list));
833            init_arc (arcptr, i, i + 1, 0);
834            arcptr->fall_through = 1;
835
836            num_arcs++;
837          }
838         
839        /* This may not be a real insn, but that should not cause a problem.  */
840        bb_graph[i+1].first_insn = get_last_insn ();
841      }
842
843    /* There is always a fake arc from the last block of the function
844       to the function exit block.  */
845    arcptr = (struct adj_list *) alloca (sizeof (struct adj_list));
846    init_arc (arcptr, num_blocks-2, num_blocks-1, 0);
847    arcptr->fake = 1;
848    num_arcs++;
849  }
850
851  total_num_arcs += num_arcs;
852  if (dump_file)
853    fprintf (dump_file, "%d arcs\n", num_arcs);
854
855  /* Create spanning tree from basic block graph, mark each arc that is
856     on the spanning tree.  */
857
858  /* To reduce the instrumentation cost, make two passes over the tree.
859     First, put as many must-split (crowded and fake) arcs on the tree as
860     possible, then on the second pass fill in the rest of the tree.
861     Note that the spanning tree is considered undirected, so that as many
862     must-split arcs as possible can be put on it.
863
864     Fallthrough arcs which are crowded should not be chosen on the first
865     pass, since they do not require creating a new basic block.  These
866     arcs will have fall_through set.  */
867
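  /* Only the arcs left off the spanning tree are instrumented (see
     instrument_arcs); the counts of the tree arcs are recovered later by
     flow conservation, since at every block the incoming and outgoing
     counts must balance.  */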
868  find_spanning_tree (num_blocks);
869
870  /* Create a .bbg file from which gcov can reconstruct the basic block
871     graph.  First output the number of basic blocks, and then for every
872     arc output the source and target basic block numbers.
873     NOTE: The format of this file must be compatible with gcov.  */
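  /* The layout produced below is: number of blocks, number of arcs, then
     for each block the number of successor arcs followed by one (target
     block, flag word) pair per arc, and finally a -1 separator word.  The
     flag word marks on-tree (0x1), fake (0x2) and fall-through (0x4)
     arcs.  */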
874
875  if (flag_test_coverage)
876    {
877      int flag_bits;
878
879      __write_long (num_blocks, bbg_file, 4);
880      __write_long (num_arcs, bbg_file, 4);
881
882      for (i = 0; i < num_blocks; i++)
883        {
884          long count = 0;
885          for (arcptr = bb_graph[i].succ; arcptr; arcptr = arcptr->succ_next)
886            count++;
887          __write_long (count, bbg_file, 4);
888
889          for (arcptr = bb_graph[i].succ; arcptr; arcptr = arcptr->succ_next)
890            {
891              flag_bits = 0;
892              if (arcptr->on_tree)
893                flag_bits |= 0x1;
894              if (arcptr->fake)
895                flag_bits |= 0x2;
896              if (arcptr->fall_through)
897                flag_bits |= 0x4;
898
899              __write_long (ARC_TARGET (arcptr), bbg_file, 4);
900              __write_long (flag_bits, bbg_file, 4);
901            }
902        }
903
904      /* Emit a -1 to separate the list of all arcs from the list of
905         loop back edges that follows.  */
906      __write_long (-1, bbg_file, 4);
907    }
908
909  /* For each arc not on the spanning tree, add counting code as rtl.  */
910
911  if (profile_arc_flag)
912    instrument_arcs (f, num_blocks, dump_file);
913
914  /* Execute the rest only if doing branch probabilities.  */
915  if (! flag_branch_probabilities)
916    return;
917
918  /* For each arc not on the spanning tree, set its execution count from
919     the .da file.  */
920
921  /* The first count in the .da file is the number of times that the function
922     was entered.  This is the exec_count for block zero.  */
923
924  num_arcs = 0;
925  for (i = 0; i < num_blocks; i++)
926    for (arcptr = bb_graph[i].succ; arcptr; arcptr = arcptr->succ_next)
927      if (! arcptr->on_tree)
928        {
929          num_arcs++;
930          if (da_file)
931            {
932              long value;
933              __read_long (&value, da_file, 8);
934              ARC_COUNT (arcptr) = value;
935            }
936          else
937            ARC_COUNT (arcptr) = 0;
938          arcptr->count_valid = 1;
939          bb_graph[i].succ_count--;
940          bb_graph[ARC_TARGET (arcptr)].pred_count--;
941        }
942
943  if (dump_file)
944    fprintf (dump_file, "%d arc counts read\n", num_arcs);
945
946  /* For every block in the file,
947     - if every exit/entrance arc has a known count, then set the block count
948     - if the block count is known, and every exit/entrance arc but one has
949       a known execution count, then set the count of the remaining arc
950
951     As arc counts are set, decrement the succ/pred count, but don't delete
952     the arc, that way we can easily tell when all arcs are known, or only
953     one arc is unknown.  */
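  /* For example, if a block's execution count is known to be 100 and all
     but one of its outgoing arcs have known counts summing to 60, the
     remaining arc must have been taken 100 - 60 = 40 times.  */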
954
955  /* The order that the basic blocks are iterated through is important.
956     Since the code that finds spanning trees starts with block 0, low numbered
957     arcs are put on the spanning tree in preference to high numbered arcs.
958     Hence, most instrumented arcs are at the end.  Graph solving works much
959     faster if we propagate numbers from the end to the start.
960     
961     This takes an average of slightly more than 3 passes.  */
962
963  changes = 1;
964  passes = 0;
965  while (changes)
966    {
967      passes++;
968      changes = 0;
969
970      for (i = num_blocks - 1; i >= 0; i--)
971        {
972          struct bb_info *binfo = &bb_graph[i];
973          if (! binfo->count_valid)
974            {
975              if (binfo->succ_count == 0)
976                {
977                  total = 0;
978                  for (arcptr = binfo->succ; arcptr;
979                       arcptr = arcptr->succ_next)
980                    total += ARC_COUNT (arcptr);
981                  binfo->exec_count = total;
982                  binfo->count_valid = 1;
983                  changes = 1;
984                }
985              else if (binfo->pred_count == 0)
986                {
987                  total = 0;
988                  for (arcptr = binfo->pred; arcptr;
989                       arcptr = arcptr->pred_next)
990                    total += ARC_COUNT (arcptr);
991                  binfo->exec_count = total;
992                  binfo->count_valid = 1;
993                  changes = 1;
994                }
995            }
996          if (binfo->count_valid)
997            {
998              if (binfo->succ_count == 1)
999                {
1000                  total = 0;
1001                  /* One of the counts will be invalid, but it is zero,
1002                     so adding it in also doesn't hurt.  */
1003                  for (arcptr = binfo->succ; arcptr;
1004                       arcptr = arcptr->succ_next)
1005                    total += ARC_COUNT (arcptr);
1006                  /* Calculate count for remaining arc by conservation.  */
1007                  total = binfo->exec_count - total;
1008                  /* Search for the invalid arc, and set its count.  */
1009                  for (arcptr = binfo->succ; arcptr;
1010                       arcptr = arcptr->succ_next)
1011                    if (! arcptr->count_valid)
1012                      break;
1013                  if (! arcptr)
1014                    abort ();
1015                  arcptr->count_valid = 1;
1016                  ARC_COUNT (arcptr) = total;
1017                  binfo->succ_count--;
1018                 
1019                  bb_graph[ARC_TARGET (arcptr)].pred_count--;
1020                  changes = 1;
1021                }
1022              if (binfo->pred_count == 1)
1023                {
1024                  total = 0;
1025                  /* One of the counts will be invalid, but it is zero,
1026                     so adding it in also doesn't hurt.  */
1027                  for (arcptr = binfo->pred; arcptr;
1028                       arcptr = arcptr->pred_next)
1029                    total += ARC_COUNT (arcptr);
1030                  /* Calculate count for remaining arc by conservation.  */
1031                  total = binfo->exec_count - total;
1032                  /* Search for the invalid arc, and set its count.  */
1033                  for (arcptr = binfo->pred; arcptr;
1034                       arcptr = arcptr->pred_next)
1035                    if (! arcptr->count_valid)
1036                      break;
1037                  if (! arcptr)
1038                    abort ();
1039                  arcptr->count_valid = 1;
1040                  ARC_COUNT (arcptr) = total;
1041                  binfo->pred_count--;
1042                 
1043                  bb_graph[ARC_SOURCE (arcptr)].succ_count--;
1044                  changes = 1;
1045                }
1046            }
1047        }
1048    }
1049
1050  total_num_passes += passes;
1051  if (dump_file)
1052    fprintf (dump_file, "Graph solving took %d passes.\n\n", passes);
1053
1054  /* If the graph has been correctly solved, every block will have a
1055     succ and pred count of zero.  */
1056  for (i = 0; i < num_blocks; i++)
1057    {
1058      struct bb_info *binfo = &bb_graph[i];
1059      if (binfo->succ_count || binfo->pred_count)
1060        abort ();
1061    }
1062
1063  /* For every arc, calculate its branch probability and add a reg_note
1064     to the branch insn to indicate this.  */
1065
1066  for (i = 0; i < 20; i++)
1067    hist_br_prob[i] = 0;
1068  num_never_executed = 0;
1069  num_branches = 0;
1070
1071  for (i = 0; i < num_blocks; i++)
1072    {
1073      struct bb_info *binfo = &bb_graph[i];
1074
1075      total = binfo->exec_count;
1076      for (arcptr = binfo->succ; arcptr; arcptr = arcptr->succ_next)
1077        {
1078          if (arcptr->branch_insn)
1079            {
1080              /* This calculates the branch probability as an integer between
1081                 0 and REG_BR_PROB_BASE, properly rounded to the nearest
1082                 integer.  Perform the arithmetic in double to avoid
1083                 overflowing the range of ints.  */
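              /* E.g., assuming REG_BR_PROB_BASE is 10000 (its usual
                 value), an arc taken 3 times out of a block count of 4
                 yields (3 * 10000 + 2) / 4, i.e. a prob of 7500.  */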
1084
1085              if (total == 0)
1086                prob = -1;
1087              else
1088                {
1089                  rtx pat = PATTERN (arcptr->branch_insn);
1090                 
1091                  prob = (((double)ARC_COUNT (arcptr) * REG_BR_PROB_BASE)
1092                          + (total >> 1)) / total;
1093                  if (prob < 0 || prob > REG_BR_PROB_BASE)
1094                    {
1095                      if (dump_file)
1096                        fprintf (dump_file, "bad count: prob for %d-%d thought to be %d (forcibly normalized)\n",
1097                                 ARC_SOURCE (arcptr), ARC_TARGET (arcptr),
1098                                 prob);
1099
1100                      bad_counts = 1;
1101                      prob = REG_BR_PROB_BASE / 2;
1102                    }
1103                 
1104                  /* Match up probability with JUMP pattern.  */
1105
1106                  if (GET_CODE (pat) == SET
1107                      && GET_CODE (SET_SRC (pat)) == IF_THEN_ELSE)
1108                    {
1109                      if (ARC_TARGET (arcptr) == ARC_SOURCE (arcptr) + 1)
1110                        {
1111                          /* A fall through arc should never have a
1112                             branch insn.  */
1113                          abort ();
1114                        }
1115                      else
1116                        {
1117                          /* This is the arc for the taken branch.  */
1118                          if (GET_CODE (XEXP (SET_SRC (pat), 2)) != PC)
1119                            prob = REG_BR_PROB_BASE - prob;
1120                        }
1121                    }
1122                }
1123             
1124              if (prob == -1)
1125                num_never_executed++;
1126              else
1127                {
1128                  int index = prob * 20 / REG_BR_PROB_BASE;
1129                  if (index == 20)
1130                    index = 19;
1131                  hist_br_prob[index]++;
1132                }
1133              num_branches++;
1134             
1135              REG_NOTES (arcptr->branch_insn)
1136                = gen_rtx (EXPR_LIST, REG_BR_PROB, GEN_INT (prob),
1137                           REG_NOTES (arcptr->branch_insn));
1138            }
1139        }
1140
1141      /* Add a REG_EXEC_COUNT note to the first instruction of this block.  */
1142      if (! binfo->first_insn
1143          || GET_RTX_CLASS (GET_CODE (binfo->first_insn)) != 'i')
1144        {
1145          /* Block 0 is a fake block representing function entry, and does
1146             not have a real first insn.  The second last block might not
1147             begin with a real insn.  */
1148          if (i == num_blocks - 1)
1149            return_label_execution_count = total;
1150          else if (i != 0 && i != num_blocks - 2)
1151            abort ();
1152        }
1153      else
1154        {
1155          REG_NOTES (binfo->first_insn)
1156            = gen_rtx (EXPR_LIST, REG_EXEC_COUNT, GEN_INT (total),
1157                       REG_NOTES (binfo->first_insn));
1158          if (i == num_blocks - 1)
1159            return_label_execution_count = total;
1160        }
1161    }
1162 
1163  /* This should never happen.  */
1164  if (bad_counts)
1165    warning ("Arc profiling: some arc counts were bad.");
1166
1167  if (dump_file)
1168    {
1169      fprintf (dump_file, "%d branches\n", num_branches);
1170      fprintf (dump_file, "%d branches never executed\n",
1171               num_never_executed);
1172      if (num_branches)
1173        for (i = 0; i < 10; i++)
1174          fprintf (dump_file, "%d%% branches in range %d-%d%%\n",
1175                   (hist_br_prob[i]+hist_br_prob[19-i])*100/num_branches,
1176                   5*i, 5*i+5);
1177
1178      total_num_branches += num_branches;
1179      total_num_never_executed += num_never_executed;
1180      for (i = 0; i < 20; i++)
1181        total_hist_br_prob[i] += hist_br_prob[i];
1182    }
1183
1184}
1185
1186/* Initialize a new arc.
1187   ARCPTR is the empty adj_list this function fills in.
1188   SOURCE is the block number of the source block.
1189   TARGET is the block number of the target block.
1190   INSN is the insn which transfers control from SOURCE to TARGET,
1191   or zero if the transfer is implicit.  */
1192
1193static void
1194init_arc (arcptr, source, target, insn)
1195     struct adj_list *arcptr;
1196     int source, target;
1197     rtx insn;
1198{
1199  ARC_TARGET (arcptr) = target;
1200  ARC_SOURCE (arcptr) = source;
1201
1202  ARC_COUNT (arcptr) = 0;
1203  arcptr->count_valid = 0;
1204  arcptr->on_tree = 0;
1205  arcptr->fake = 0;
1206  arcptr->fall_through = 0;
1207  arcptr->branch_insn = insn;
1208
1209  arcptr->succ_next = bb_graph[source].succ;
1210  bb_graph[source].succ = arcptr;
1211  bb_graph[source].succ_count++;
1212
1213  arcptr->pred_next = bb_graph[target].pred;
1214  bb_graph[target].pred = arcptr;
1215  bb_graph[target].pred_count++;
1216}
1217
1218/* This function searches all of the arcs in the program flow graph, and puts
1219   as many bad arcs as possible onto the spanning tree.  Bad arcs include
1220   fake arcs (needed for setjmp(), longjmp(), exit()) which MUST be on the
1221   spanning tree as they can't be instrumented.  Also, arcs which must be
1222   split when instrumented should be part of the spanning tree if possible.  */
1223
1224static void
1225find_spanning_tree (num_blocks)
1226     int num_blocks;
1227{
1228  int i;
1229  struct adj_list *arcptr;
1230  struct bb_info *binfo = &bb_graph[0];
1231
1232  /* Fake arcs must be part of the spanning tree, and are always safe to put
1233     on the spanning tree.  Fake arcs will either be a successor of node 0,
1234     a predecessor of the last node, or from the last node to node 0.  */
1235
1236  for (arcptr = bb_graph[0].succ; arcptr; arcptr = arcptr->succ_next)
1237    if (arcptr->fake)
1238      {
1239        /* Adding this arc should never cause a cycle.  This is a fatal
1240           error if it would.  */
1241        if (bb_graph[ARC_TARGET (arcptr)].on_tree && binfo->on_tree)
1242          abort();
1243        else
1244          {
1245            arcptr->on_tree = 1;
1246            bb_graph[ARC_TARGET (arcptr)].on_tree = 1;
1247            binfo->on_tree = 1;
1248          }
1249      }
1250
1251  binfo = &bb_graph[num_blocks-1];
1252  for (arcptr = binfo->pred; arcptr; arcptr = arcptr->pred_next)
1253    if (arcptr->fake)
1254      {
1255        /* Adding this arc should never cause a cycle.  This is a fatal
1256           error if it would.  */
1257        if (bb_graph[ARC_SOURCE (arcptr)].on_tree && binfo->on_tree)
1258          abort();
1259        else
1260          {
1261            arcptr->on_tree = 1;
1262            bb_graph[ARC_SOURCE (arcptr)].on_tree = 1;
1263            binfo->on_tree = 1;
1264          }
1265      }
1266  /* The only entrance to node zero is a fake arc.  */
1267  bb_graph[0].pred->on_tree = 1;
1268 
1269  /* Arcs which are crowded at both the source and target should be put on
1270     the spanning tree if possible, except for fall_through arcs, which never
1271     require adding a new block even if crowded, and arcs with the same source
1272     and dest, which must always be instrumented.  */
1273  for (i = 0; i < num_blocks; i++)
1274    {
1275      binfo = &bb_graph[i];
1276
1277      for (arcptr = binfo->succ; arcptr; arcptr = arcptr->succ_next)
1278        if (! ((binfo->succ == arcptr && arcptr->succ_next == 0)
1279               || (bb_graph[ARC_TARGET (arcptr)].pred
1280                   && arcptr->pred_next == 0))
1281            && ! arcptr->fall_through
1282            && ARC_TARGET (arcptr) != i)
1283          {
1284            /* This is a crowded arc at both source and target.  Try to put
1285               it on the spanning tree.  Can do this if either the source or
1286               target block is not yet on the tree.  */
1287            if (! bb_graph[ARC_TARGET (arcptr)].on_tree || ! binfo->on_tree)
1288              {
1289                arcptr->on_tree = 1;
1290                bb_graph[ARC_TARGET (arcptr)].on_tree = 1;
1291                binfo->on_tree = 1;
1292              }
1293          }
1294    }
1295
1296  /* Clear all of the basic block on_tree bits, so that we can use them to
1297     create the spanning tree.  */
1298  for (i = 0; i < num_blocks; i++)
1299    bb_graph[i].on_tree = 0;
1300
1301  /* Now fill in the spanning tree until every basic block is on it.
1302     Don't put the 0 to 1 fall through arc on the tree, since it is
1303     always cheap to instrument, so start filling the tree from node 1.  */
1304
1305  for (i = 1; i < num_blocks; i++)
1306    for (arcptr = bb_graph[i].succ; arcptr; arcptr = arcptr->succ_next)
1307      if (! arcptr->on_tree
1308          && ! bb_graph[ARC_TARGET (arcptr)].on_tree)
1309        {
1310          fill_spanning_tree (i);
1311          break;
1312        }
1313}
1314
1315/* Add arcs reached from BLOCK to the spanning tree if they are needed and
1316   not already there.  */
1317
1318static void
1319fill_spanning_tree (block)
1320     int block;
1321{
1322  struct adj_list *arcptr;
1323 
1324  expand_spanning_tree (block);
1325
1326  for (arcptr = bb_graph[block].succ; arcptr; arcptr = arcptr->succ_next)
1327    if (! arcptr->on_tree
1328        && ! bb_graph[ARC_TARGET (arcptr)].on_tree)
1329      {
1330        arcptr->on_tree = 1;
1331        fill_spanning_tree (ARC_TARGET (arcptr));
1332      }
1333}
1334
1335/* When first visit a block, must add all blocks that are already connected
1336   to this block via tree arcs to the spanning tree.  */
1337
1338static void
1339expand_spanning_tree (block)
1340     int block;
1341{
1342  struct adj_list *arcptr;
1343
1344  bb_graph[block].on_tree = 1;
1345
1346  for (arcptr = bb_graph[block].succ; arcptr; arcptr = arcptr->succ_next)
1347    if (arcptr->on_tree && ! bb_graph[ARC_TARGET (arcptr)].on_tree)
1348      expand_spanning_tree (ARC_TARGET (arcptr));
1349   
1350  for (arcptr = bb_graph[block].pred;
1351       arcptr; arcptr = arcptr->pred_next)
1352    if (arcptr->on_tree && ! bb_graph[ARC_SOURCE (arcptr)].on_tree)
1353      expand_spanning_tree (ARC_SOURCE (arcptr));
1354}
1355
1356/* Perform file-level initialization for branch-prob processing.  */
1357
1358void
1359init_branch_prob (filename)
1360     char *filename;
1361{
1362  long len;
1363  int i;
1364
1365  if (flag_test_coverage)
1366    {
1367      /* Open an output file for the basic block/line number map.  */
1368      int len = strlen (filename);
1369      char *data_file = (char *) alloca (len + 4);
1370      strcpy (data_file, filename);
1371      strip_off_ending (data_file, len);
1372      strcat (data_file, ".bb");
1373      if ((bb_file = fopen (data_file, "w")) == 0)
1374        pfatal_with_name (data_file);
1375
1376      /* Open an output file for the program flow graph.  */
1377      len = strlen (filename);
1378      bbg_file_name = (char *) alloca (len + 5);
1379      strcpy (bbg_file_name, filename);
1380      strip_off_ending (bbg_file_name, len);
1381      strcat (bbg_file_name, ".bbg");
1382      if ((bbg_file = fopen (bbg_file_name, "w")) == 0)
1383        pfatal_with_name (bbg_file_name);
1384
1385      /* Initialize to zero, to ensure that the first file name will be
1386         written to the .bb file.  */
1387      last_bb_file_name = 0;
1388    }
1389
1390  if (flag_branch_probabilities)
1391    {
1392      len = strlen (filename);
1393      da_file_name = (char *) alloca (len + 4);
1394      strcpy (da_file_name, filename);
1395      strip_off_ending (da_file_name, len);
1396      strcat (da_file_name, ".da");
1397      if ((da_file = fopen (da_file_name, "r")) == 0)
1398        warning ("file %s not found, execution counts assumed to be zero.",
1399                 da_file_name);
1400
1401      /* The first word in the .da file gives the number of instrumented arcs,
1402         which is not needed for our purposes.  */
1403
1404      if (da_file)
1405        __read_long (&len, da_file, 8);
1406    }
1407
1408  if (profile_arc_flag)
1409    init_arc_profiler ();
1410
1411  total_num_blocks = 0;
1412  total_num_arcs = 0;
1413  total_num_arcs_instrumented = 0;
1414  total_num_blocks_created = 0;
1415  total_num_passes = 0;
1416  total_num_times_called = 0;
1417  total_num_branches = 0;
1418  total_num_never_executed = 0;
1419  for (i = 0; i < 20; i++)
1420    total_hist_br_prob[i] = 0;
1421}
1422
1423/* Performs file-level cleanup after branch-prob processing
1424   is completed.  */
1425
1426void
1427end_branch_prob (dump_file)
1428     FILE *dump_file;
1429{
1430  if (flag_test_coverage)
1431    {
1432      fclose (bb_file);
1433      fclose (bbg_file);
1434    }
1435
1436  if (flag_branch_probabilities)
1437    {
1438      if (da_file)
1439        {
1440          long temp;
1441          /* This seems slightly dangerous, as it presumes the EOF
1442             flag will not be set until an attempt is made to read
1443             past the end of the file. */
1444          if (feof (da_file))
1445            warning (".da file contents exhausted too early\n");
1446          /* Should be at end of file now.  */
1447          if (__read_long (&temp, da_file, 8) == 0)
1448            warning (".da file contents not exhausted\n");
1449          fclose (da_file);
1450        }
1451    }
1452
1453  if (dump_file)
1454    {
1455      fprintf (dump_file, "\n");
1456      fprintf (dump_file, "Total number of blocks: %d\n", total_num_blocks);
1457      fprintf (dump_file, "Total number of arcs: %d\n", total_num_arcs);
1458      fprintf (dump_file, "Total number of instrumented arcs: %d\n",
1459               total_num_arcs_instrumented);
1460      fprintf (dump_file, "Total number of blocks created: %d\n",
1461               total_num_blocks_created);
1462      fprintf (dump_file, "Total number of graph solution passes: %d\n",
1463               total_num_passes);
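      /* Adding half the divisor before dividing rounds the average to
         the nearest integer instead of truncating it.  */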
1464      if (total_num_times_called != 0)
1465        fprintf (dump_file, "Average number of graph solution passes: %d\n",
1466                 (total_num_passes + (total_num_times_called  >> 1))
1467                 / total_num_times_called);
1468      fprintf (dump_file, "Total number of branches: %d\n", total_num_branches);
1469      fprintf (dump_file, "Total number of branches never executed: %d\n",
1470               total_num_never_executed);
1471      if (total_num_branches)
1472        {
1473          int i;
1474
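          /* Presumably buckets i and 19-i of the histogram are symmetric
             about 50%, so a branch taken with probability p is reported in
             the same 5% range as one taken with probability 100-p; each
             line below therefore counts branches by the probability of
             their less likely direction.  */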
1475          for (i = 0; i < 10; i++)
1476            fprintf (dump_file, "%d%% branches in range %d-%d%%\n",
1477                     (total_hist_br_prob[i] + total_hist_br_prob[19-i]) * 100
1478                     / total_num_branches, 5*i, 5*i+5);
1479        }
1480    }
1481}
1482
1483/* The label used by the arc profiling code.  */
1484
1485static rtx profiler_label;
1486
1487/* Initialize the profiler_label.  */
1488
1489static void
1490init_arc_profiler ()
1491{
1492  /* Generate and save a copy of this so it can be shared.  */
1493  char *name = xmalloc (20);
1494  ASM_GENERATE_INTERNAL_LABEL (name, "LPBX", 2);
1495  profiler_label = gen_rtx (SYMBOL_REF, Pmode, name);
1496}
1497
1498/* Output instructions as RTL to increment the arc execution count.  */
1499
1500static void
1501output_arc_profiler (arcno, insert_after)
1502     int arcno;
1503     rtx insert_after;
1504{
1505  rtx profiler_target_addr
1506    = (arcno
1507       ? gen_rtx (CONST, Pmode,
1508                  gen_rtx (PLUS, Pmode, profiler_label,
1509                           gen_rtx (CONST_INT, VOIDmode,
1510                                    LONG_TYPE_SIZE / BITS_PER_UNIT * arcno)))
1511       : profiler_label);
1512  enum machine_mode mode = mode_for_size (LONG_TYPE_SIZE, MODE_INT, 0);
1513  rtx profiler_reg = gen_reg_rtx (mode);
1514  rtx address_reg = gen_reg_rtx (Pmode);
1515  rtx mem_ref, add_ref;
1516  rtx sequence;
1517
1518  /* In this case, reload can use explicitly mentioned hard registers for
1519     reloads.  It is not safe to output profiling code between a call
1520     and the instruction that copies the result to a pseudo-reg.  This
1521     is because reload may allocate one of the profiling code pseudo-regs
1522     to the return value reg, thus clobbering the return value.  So we
1523     must check for calls here, and emit the profiling code after the
1524     instruction that uses the return value, if any.
1525
1526     ??? The code here performs the same tests that reload does, so hopefully
1527     all the bases are covered.  */
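  /* Illustrative example (not part of the original sources): on a target
     where the call's value is returned in a fixed hard register, the insn
     stream following the call might be

         (call_insn (set (reg:SI <hard return reg>) (call ...)))
         (insn (set (reg:SI <pseudo>) (reg:SI <hard return reg>)))

     Emitting the counter-update sequence between these two insns could let
     reload assign the hard return register to one of the new pseudos and
     clobber the value, so INSERT_AFTER is advanced past the copy below.  */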
1528
1529  if (SMALL_REGISTER_CLASSES
1530      && GET_CODE (insert_after) == CALL_INSN
1531      && (GET_CODE (PATTERN (insert_after)) == SET
1532          || (GET_CODE (PATTERN (insert_after)) == PARALLEL
1533              && GET_CODE (XVECEXP (PATTERN (insert_after), 0, 0)) == SET)))
1534    {
1535      rtx return_reg;
1536      rtx next_insert_after = next_nonnote_insn (insert_after);
1537
1538      /* The first insn after the call may be a stack pop; skip it.  */
1539      if (next_insert_after
1540          && GET_CODE (next_insert_after) == INSN
1541          && GET_CODE (PATTERN (next_insert_after)) == SET
1542          && SET_DEST (PATTERN (next_insert_after)) == stack_pointer_rtx)
1543        next_insert_after = next_nonnote_insn (next_insert_after);
1544
1545      if (next_insert_after
1546          && GET_CODE (next_insert_after) == INSN)
1547        {
1548          if (GET_CODE (PATTERN (insert_after)) == SET)
1549            return_reg = SET_DEST (PATTERN (insert_after));
1550          else
1551            return_reg = SET_DEST (XVECEXP (PATTERN (insert_after), 0, 0));
1552
1553          /* Now, NEXT_INSERT_AFTER may be an instruction that uses the
1554             return value.  However, it could also be something else,
1555             like a CODE_LABEL, so check that the code is INSN.  */
1556          if (next_insert_after != 0
1557              && GET_RTX_CLASS (GET_CODE (next_insert_after)) == 'i'
1558              && reg_referenced_p (return_reg, PATTERN (next_insert_after)))
1559            insert_after = next_insert_after;
1560        }
1561    }
1562
1563  start_sequence ();
1564
1565  emit_move_insn (address_reg, profiler_target_addr);
1566  mem_ref = gen_rtx (MEM, mode, address_reg);
1567  emit_move_insn (profiler_reg, mem_ref);
1568
1569  add_ref = gen_rtx (PLUS, mode, profiler_reg, GEN_INT (1));
1570  emit_move_insn (profiler_reg, add_ref);
1571
1572  /* This is the same rtx as above, but it is not legal to share this rtx.  */
1573  mem_ref = gen_rtx (MEM, mode, address_reg);
1574  emit_move_insn (mem_ref, profiler_reg);
1575
1576  sequence = gen_sequence ();
1577  end_sequence ();
1578  emit_insn_after (sequence, insert_after);
1579}
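/* Illustrative note (not part of the original sources): the sequence built
   above is roughly equivalent to the following C, where __arc_counts is a
   stand-in name for the table of long counters addressed by the LPBX 2
   label, one counter per instrumented arc:

       extern long __arc_counts[];
       __arc_counts[arcno]++;

   That is, form the address LPBX2 + arcno * (LONG_TYPE_SIZE / BITS_PER_UNIT),
   load the counter through it, add one, and store the result back through
   the same address register.  */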
1580
1581/* Output code for a constructor that will invoke __bb_init_func, if
1582   this has not already been done. */
1583
1584void
1585output_func_start_profiler ()
1586{
1587  tree fnname, fndecl;
1588  char *name, *cfnname;
1589  rtx table_address;
1590  enum machine_mode mode = mode_for_size (LONG_TYPE_SIZE, MODE_INT, 0);
1591
1592  /* It's either already been output, or we don't need it because we're
1593     not doing profile-arcs. */
1594  if (! need_func_profiler)
1595    return;
1596
1597  need_func_profiler = 0;
1598
1599  /* Synthesize a constructor function to invoke __bb_init_func with a
1600     pointer to this object file's profile block. */
1601  start_sequence ();
1602
1603  /* Try to make a unique name given the "file function name".
1604
1605     And no, I don't like this either. */
1606
1607  fnname = get_file_function_name ('I');
1608  cfnname = IDENTIFIER_POINTER (fnname);
1609  name = xmalloc (strlen (cfnname) + 5);
1610  sprintf (name, "%sGCOV", cfnname);
1611  fnname = get_identifier (name);
1612  free (name);
1613
1614  fndecl = build_decl (FUNCTION_DECL, fnname,
1615                       build_function_type (void_type_node, NULL_TREE));
1616  DECL_EXTERNAL (fndecl) = 0;
1617  TREE_PUBLIC (fndecl) = 1;
1618  DECL_ASSEMBLER_NAME (fndecl) = fnname;
1619  DECL_RESULT (fndecl) = build_decl (RESULT_DECL, NULL_TREE, void_type_node);
1620  current_function_decl = fndecl;
1621  pushlevel (0);
1622  make_function_rtl (fndecl);
1623  init_function_start (fndecl, input_filename, lineno);
1624  expand_function_start (fndecl, 0);
1625
1626  /* Actually generate the code to call __bb_init_func. */
1627  name = xmalloc (20);
1628  ASM_GENERATE_INTERNAL_LABEL (name, "LPBX", 0);
1629  table_address = force_reg (Pmode, gen_rtx (SYMBOL_REF, Pmode, name));
1630  emit_library_call (gen_rtx (SYMBOL_REF, Pmode, "__bb_init_func"), 0,
1631                     mode, 1, table_address, Pmode);
1632
1633  expand_function_end (input_filename, lineno, 0);
1634  poplevel (1, 0, 1);
1635  rest_of_compilation (fndecl);
1636  if (! quiet_flag)
1637    fflush (asm_out_file);
1638  current_function_decl = NULL_TREE;
1639
1640  assemble_constructor (IDENTIFIER_POINTER (DECL_NAME (fndecl)));
1641}
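
/* Illustrative note (not part of the original sources): the constructor
   synthesized above behaves roughly like the following C, where the name
   is the "file function name" with "GCOV" appended and __bb_profile_block
   stands in for the object emitted under the LPBX 0 label:

       extern void __bb_init_func ();
       extern char __bb_profile_block[];

       void file_function_nameGCOV (void)
       {
         __bb_init_func (__bb_profile_block);
       }

   Passing its name to assemble_constructor arranges for it to run before
   main, so that this object file's profile block is registered with the
   profiling run time.  */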