source: trunk/third/glib2/glib/gmem.c @ 20721

Revision 20721, 31.1 KB, checked in by ghudson, 20 years ago
This commit was generated by cvs2svn to compensate for changes in r20720, which included commits to RCS files with non-trunk default branches.
/* GLIB - Library of useful routines for C programming
 * Copyright (C) 1995-1997  Peter Mattis, Spencer Kimball and Josh MacDonald
 *
 * This library is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2 of the License, or (at your option) any later version.
 *
 * This library is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with this library; if not, write to the
 * Free Software Foundation, Inc., 59 Temple Place - Suite 330,
 * Boston, MA 02111-1307, USA.
 */

/*
 * Modified by the GLib Team and others 1997-2000.  See the AUTHORS
 * file for a list of people on the GLib Team.  See the ChangeLog
 * files for a list of changes.  These files are distributed with
 * GLib at ftp://ftp.gtk.org/pub/gtk/.
 */

/*
 * MT safe
 */

#include "config.h"

#include <stdlib.h>
#include <string.h>
#include <signal.h>

#include "glib.h"
#include "gthreadinit.h"

/* notes on macros:
 * defining DISABLE_MEM_POOLS disables mem chunks altogether; their
 * allocations are then performed through ordinary g_malloc/g_free.
 * defining G_DISABLE_CHECKS disables use of glib_mem_profiler_table and
 * g_mem_profile().
 * REALLOC_0_WORKS is defined if realloc (NULL, x) works.
 * SANE_MALLOC_PROTOS is defined if the system's malloc() and friends
 * match the corresponding GLib prototypes; keep configure.in and gmem.h in sync here.
 * if ENABLE_GC_FRIENDLY is defined, freed memory should be 0-wiped.
 */

#define MEM_PROFILE_TABLE_SIZE 4096

#define MEM_AREA_SIZE 4L

#ifdef  G_DISABLE_CHECKS
#  define ENTER_MEM_CHUNK_ROUTINE()
#  define LEAVE_MEM_CHUNK_ROUTINE()
#  define IN_MEM_CHUNK_ROUTINE()        FALSE
#else   /* !G_DISABLE_CHECKS */
static GPrivate* mem_chunk_recursion = NULL;
#  define MEM_CHUNK_ROUTINE_COUNT()     GPOINTER_TO_UINT (g_private_get (mem_chunk_recursion))
#  define ENTER_MEM_CHUNK_ROUTINE()     g_private_set (mem_chunk_recursion, GUINT_TO_POINTER (MEM_CHUNK_ROUTINE_COUNT () + 1))
#  define LEAVE_MEM_CHUNK_ROUTINE()     g_private_set (mem_chunk_recursion, GUINT_TO_POINTER (MEM_CHUNK_ROUTINE_COUNT () - 1))
#endif  /* !G_DISABLE_CHECKS */
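
/* ENTER/LEAVE above maintain a per-thread recursion count in a GPrivate
 * slot: profiler_log() below consults MEM_CHUNK_ROUTINE_COUNT() to tell
 * allocations requested by user code (count == 0) apart from those made
 * internally by the mem chunk machinery (count > 0), so g_mem_profile()
 * can report the two groups separately.
 */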

#ifndef REALLOC_0_WORKS
static gpointer
standard_realloc (gpointer mem,
                  gsize    n_bytes)
{
  if (!mem)
    return malloc (n_bytes);
  else
    return realloc (mem, n_bytes);
}
#endif  /* !REALLOC_0_WORKS */

#ifdef SANE_MALLOC_PROTOS
#  define standard_malloc       malloc
#  ifdef REALLOC_0_WORKS
#    define standard_realloc    realloc
#  endif /* REALLOC_0_WORKS */
#  define standard_free         free
#  define standard_calloc       calloc
#  define standard_try_malloc   malloc
#  define standard_try_realloc  realloc
#else   /* !SANE_MALLOC_PROTOS */
static gpointer
standard_malloc (gsize n_bytes)
{
  return malloc (n_bytes);
}
#  ifdef REALLOC_0_WORKS
static gpointer
standard_realloc (gpointer mem,
                  gsize    n_bytes)
{
  return realloc (mem, n_bytes);
}
#  endif /* REALLOC_0_WORKS */
static void
standard_free (gpointer mem)
{
  free (mem);
}
static gpointer
standard_calloc (gsize n_blocks,
                 gsize n_bytes)
{
  return calloc (n_blocks, n_bytes);
}
#define standard_try_malloc     standard_malloc
#define standard_try_realloc    standard_realloc
#endif  /* !SANE_MALLOC_PROTOS */


/* --- variables --- */
static GMemVTable glib_mem_vtable = {
  standard_malloc,
  standard_realloc,
  standard_free,
  standard_calloc,
  standard_try_malloc,
  standard_try_realloc,
};


/* --- functions --- */
gpointer
g_malloc (gulong n_bytes)
{
  if (n_bytes)
    {
      gpointer mem;

      mem = glib_mem_vtable.malloc (n_bytes);
      if (mem)
        return mem;

      g_error ("%s: failed to allocate %lu bytes", G_STRLOC, n_bytes);
    }

  return NULL;
}

gpointer
g_malloc0 (gulong n_bytes)
{
  if (n_bytes)
    {
      gpointer mem;

      mem = glib_mem_vtable.calloc (1, n_bytes);
      if (mem)
        return mem;

      g_error ("%s: failed to allocate %lu bytes", G_STRLOC, n_bytes);
    }

  return NULL;
}

gpointer
g_realloc (gpointer mem,
           gulong   n_bytes)
{
  if (n_bytes)
    {
      mem = glib_mem_vtable.realloc (mem, n_bytes);
      if (mem)
        return mem;

      g_error ("%s: failed to allocate %lu bytes", G_STRLOC, n_bytes);
    }

  if (mem)
    glib_mem_vtable.free (mem);

  return NULL;
}
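
/* Note the edge cases above: g_realloc (NULL, n) behaves like
 * g_malloc (n), and g_realloc (mem, 0) frees mem and returns NULL.
 * g_try_realloc () below mirrors the same contract, except that it
 * returns NULL on allocation failure instead of aborting.
 */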

void
g_free (gpointer mem)
{
  if (mem)
    glib_mem_vtable.free (mem);
}

gpointer
g_try_malloc (gulong n_bytes)
{
  if (n_bytes)
    return glib_mem_vtable.try_malloc (n_bytes);
  else
    return NULL;
}

gpointer
g_try_realloc (gpointer mem,
               gulong   n_bytes)
{
  if (n_bytes)
    return glib_mem_vtable.try_realloc (mem, n_bytes);

  if (mem)
    glib_mem_vtable.free (mem);

  return NULL;
}

static gpointer
fallback_calloc (gsize n_blocks,
                 gsize n_block_bytes)
{
  gsize l = n_blocks * n_block_bytes;
  gpointer mem = glib_mem_vtable.malloc (l);

  if (mem)
    memset (mem, 0, l);

  return mem;
}
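
/* Note that the multiplication in fallback_calloc() is not
 * overflow-checked; within this file it is only reached through
 * g_malloc0(), which always passes n_blocks == 1.
 */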

static gboolean vtable_set = FALSE;

/**
 * g_mem_is_system_malloc:
 *
 * Checks whether the allocator used by g_malloc() is the system's
 * malloc implementation. If it returns %TRUE, memory allocated with
 * malloc() can be used interchangeably with memory allocated using
 * g_malloc(). This function is useful for avoiding an extra copy of
 * allocated memory returned by a non-GLib-based API.
 *
 * A different allocator can be set using g_mem_set_vtable().
 *
 * Return value: %TRUE if malloc() and g_malloc() can be mixed.
 **/
gboolean
g_mem_is_system_malloc (void)
{
  return !vtable_set;
}

void
g_mem_set_vtable (GMemVTable *vtable)
{
  if (!vtable_set)
    {
      vtable_set = TRUE;
      if (vtable->malloc && vtable->realloc && vtable->free)
        {
          glib_mem_vtable.malloc = vtable->malloc;
          glib_mem_vtable.realloc = vtable->realloc;
          glib_mem_vtable.free = vtable->free;
          glib_mem_vtable.calloc = vtable->calloc ? vtable->calloc : fallback_calloc;
          glib_mem_vtable.try_malloc = vtable->try_malloc ? vtable->try_malloc : glib_mem_vtable.malloc;
          glib_mem_vtable.try_realloc = vtable->try_realloc ? vtable->try_realloc : glib_mem_vtable.realloc;
        }
      else
        g_warning (G_STRLOC ": memory allocation vtable lacks one of malloc(), realloc() or free()");
    }
  else
    g_warning (G_STRLOC ": memory allocation vtable can only be set once at startup");
}
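
/* A minimal usage sketch (example only, not part of this file): only
 * malloc, realloc and free are mandatory in the vtable; calloc and the
 * try_ variants are derived from them above when left NULL.  The vtable
 * has to be installed before any other GLib allocation, e.g. first
 * thing in main().  my_malloc/my_realloc/my_free are hypothetical
 * allocator hooks:
 *
 *   static GMemVTable my_vtable = {
 *     my_malloc, my_realloc, my_free, NULL, NULL, NULL,
 *   };
 *
 *   g_mem_set_vtable (&my_vtable);
 */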


/* --- memory profiling and checking --- */
#ifdef  G_DISABLE_CHECKS
GMemVTable *glib_mem_profiler_table = &glib_mem_vtable;
void
g_mem_profile (void)
{
}
#else   /* !G_DISABLE_CHECKS */
typedef enum {
  PROFILER_FREE         = 0,
  PROFILER_ALLOC        = 1,
  PROFILER_RELOC        = 2,
  PROFILER_ZINIT        = 4
} ProfilerJob;
static guint *profile_data = NULL;
static gulong profile_allocs = 0;
static gulong profile_mc_allocs = 0;
static gulong profile_zinit = 0;
static gulong profile_frees = 0;
static gulong profile_mc_frees = 0;
static GMutex *g_profile_mutex = NULL;
#ifdef  G_ENABLE_DEBUG
static volatile gulong g_trap_free_size = 0;
static volatile gulong g_trap_realloc_size = 0;
static volatile gulong g_trap_malloc_size = 0;
#endif  /* G_ENABLE_DEBUG */

#define PROFILE_TABLE(f1,f2,f3)   ( ( ((f3) << 2) | ((f2) << 1) | (f1) ) * (MEM_PROFILE_TABLE_SIZE + 1))
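
/* profile_data holds 8 consecutive sub-tables of MEM_PROFILE_TABLE_SIZE + 1
 * counters each; PROFILE_TABLE() selects a sub-table from the three flag
 * bits (alloc, realloc, success) and the block size indexes into it, with
 * the last slot aggregating all sizes >= MEM_PROFILE_TABLE_SIZE.  E.g. a
 * successful 16-byte malloc bumps profile_data[16 + PROFILE_TABLE (1, 0, 1)].
 */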

static void
profiler_log (ProfilerJob job,
              gulong      n_bytes,
              gboolean    success)
{
  g_mutex_lock (g_profile_mutex);
  if (!profile_data)
    {
      profile_data = standard_malloc ((MEM_PROFILE_TABLE_SIZE + 1) * 8 * sizeof (profile_data[0]));
      if (!profile_data)        /* memory system kiddin' me, eh? */
        {
          g_mutex_unlock (g_profile_mutex);
          return;
        }
    }

  if (MEM_CHUNK_ROUTINE_COUNT () == 0)
    {
      if (n_bytes < MEM_PROFILE_TABLE_SIZE)
        profile_data[n_bytes + PROFILE_TABLE ((job & PROFILER_ALLOC) != 0,
                                              (job & PROFILER_RELOC) != 0,
                                              success != 0)] += 1;
      else
        profile_data[MEM_PROFILE_TABLE_SIZE + PROFILE_TABLE ((job & PROFILER_ALLOC) != 0,
                                                             (job & PROFILER_RELOC) != 0,
                                                             success != 0)] += 1;
      if (success)
        {
          if (job & PROFILER_ALLOC)
            {
              profile_allocs += n_bytes;
              if (job & PROFILER_ZINIT)
                profile_zinit += n_bytes;
            }
          else
            profile_frees += n_bytes;
        }
    }
  else if (success)
    {
      if (job & PROFILER_ALLOC)
        profile_mc_allocs += n_bytes;
      else
        profile_mc_frees += n_bytes;
    }
  g_mutex_unlock (g_profile_mutex);
}

static void
profile_print_locked (guint   *local_data,
                      gboolean success)
{
  gboolean need_header = TRUE;
  guint i;

  for (i = 0; i <= MEM_PROFILE_TABLE_SIZE; i++)
    {
      glong t_malloc = local_data[i + PROFILE_TABLE (1, 0, success)];
      glong t_realloc = local_data[i + PROFILE_TABLE (1, 1, success)];
      glong t_free = local_data[i + PROFILE_TABLE (0, 0, success)];
      glong t_refree = local_data[i + PROFILE_TABLE (0, 1, success)];

      if (!t_malloc && !t_realloc && !t_free && !t_refree)
        continue;
      else if (need_header)
        {
          need_header = FALSE;
          g_print (" blocks of | allocated  | freed      | allocated  | freed      | n_bytes   \n");
          g_print ("  n_bytes  | n_times by | n_times by | n_times by | n_times by | remaining \n");
          g_print ("           | malloc()   | free()     | realloc()  | realloc()  |           \n");
          g_print ("===========|============|============|============|============|===========\n");
        }
      if (i < MEM_PROFILE_TABLE_SIZE)
        g_print ("%10u | %10ld | %10ld | %10ld | %10ld |%+11ld\n",
                 i, t_malloc, t_free, t_realloc, t_refree,
                 (t_malloc - t_free + t_realloc - t_refree) * i);
      else if (i >= MEM_PROFILE_TABLE_SIZE)
        g_print ("   >%6u | %10ld | %10ld | %10ld | %10ld |        ***\n",
                 i, t_malloc, t_free, t_realloc, t_refree);
    }
  if (need_header)
    g_print (" --- none ---\n");
}

void
g_mem_profile (void)
{
  guint local_data[(MEM_PROFILE_TABLE_SIZE + 1) * 8];   /* snapshot of all 8 sub-tables */
  gulong local_allocs;
  gulong local_zinit;
  gulong local_frees;
  gulong local_mc_allocs;
  gulong local_mc_frees;

  g_mutex_lock (g_profile_mutex);

  local_allocs = profile_allocs;
  local_zinit = profile_zinit;
  local_frees = profile_frees;
  local_mc_allocs = profile_mc_allocs;
  local_mc_frees = profile_mc_frees;

  if (!profile_data)
    {
      g_mutex_unlock (g_profile_mutex);
      return;
    }

  memcpy (local_data, profile_data,
          (MEM_PROFILE_TABLE_SIZE + 1) * 8 * sizeof (profile_data[0]));

  g_mutex_unlock (g_profile_mutex);

  g_print ("GLib Memory statistics (successful operations):\n");
  profile_print_locked (local_data, TRUE);
  g_print ("GLib Memory statistics (failing operations):\n");
  profile_print_locked (local_data, FALSE);
  g_print ("Total bytes: allocated=%lu, zero-initialized=%lu (%.2f%%), freed=%lu (%.2f%%), remaining=%lu\n",
           local_allocs,
           local_zinit,
           ((gdouble) local_zinit) / local_allocs * 100.0,
           local_frees,
           ((gdouble) local_frees) / local_allocs * 100.0,
           local_allocs - local_frees);
  g_print ("MemChunk bytes: allocated=%lu, freed=%lu (%.2f%%), remaining=%lu\n",
           local_mc_allocs,
           local_mc_frees,
           ((gdouble) local_mc_frees) / local_mc_allocs * 100.0,
           local_mc_allocs - local_mc_frees);
}

static gpointer
profiler_try_malloc (gsize n_bytes)
{
  gulong *p;

#ifdef  G_ENABLE_DEBUG
  if (g_trap_malloc_size == n_bytes)
    G_BREAKPOINT ();
#endif  /* G_ENABLE_DEBUG */

  p = standard_malloc (sizeof (gulong) * 2 + n_bytes);

  if (p)
    {
      p[0] = 0;         /* free count */
      p[1] = n_bytes;   /* length */
      profiler_log (PROFILER_ALLOC, n_bytes, TRUE);
      p += 2;
    }
  else
    profiler_log (PROFILER_ALLOC, n_bytes, FALSE);

  return p;
}

static gpointer
profiler_malloc (gsize n_bytes)
{
  gpointer mem = profiler_try_malloc (n_bytes);

  if (!mem)
    g_mem_profile ();

  return mem;
}

static gpointer
profiler_calloc (gsize n_blocks,
                 gsize n_block_bytes)
{
  gsize l = n_blocks * n_block_bytes;
  gulong *p;

#ifdef  G_ENABLE_DEBUG
  if (g_trap_malloc_size == l)
    G_BREAKPOINT ();
#endif  /* G_ENABLE_DEBUG */

  p = standard_calloc (1, sizeof (gulong) * 2 + l);

  if (p)
    {
      p[0] = 0;         /* free count */
      p[1] = l;         /* length */
      profiler_log (PROFILER_ALLOC | PROFILER_ZINIT, l, TRUE);
      p += 2;
    }
  else
    {
      profiler_log (PROFILER_ALLOC | PROFILER_ZINIT, l, FALSE);
      g_mem_profile ();
    }

  return p;
}

static void
profiler_free (gpointer mem)
{
  gulong *p = mem;

  p -= 2;
  if (p[0])     /* free count */
    {
      g_warning ("free(%p): memory has been freed %lu times already", p + 2, p[0]);
      profiler_log (PROFILER_FREE,
                    p[1],       /* length */
                    FALSE);
    }
  else
    {
#ifdef  G_ENABLE_DEBUG
      if (g_trap_free_size == p[1])
        G_BREAKPOINT ();
#endif  /* G_ENABLE_DEBUG */

      profiler_log (PROFILER_FREE,
                    p[1],       /* length */
                    TRUE);
      memset (p + 2, 0xaa, p[1]);

      /* for all those that miss standard_free (p); in this place, yes,
       * we do leak all memory when profiling, and that is intentional
       * to catch double frees. patch submissions are futile.
       */
    }
  p[0] += 1;
}

static gpointer
profiler_try_realloc (gpointer mem,
                      gsize    n_bytes)
{
  gulong *p = mem;

  p -= 2;

#ifdef  G_ENABLE_DEBUG
  if (g_trap_realloc_size == n_bytes)
    G_BREAKPOINT ();
#endif  /* G_ENABLE_DEBUG */

  if (mem && p[0])      /* free count */
    {
      g_warning ("realloc(%p, %lu): memory has been freed %lu times already", p + 2, (gulong)n_bytes, p[0]);
      profiler_log (PROFILER_ALLOC | PROFILER_RELOC, n_bytes, FALSE);

      return NULL;
    }
  else
    {
      p = standard_realloc (mem ? p : NULL, sizeof (gulong) * 2 + n_bytes);

      if (p)
        {
          if (mem)
            profiler_log (PROFILER_FREE | PROFILER_RELOC, p[1], TRUE);
          p[0] = 0;
          p[1] = n_bytes;
          profiler_log (PROFILER_ALLOC | PROFILER_RELOC, p[1], TRUE);
          p += 2;
        }
      else
        profiler_log (PROFILER_ALLOC | PROFILER_RELOC, n_bytes, FALSE);

      return p;
    }
}

static gpointer
profiler_realloc (gpointer mem,
                  gsize    n_bytes)
{
  mem = profiler_try_realloc (mem, n_bytes);

  if (!mem)
    g_mem_profile ();

  return mem;
}

static GMemVTable profiler_table = {
  profiler_malloc,
  profiler_realloc,
  profiler_free,
  profiler_calloc,
  profiler_try_malloc,
  profiler_try_realloc,
};
GMemVTable *glib_mem_profiler_table = &profiler_table;

#endif  /* !G_DISABLE_CHECKS */
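
/* Typical profiler usage (example only): install the profiling vtable
 * before any GLib allocation, then dump the statistics at exit:
 *
 *   int
 *   main (int argc, char *argv[])
 *   {
 *     g_mem_set_vtable (glib_mem_profiler_table);
 *     g_atexit (g_mem_profile);
 *     ...
 *   }
 */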


/* --- MemChunks --- */
typedef struct _GFreeAtom      GFreeAtom;
typedef struct _GMemArea       GMemArea;

struct _GFreeAtom
{
  GFreeAtom *next;
};

struct _GMemArea
{
  GMemArea *next;            /* the next mem area */
  GMemArea *prev;            /* the previous mem area */
  gulong index;              /* the current index into the "mem" array */
  gulong free;               /* the number of free bytes in this mem area */
  gulong allocated;          /* the number of atoms allocated from this area */
  gulong mark;               /* is this mem area marked for deletion */
  gchar mem[MEM_AREA_SIZE];  /* the mem array from which atoms get allocated
                              * the actual size of this array is determined by
                              *  the mem chunk "area_size". ANSI says that it
                              *  must be declared to be the maximum size it
                              *  can possibly be (even though the actual size
                              *  may be less).
                              */
};

struct _GMemChunk
{
  const gchar *name;         /* name of this MemChunk...used for debugging output */
  gint type;                 /* the type of MemChunk: ALLOC_ONLY or ALLOC_AND_FREE */
  gint num_mem_areas;        /* the number of memory areas */
  gint num_marked_areas;     /* the number of areas marked for deletion */
  guint atom_size;           /* the size of an atom */
  gulong area_size;          /* the size of a memory area */
  GMemArea *mem_area;        /* the current memory area */
  GMemArea *mem_areas;       /* a list of all the mem areas owned by this chunk */
  GMemArea *free_mem_area;   /* the free area...which is about to be destroyed */
  GFreeAtom *free_atoms;     /* the free atoms list */
  GTree *mem_tree;           /* tree of mem areas sorted by memory address */
  GMemChunk *next;           /* pointer to the next chunk */
  GMemChunk *prev;           /* pointer to the previous chunk */
};


#ifndef DISABLE_MEM_POOLS
static gulong g_mem_chunk_compute_size (gulong    size,
                                        gulong    min_size) G_GNUC_CONST;
static gint   g_mem_chunk_area_compare (GMemArea *a,
                                        GMemArea *b);
static gint   g_mem_chunk_area_search  (GMemArea *a,
                                        gchar    *addr);

/* here we can't use StaticMutexes, as they depend upon a working
 * g_malloc; the same holds true for StaticPrivate
 */
static GMutex        *mem_chunks_lock = NULL;
static GMemChunk     *mem_chunks = NULL;

GMemChunk*
g_mem_chunk_new (const gchar  *name,
                 gint          atom_size,
                 gulong        area_size,
                 gint          type)
{
  GMemChunk *mem_chunk;
  gulong rarea_size;

  g_return_val_if_fail (atom_size > 0, NULL);
  g_return_val_if_fail (area_size >= atom_size, NULL);

  ENTER_MEM_CHUNK_ROUTINE ();

  area_size = (area_size + atom_size - 1) / atom_size;
  area_size *= atom_size;

  mem_chunk = g_new (GMemChunk, 1);
  mem_chunk->name = name;
  mem_chunk->type = type;
  mem_chunk->num_mem_areas = 0;
  mem_chunk->num_marked_areas = 0;
  mem_chunk->mem_area = NULL;
  mem_chunk->free_mem_area = NULL;
  mem_chunk->free_atoms = NULL;
  mem_chunk->mem_tree = NULL;
  mem_chunk->mem_areas = NULL;
  mem_chunk->atom_size = atom_size;

  if (mem_chunk->type == G_ALLOC_AND_FREE)
    mem_chunk->mem_tree = g_tree_new ((GCompareFunc) g_mem_chunk_area_compare);

  if (mem_chunk->atom_size % G_MEM_ALIGN)
    mem_chunk->atom_size += G_MEM_ALIGN - (mem_chunk->atom_size % G_MEM_ALIGN);

  rarea_size = area_size + sizeof (GMemArea) - MEM_AREA_SIZE;
  rarea_size = g_mem_chunk_compute_size (rarea_size, atom_size + sizeof (GMemArea) - MEM_AREA_SIZE);
  mem_chunk->area_size = rarea_size - (sizeof (GMemArea) - MEM_AREA_SIZE);

  g_mutex_lock (mem_chunks_lock);
  mem_chunk->next = mem_chunks;
  mem_chunk->prev = NULL;
  if (mem_chunks)
    mem_chunks->prev = mem_chunk;
  mem_chunks = mem_chunk;
  g_mutex_unlock (mem_chunks_lock);

  LEAVE_MEM_CHUNK_ROUTINE ();

  return mem_chunk;
}
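
/* Typical mem chunk usage (example only; MyStruct stands in for any
 * fixed-size structure).  G_ALLOC_AND_FREE permits returning individual
 * atoms with g_mem_chunk_free(), whereas G_ALLOC_ONLY chunks can only be
 * released wholesale with g_mem_chunk_destroy():
 *
 *   GMemChunk *chunk = g_mem_chunk_new ("MyStruct chunk",
 *                                       sizeof (MyStruct),
 *                                       1024 * sizeof (MyStruct),
 *                                       G_ALLOC_AND_FREE);
 *   MyStruct *s = g_mem_chunk_alloc0 (chunk);
 *   ...
 *   g_mem_chunk_free (chunk, s);
 *   g_mem_chunk_destroy (chunk);
 */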

void
g_mem_chunk_destroy (GMemChunk *mem_chunk)
{
  GMemArea *mem_areas;
  GMemArea *temp_area;

  g_return_if_fail (mem_chunk != NULL);

  ENTER_MEM_CHUNK_ROUTINE ();

  mem_areas = mem_chunk->mem_areas;
  while (mem_areas)
    {
      temp_area = mem_areas;
      mem_areas = mem_areas->next;
      g_free (temp_area);
    }

  g_mutex_lock (mem_chunks_lock);
  if (mem_chunk->next)
    mem_chunk->next->prev = mem_chunk->prev;
  if (mem_chunk->prev)
    mem_chunk->prev->next = mem_chunk->next;

  if (mem_chunk == mem_chunks)
    mem_chunks = mem_chunks->next;
  g_mutex_unlock (mem_chunks_lock);

  if (mem_chunk->type == G_ALLOC_AND_FREE)
    g_tree_destroy (mem_chunk->mem_tree);

  g_free (mem_chunk);

  LEAVE_MEM_CHUNK_ROUTINE ();
}

gpointer
g_mem_chunk_alloc (GMemChunk *mem_chunk)
{
  GMemArea *temp_area;
  gpointer mem;

  g_return_val_if_fail (mem_chunk != NULL, NULL);

  ENTER_MEM_CHUNK_ROUTINE ();

  while (mem_chunk->free_atoms)
    {
      /* Get the first piece of memory on the "free_atoms" list.
       * We can go ahead and destroy the list node we used to keep
       *  track of it with, and update the "free_atoms" list to
       *  point to its next element.
       */
      mem = mem_chunk->free_atoms;
      mem_chunk->free_atoms = mem_chunk->free_atoms->next;

      /* Determine which area this piece of memory is allocated from */
      temp_area = g_tree_search (mem_chunk->mem_tree,
                                 (GCompareFunc) g_mem_chunk_area_search,
                                 mem);

      /* If the area has been marked, then it is being destroyed.
       * We check to see if all of the segments on the free list that
       *  reference this area have been removed. This occurs when
       *  the amount of free memory is less than the allocatable size.
       * If the chunk should be freed, then we place it in the "free_mem_area".
       * This is so we make sure not to free the mem area here and then
       *  allocate it again a few lines down.
       * If we don't allocate a chunk a few lines down then the "free_mem_area"
       *  will be freed.
       * If there is already a "free_mem_area" then we'll just free this mem area.
       */
      if (temp_area->mark)
        {
          /* Update the "free" memory available in that area */
          temp_area->free += mem_chunk->atom_size;

          if (temp_area->free == mem_chunk->area_size)
            {
              if (temp_area == mem_chunk->mem_area)
                mem_chunk->mem_area = NULL;

              if (mem_chunk->free_mem_area)
                {
                  mem_chunk->num_mem_areas -= 1;

                  if (temp_area->next)
                    temp_area->next->prev = temp_area->prev;
                  if (temp_area->prev)
                    temp_area->prev->next = temp_area->next;
                  if (temp_area == mem_chunk->mem_areas)
                    mem_chunk->mem_areas = mem_chunk->mem_areas->next;

                  if (mem_chunk->type == G_ALLOC_AND_FREE)
                    g_tree_remove (mem_chunk->mem_tree, temp_area);
                  g_free (temp_area);
                }
              else
                mem_chunk->free_mem_area = temp_area;

              mem_chunk->num_marked_areas -= 1;
            }
        }
      else
        {
          /* Update the number of allocated atoms count.
           */
          temp_area->allocated += 1;

          /* The area wasn't marked...return the memory
           */
          goto outa_here;
        }
    }

  /* If there isn't a current mem area or the current mem area is out of space
   *  then allocate a new mem area. We'll first check and see if we can use
   *  the "free_mem_area". Otherwise we'll just malloc the mem area.
   */
  if ((!mem_chunk->mem_area) ||
      ((mem_chunk->mem_area->index + mem_chunk->atom_size) > mem_chunk->area_size))
    {
      if (mem_chunk->free_mem_area)
        {
          mem_chunk->mem_area = mem_chunk->free_mem_area;
          mem_chunk->free_mem_area = NULL;
        }
      else
        {
#ifdef ENABLE_GC_FRIENDLY
          mem_chunk->mem_area = (GMemArea*) g_malloc0 (sizeof (GMemArea) -
                                                       MEM_AREA_SIZE +
                                                       mem_chunk->area_size);
#else /* !ENABLE_GC_FRIENDLY */
          mem_chunk->mem_area = (GMemArea*) g_malloc (sizeof (GMemArea) -
                                                      MEM_AREA_SIZE +
                                                      mem_chunk->area_size);
#endif /* ENABLE_GC_FRIENDLY */

          mem_chunk->num_mem_areas += 1;
          mem_chunk->mem_area->next = mem_chunk->mem_areas;
          mem_chunk->mem_area->prev = NULL;

          if (mem_chunk->mem_areas)
            mem_chunk->mem_areas->prev = mem_chunk->mem_area;
          mem_chunk->mem_areas = mem_chunk->mem_area;

          if (mem_chunk->type == G_ALLOC_AND_FREE)
            g_tree_insert (mem_chunk->mem_tree, mem_chunk->mem_area, mem_chunk->mem_area);
        }

      mem_chunk->mem_area->index = 0;
      mem_chunk->mem_area->free = mem_chunk->area_size;
      mem_chunk->mem_area->allocated = 0;
      mem_chunk->mem_area->mark = 0;
    }

  /* Get the memory and modify the state variables appropriately.
   */
  mem = (gpointer) &mem_chunk->mem_area->mem[mem_chunk->mem_area->index];
  mem_chunk->mem_area->index += mem_chunk->atom_size;
  mem_chunk->mem_area->free -= mem_chunk->atom_size;
  mem_chunk->mem_area->allocated += 1;

outa_here:

  LEAVE_MEM_CHUNK_ROUTINE ();

  return mem;
}

gpointer
g_mem_chunk_alloc0 (GMemChunk *mem_chunk)
{
  gpointer mem;

  mem = g_mem_chunk_alloc (mem_chunk);
  if (mem)
    {
      memset (mem, 0, mem_chunk->atom_size);
    }

  return mem;
}

void
g_mem_chunk_free (GMemChunk *mem_chunk,
                  gpointer   mem)
{
  GMemArea *temp_area;
  GFreeAtom *free_atom;

  g_return_if_fail (mem_chunk != NULL);
  g_return_if_fail (mem != NULL);

  ENTER_MEM_CHUNK_ROUTINE ();

#ifdef ENABLE_GC_FRIENDLY
  memset (mem, 0, mem_chunk->atom_size);
#endif /* ENABLE_GC_FRIENDLY */

  /* Don't do anything if this is an ALLOC_ONLY chunk
   */
  if (mem_chunk->type == G_ALLOC_AND_FREE)
    {
      /* Place the memory on the "free_atoms" list
       */
      free_atom = (GFreeAtom*) mem;
      free_atom->next = mem_chunk->free_atoms;
      mem_chunk->free_atoms = free_atom;

      temp_area = g_tree_search (mem_chunk->mem_tree,
                                 (GCompareFunc) g_mem_chunk_area_search,
                                 mem);

      temp_area->allocated -= 1;

      if (temp_area->allocated == 0)
        {
          temp_area->mark = 1;
          mem_chunk->num_marked_areas += 1;
        }
    }

  LEAVE_MEM_CHUNK_ROUTINE ();
}

/* This doesn't free the free_area if there is one */
void
g_mem_chunk_clean (GMemChunk *mem_chunk)
{
  GMemArea *mem_area;
  GFreeAtom *prev_free_atom;
  GFreeAtom *temp_free_atom;
  gpointer mem;

  g_return_if_fail (mem_chunk != NULL);

  ENTER_MEM_CHUNK_ROUTINE ();

  if (mem_chunk->type == G_ALLOC_AND_FREE)
    {
      prev_free_atom = NULL;
      temp_free_atom = mem_chunk->free_atoms;

      while (temp_free_atom)
        {
          mem = (gpointer) temp_free_atom;

          mem_area = g_tree_search (mem_chunk->mem_tree,
                                    (GCompareFunc) g_mem_chunk_area_search,
                                    mem);

          /* If this mem area is marked for destruction then unlink the
           *  atom from the free list and credit its bytes back to the
           *  area; once the whole area is free, delete it.
           */
          if (mem_area->mark)
            {
              if (prev_free_atom)
                prev_free_atom->next = temp_free_atom->next;
              else
                mem_chunk->free_atoms = temp_free_atom->next;
              temp_free_atom = temp_free_atom->next;

              mem_area->free += mem_chunk->atom_size;
              if (mem_area->free == mem_chunk->area_size)
                {
                  mem_chunk->num_mem_areas -= 1;
                  mem_chunk->num_marked_areas -= 1;

                  if (mem_area->next)
                    mem_area->next->prev = mem_area->prev;
                  if (mem_area->prev)
                    mem_area->prev->next = mem_area->next;
                  if (mem_area == mem_chunk->mem_areas)
                    mem_chunk->mem_areas = mem_chunk->mem_areas->next;
                  if (mem_area == mem_chunk->mem_area)
                    mem_chunk->mem_area = NULL;

                  if (mem_chunk->type == G_ALLOC_AND_FREE)
                    g_tree_remove (mem_chunk->mem_tree, mem_area);
                  g_free (mem_area);
                }
            }
          else
            {
              prev_free_atom = temp_free_atom;
              temp_free_atom = temp_free_atom->next;
            }
        }
    }
  LEAVE_MEM_CHUNK_ROUTINE ();
}

void
g_mem_chunk_reset (GMemChunk *mem_chunk)
{
  GMemArea *mem_areas;
  GMemArea *temp_area;

  g_return_if_fail (mem_chunk != NULL);

  ENTER_MEM_CHUNK_ROUTINE ();

  mem_areas = mem_chunk->mem_areas;
  mem_chunk->num_mem_areas = 0;
  mem_chunk->mem_areas = NULL;
  mem_chunk->mem_area = NULL;

  while (mem_areas)
    {
      temp_area = mem_areas;
      mem_areas = mem_areas->next;
      g_free (temp_area);
    }

  mem_chunk->free_atoms = NULL;

  if (mem_chunk->mem_tree)
    {
      g_tree_destroy (mem_chunk->mem_tree);
      mem_chunk->mem_tree = g_tree_new ((GCompareFunc) g_mem_chunk_area_compare);
    }

  LEAVE_MEM_CHUNK_ROUTINE ();
}

void
g_mem_chunk_print (GMemChunk *mem_chunk)
{
  GMemArea *mem_areas;
  gulong mem;

  g_return_if_fail (mem_chunk != NULL);

  mem_areas = mem_chunk->mem_areas;
  mem = 0;

  while (mem_areas)
    {
      mem += mem_chunk->area_size - mem_areas->free;
      mem_areas = mem_areas->next;
    }

  g_log (G_LOG_DOMAIN, G_LOG_LEVEL_INFO,
         "%s: %lu bytes using %d mem areas",
         mem_chunk->name, mem, mem_chunk->num_mem_areas);
}

void
g_mem_chunk_info (void)
{
  GMemChunk *mem_chunk;
  gint count;

  count = 0;
  g_mutex_lock (mem_chunks_lock);
  mem_chunk = mem_chunks;
  while (mem_chunk)
    {
      count += 1;
      mem_chunk = mem_chunk->next;
    }
  g_mutex_unlock (mem_chunks_lock);

  g_log (G_LOG_DOMAIN, G_LOG_LEVEL_INFO, "%d mem chunks", count);

  g_mutex_lock (mem_chunks_lock);
  mem_chunk = mem_chunks;
  g_mutex_unlock (mem_chunks_lock);

  while (mem_chunk)
    {
      g_mem_chunk_print ((GMemChunk*) mem_chunk);
      mem_chunk = mem_chunk->next;
    }
}

void
g_blow_chunks (void)
{
  GMemChunk *mem_chunk;

  g_mutex_lock (mem_chunks_lock);
  mem_chunk = mem_chunks;
  g_mutex_unlock (mem_chunks_lock);
  while (mem_chunk)
    {
      g_mem_chunk_clean ((GMemChunk*) mem_chunk);
      mem_chunk = mem_chunk->next;
    }
}

static gulong
g_mem_chunk_compute_size (gulong size,
                          gulong min_size)
{
  gulong power_of_2;
  gulong lower, upper;

  power_of_2 = 16;
  while (power_of_2 < size)
    power_of_2 <<= 1;

  lower = power_of_2 >> 1;
  upper = power_of_2;

  if (size - lower < upper - size && lower >= min_size)
    return lower;
  else
    return upper;
}
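
/* A worked example of the rounding above: for size == 1300,
 * power_of_2 ends up at 2048, giving lower == 1024 and upper == 2048;
 * since 1300 - 1024 < 2048 - 1300, the nearer power of two, 1024, is
 * returned (provided min_size <= 1024).  For size == 1800 the
 * distances reverse and 2048 is returned.
 */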

static gint
g_mem_chunk_area_compare (GMemArea *a,
                          GMemArea *b)
{
  if (a->mem > b->mem)
    return 1;
  else if (a->mem < b->mem)
    return -1;
  return 0;
}

static gint
g_mem_chunk_area_search (GMemArea *a,
                         gchar    *addr)
{
  if (a->mem <= addr)
    {
      if (addr < &a->mem[a->index])
        return 0;
      return 1;
    }
  return -1;
}

#else /* DISABLE_MEM_POOLS */

typedef struct {
  guint alloc_size;           /* the size of an atom */
}  GMinimalMemChunk;

GMemChunk*
g_mem_chunk_new (const gchar  *name,
                 gint          atom_size,
                 gulong        area_size,
                 gint          type)
{
  GMinimalMemChunk *mem_chunk;

  g_return_val_if_fail (atom_size > 0, NULL);

  mem_chunk = g_new (GMinimalMemChunk, 1);
  mem_chunk->alloc_size = atom_size;

  return ((GMemChunk*) mem_chunk);
}

void
g_mem_chunk_destroy (GMemChunk *mem_chunk)
{
  g_return_if_fail (mem_chunk != NULL);

  g_free (mem_chunk);
}

gpointer
g_mem_chunk_alloc (GMemChunk *mem_chunk)
{
  GMinimalMemChunk *minimal = (GMinimalMemChunk *)mem_chunk;

  g_return_val_if_fail (mem_chunk != NULL, NULL);

  return g_malloc (minimal->alloc_size);
}

gpointer
g_mem_chunk_alloc0 (GMemChunk *mem_chunk)
{
  GMinimalMemChunk *minimal = (GMinimalMemChunk *)mem_chunk;

  g_return_val_if_fail (mem_chunk != NULL, NULL);

  return g_malloc0 (minimal->alloc_size);
}

void
g_mem_chunk_free (GMemChunk *mem_chunk,
                  gpointer   mem)
{
  g_return_if_fail (mem_chunk != NULL);

  g_free (mem);
}

void    g_mem_chunk_clean       (GMemChunk *mem_chunk)  {}
void    g_mem_chunk_reset       (GMemChunk *mem_chunk)  {}
void    g_mem_chunk_print       (GMemChunk *mem_chunk)  {}
void    g_mem_chunk_info        (void)                  {}
void    g_blow_chunks           (void)                  {}

#endif /* DISABLE_MEM_POOLS */


/* generic allocators
 */
struct _GAllocator /* from gmem.c */
{
  gchar         *name;
  guint16        n_preallocs;
  guint          is_unused : 1;
  guint          type : 4;
  GAllocator    *last;
  GMemChunk     *mem_chunk;
  gpointer       dummy; /* implementation specific */
};

GAllocator*
g_allocator_new (const gchar *name,
                 guint        n_preallocs)
{
  GAllocator *allocator;

  g_return_val_if_fail (name != NULL, NULL);

  allocator = g_new0 (GAllocator, 1);
  allocator->name = g_strdup (name);
  allocator->n_preallocs = CLAMP (n_preallocs, 1, 65535);
  allocator->is_unused = TRUE;
  allocator->type = 0;
  allocator->last = NULL;
  allocator->mem_chunk = NULL;
  allocator->dummy = NULL;

  return allocator;
}

void
g_allocator_free (GAllocator *allocator)
{
  g_return_if_fail (allocator != NULL);
  g_return_if_fail (allocator->is_unused == TRUE);

  g_free (allocator->name);
  if (allocator->mem_chunk)
    g_mem_chunk_destroy (allocator->mem_chunk);

  g_free (allocator);
}
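
/* GAllocators are consumed by the container implementations rather than
 * used directly; a typical pattern (example only) hands one to GList:
 *
 *   GAllocator *alloc = g_allocator_new ("list nodes", 1024);
 *   g_list_push_allocator (alloc);
 *   ... list-heavy work ...
 *   g_list_pop_allocator ();
 *   g_allocator_free (alloc);
 *
 * g_allocator_free() checks is_unused, so the allocator may only be
 * freed after it has been popped from every container that used it.
 */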

void
_g_mem_thread_init (void)
{
#ifndef DISABLE_MEM_POOLS
  mem_chunks_lock = g_mutex_new ();
#endif
#ifndef G_DISABLE_CHECKS
  g_profile_mutex = g_mutex_new ();
#endif
}

void
_g_mem_thread_private_init (void)
{
#ifndef G_DISABLE_CHECKS
  g_assert (mem_chunk_recursion == NULL);
  mem_chunk_recursion = g_private_new (NULL);
#endif
}
