source: trunk/third/glib2/glib/gmem.c @ 18159

Revision 18159, 31.0 KB checked in by ghudson, 22 years ago
This commit was generated by cvs2svn to compensate for changes in r18158, which included commits to RCS files with non-trunk default branches.
/* GLIB - Library of useful routines for C programming
 * Copyright (C) 1995-1997  Peter Mattis, Spencer Kimball and Josh MacDonald
 *
 * This library is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2 of the License, or (at your option) any later version.
 *
 * This library is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with this library; if not, write to the
 * Free Software Foundation, Inc., 59 Temple Place - Suite 330,
 * Boston, MA 02111-1307, USA.
 */

/*
 * Modified by the GLib Team and others 1997-2000.  See the AUTHORS
 * file for a list of people on the GLib Team.  See the ChangeLog
 * files for a list of changes.  These files are distributed with
 * GLib at ftp://ftp.gtk.org/pub/gtk/.
 */

/*
 * MT safe
 */

#include "config.h"

#include <stdlib.h>
#include <string.h>
#include <signal.h>

#include "glib.h"


/* notes on macros:
 * defining DISABLE_MEM_POOLS disables mem chunks altogether; their
 * allocations are then performed through ordinary g_malloc/g_free.
 * defining G_DISABLE_CHECKS disables use of glib_mem_profiler_table and
 * g_mem_profile().
 * REALLOC_0_WORKS is defined if realloc (NULL, x) works.
 * SANE_MALLOC_PROTOS is defined if the system's malloc() and friends
 * match the corresponding GLib prototypes; keep configure.in and gmem.h in sync here.
 * if ENABLE_GC_FRIENDLY is defined, freed memory should be 0-wiped.
 */

#define MEM_PROFILE_TABLE_SIZE 4096

#define MEM_AREA_SIZE 4L

#ifdef  G_DISABLE_CHECKS
#  define ENTER_MEM_CHUNK_ROUTINE()
#  define LEAVE_MEM_CHUNK_ROUTINE()
#  define IN_MEM_CHUNK_ROUTINE()        FALSE
#else   /* !G_DISABLE_CHECKS */
static GPrivate* mem_chunk_recursion = NULL;
#  define MEM_CHUNK_ROUTINE_COUNT()     GPOINTER_TO_UINT (g_private_get (mem_chunk_recursion))
#  define ENTER_MEM_CHUNK_ROUTINE()     g_private_set (mem_chunk_recursion, GUINT_TO_POINTER (MEM_CHUNK_ROUTINE_COUNT () + 1))
#  define LEAVE_MEM_CHUNK_ROUTINE()     g_private_set (mem_chunk_recursion, GUINT_TO_POINTER (MEM_CHUNK_ROUTINE_COUNT () - 1))
#endif  /* !G_DISABLE_CHECKS */

#ifndef REALLOC_0_WORKS
static gpointer
standard_realloc (gpointer mem,
                  gsize    n_bytes)
{
  if (!mem)
    return malloc (n_bytes);
  else
    return realloc (mem, n_bytes);
}
#endif  /* !REALLOC_0_WORKS */

#ifdef SANE_MALLOC_PROTOS
#  define standard_malloc       malloc
#  ifdef REALLOC_0_WORKS
#    define standard_realloc    realloc
#  endif /* REALLOC_0_WORKS */
#  define standard_free         free
#  define standard_calloc       calloc
#  define standard_try_malloc   malloc
#  define standard_try_realloc  realloc
#else   /* !SANE_MALLOC_PROTOS */
static gpointer
standard_malloc (gsize n_bytes)
{
  return malloc (n_bytes);
}
#  ifdef REALLOC_0_WORKS
static gpointer
standard_realloc (gpointer mem,
                  gsize    n_bytes)
{
  return realloc (mem, n_bytes);
}
#  endif /* REALLOC_0_WORKS */
static void
standard_free (gpointer mem)
{
  free (mem);
}
static gpointer
standard_calloc (gsize n_blocks,
                 gsize n_bytes)
{
  return calloc (n_blocks, n_bytes);
}
#define standard_try_malloc     standard_malloc
#define standard_try_realloc    standard_realloc
#endif  /* !SANE_MALLOC_PROTOS */


/* --- variables --- */
static GMemVTable glib_mem_vtable = {
  standard_malloc,
  standard_realloc,
  standard_free,
  standard_calloc,
  standard_try_malloc,
  standard_try_realloc,
};


/* --- functions --- */
gpointer
g_malloc (gulong n_bytes)
{
  if (n_bytes)
    {
      gpointer mem;

      mem = glib_mem_vtable.malloc (n_bytes);
      if (mem)
        return mem;

      g_error ("%s: failed to allocate %lu bytes", G_STRLOC, n_bytes);
    }

  return NULL;
}

gpointer
g_malloc0 (gulong n_bytes)
{
  if (n_bytes)
    {
      gpointer mem;

      mem = glib_mem_vtable.calloc (1, n_bytes);
      if (mem)
        return mem;

      g_error ("%s: failed to allocate %lu bytes", G_STRLOC, n_bytes);
    }

  return NULL;
}

gpointer
g_realloc (gpointer mem,
           gulong   n_bytes)
{
  if (n_bytes)
    {
      mem = glib_mem_vtable.realloc (mem, n_bytes);
      if (mem)
        return mem;

      g_error ("%s: failed to allocate %lu bytes", G_STRLOC, n_bytes);
    }

  if (mem)
    glib_mem_vtable.free (mem);

  return NULL;
}

void
g_free (gpointer mem)
{
  if (mem)
    glib_mem_vtable.free (mem);
}

gpointer
g_try_malloc (gulong n_bytes)
{
  if (n_bytes)
    return glib_mem_vtable.try_malloc (n_bytes);
  else
    return NULL;
}

gpointer
g_try_realloc (gpointer mem,
               gulong   n_bytes)
{
  if (n_bytes)
    return glib_mem_vtable.try_realloc (mem, n_bytes);

  if (mem)
    glib_mem_vtable.free (mem);

  return NULL;
}

static gpointer
fallback_calloc (gsize n_blocks,
                 gsize n_block_bytes)
{
  gsize l = n_blocks * n_block_bytes;
  gpointer mem = glib_mem_vtable.malloc (l);

  if (mem)
    memset (mem, 0, l);

  return mem;
}

static gboolean vtable_set = FALSE;

/**
 * g_mem_is_system_malloc:
 *
 * Checks whether the allocator used by g_malloc() is the system's
 * malloc implementation. If it returns %TRUE memory allocated with
 * <function>malloc()</function> can be used interchangeably with
 * memory allocated using g_malloc(). This function is useful for
 * avoiding an extra copy of allocated memory returned by a
 * non-GLib-based API.
 *
 * A different allocator can be set using g_mem_set_vtable().
 *
 * Return value: %TRUE if <function>malloc()</function> and g_malloc() can be mixed.
 **/
gboolean
g_mem_is_system_malloc (void)
{
  return !vtable_set;
}
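
/* Usage sketch (not part of the original source): when a non-GLib API
 * hands out malloc()ed memory, this check decides whether the block
 * can later be passed to g_free() directly or must be copied first.
 * take_foreign_string() is a hypothetical helper; str is assumed to
 * come from plain malloc():
 *
 *   gchar*
 *   take_foreign_string (char *str)
 *   {
 *     gchar *copy;
 *
 *     if (g_mem_is_system_malloc ())
 *       return str;
 *     copy = g_strdup (str);
 *     free (str);
 *     return copy;
 *   }
 */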

void
g_mem_set_vtable (GMemVTable *vtable)
{
  if (!vtable_set)
    {
      vtable_set = TRUE;
      if (vtable->malloc && vtable->realloc && vtable->free)
        {
          glib_mem_vtable.malloc = vtable->malloc;
          glib_mem_vtable.realloc = vtable->realloc;
          glib_mem_vtable.free = vtable->free;
          glib_mem_vtable.calloc = vtable->calloc ? vtable->calloc : fallback_calloc;
          glib_mem_vtable.try_malloc = vtable->try_malloc ? vtable->try_malloc : glib_mem_vtable.malloc;
          glib_mem_vtable.try_realloc = vtable->try_realloc ? vtable->try_realloc : glib_mem_vtable.realloc;
        }
      else
        g_warning (G_STRLOC ": memory allocation vtable lacks one of malloc(), realloc() or free()");
    }
  else
    g_warning (G_STRLOC ": memory allocation vtable can only be set once at startup");
}
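
/* Usage sketch (my_malloc, my_realloc and my_free are hypothetical
 * functions matching the GMemVTable member signatures; this block is
 * not part of the original source): installing a custom vtable routes
 * g_malloc(), g_realloc() and g_free() through the given functions.
 * Optional members may be left NULL; as implemented above, calloc
 * then falls back to fallback_calloc and the try_* members fall back
 * to the installed malloc/realloc.  The call must be the first GLib
 * call the program makes:
 *
 *   static GMemVTable my_vtable = {
 *     my_malloc, my_realloc, my_free,
 *     NULL, NULL, NULL
 *   };
 *
 *   int
 *   main (int argc, char *argv[])
 *   {
 *     g_mem_set_vtable (&my_vtable);
 *     ...
 *     return 0;
 *   }
 */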


/* --- memory profiling and checking --- */
#ifdef  G_DISABLE_CHECKS
GMemVTable *glib_mem_profiler_table = &glib_mem_vtable;
void
g_mem_profile (void)
{
}
#else   /* !G_DISABLE_CHECKS */
typedef enum {
  PROFILER_FREE         = 0,
  PROFILER_ALLOC        = 1,
  PROFILER_RELOC        = 2,
  PROFILER_ZINIT        = 4
} ProfilerJob;
static guint *profile_data = NULL;
static gulong profile_allocs = 0;
static gulong profile_mc_allocs = 0;
static gulong profile_zinit = 0;
static gulong profile_frees = 0;
static gulong profile_mc_frees = 0;
static GMutex *g_profile_mutex = NULL;
#ifdef  G_ENABLE_DEBUG
static volatile gulong g_trap_free_size = 0;
static volatile gulong g_trap_realloc_size = 0;
static volatile gulong g_trap_malloc_size = 0;
#endif  /* G_ENABLE_DEBUG */
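
/* The g_trap_* variables above are debugger hooks, not API: set one
 * of them from a debugger and the matching profiler routine below
 * hits G_BREAKPOINT() as soon as an allocation of exactly that size
 * is seen, e.g. (gdb commands, a sketch):
 *
 *   (gdb) set variable g_trap_malloc_size = 256
 *   (gdb) run
 */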

#define PROFILE_TABLE(f1,f2,f3)   ( ( ((f3) << 2) | ((f2) << 1) | (f1) ) * (MEM_PROFILE_TABLE_SIZE + 1))
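
/* Worked example of the indexing above: each (alloc, reloc, success)
 * combination owns a bank of MEM_PROFILE_TABLE_SIZE + 1 counters.  A
 * successful malloc() has f1 = 1 (alloc), f2 = 0 (not a realloc) and
 * f3 = 1 (success), so PROFILE_TABLE (1, 0, 1) =
 * ((1 << 2) | 0 | 1) * (4096 + 1) = 5 * 4097 = 20485, and an
 * allocation of n bytes (n < 4096) bumps profile_data[n + 20485].
 * There are eight such banks, hence the "* 8" in the table
 * allocations below.
 */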

static void
profiler_log (ProfilerJob job,
              gulong      n_bytes,
              gboolean    success)
{
  g_mutex_lock (g_profile_mutex);
  if (!profile_data)
    {
      profile_data = standard_malloc ((MEM_PROFILE_TABLE_SIZE + 1) * 8 * sizeof (profile_data[0]));
      if (!profile_data)        /* memory system kiddin' me, eh? */
        {
          g_mutex_unlock (g_profile_mutex);
          return;
        }
    }

  if (MEM_CHUNK_ROUTINE_COUNT () == 0)
    {
      if (n_bytes < MEM_PROFILE_TABLE_SIZE)
        profile_data[n_bytes + PROFILE_TABLE ((job & PROFILER_ALLOC) != 0,
                                              (job & PROFILER_RELOC) != 0,
                                              success != 0)] += 1;
      else
        profile_data[MEM_PROFILE_TABLE_SIZE + PROFILE_TABLE ((job & PROFILER_ALLOC) != 0,
                                                             (job & PROFILER_RELOC) != 0,
                                                             success != 0)] += 1;
      if (success)
        {
          if (job & PROFILER_ALLOC)
            {
              profile_allocs += n_bytes;
              if (job & PROFILER_ZINIT)
                profile_zinit += n_bytes;
            }
          else
            profile_frees += n_bytes;
        }
    }
  else if (success)
    {
      if (job & PROFILER_ALLOC)
        profile_mc_allocs += n_bytes;
      else
        profile_mc_frees += n_bytes;
    }
  g_mutex_unlock (g_profile_mutex);
}

static void
profile_print_locked (guint   *local_data,
                      gboolean success)
{
  gboolean need_header = TRUE;
  guint i;

  for (i = 0; i <= MEM_PROFILE_TABLE_SIZE; i++)
    {
      glong t_malloc = local_data[i + PROFILE_TABLE (1, 0, success)];
      glong t_realloc = local_data[i + PROFILE_TABLE (1, 1, success)];
      glong t_free = local_data[i + PROFILE_TABLE (0, 0, success)];
      glong t_refree = local_data[i + PROFILE_TABLE (0, 1, success)];

      if (!t_malloc && !t_realloc && !t_free && !t_refree)
        continue;
      else if (need_header)
        {
          need_header = FALSE;
          g_print (" blocks of | allocated  | freed      | allocated  | freed      | n_bytes   \n");
          g_print ("  n_bytes  | n_times by | n_times by | n_times by | n_times by | remaining \n");
          g_print ("           | malloc()   | free()     | realloc()  | realloc()  |           \n");
          g_print ("===========|============|============|============|============|===========\n");
        }
      if (i < MEM_PROFILE_TABLE_SIZE)
        g_print ("%10u | %10ld | %10ld | %10ld | %10ld |%+11ld\n",
                 i, t_malloc, t_free, t_realloc, t_refree,
                 (t_malloc - t_free + t_realloc - t_refree) * i);
      else /* i == MEM_PROFILE_TABLE_SIZE, the overflow bucket */
        g_print ("   >%6u | %10ld | %10ld | %10ld | %10ld |        ***\n",
                 i, t_malloc, t_free, t_realloc, t_refree);
    }
  if (need_header)
    g_print (" --- none ---\n");
}

void
g_mem_profile (void)
{
  guint local_data[(MEM_PROFILE_TABLE_SIZE + 1) * 8];
  gulong local_allocs;
  gulong local_zinit;
  gulong local_frees;
  gulong local_mc_allocs;
  gulong local_mc_frees;

  g_mutex_lock (g_profile_mutex);

  local_allocs = profile_allocs;
  local_zinit = profile_zinit;
  local_frees = profile_frees;
  local_mc_allocs = profile_mc_allocs;
  local_mc_frees = profile_mc_frees;

  if (!profile_data)
    {
      g_mutex_unlock (g_profile_mutex);
      return;
    }

  memcpy (local_data, profile_data,
          (MEM_PROFILE_TABLE_SIZE + 1) * 8 * sizeof (profile_data[0]));

  g_mutex_unlock (g_profile_mutex);

  g_print ("GLib Memory statistics (successful operations):\n");
  profile_print_locked (local_data, TRUE);
  g_print ("GLib Memory statistics (failing operations):\n");
  profile_print_locked (local_data, FALSE);
  g_print ("Total bytes: allocated=%lu, zero-initialized=%lu (%.2f%%), freed=%lu (%.2f%%), remaining=%lu\n",
           local_allocs,
           local_zinit,
           ((gdouble) local_zinit) / local_allocs * 100.0,
           local_frees,
           ((gdouble) local_frees) / local_allocs * 100.0,
           local_allocs - local_frees);
  g_print ("MemChunk bytes: allocated=%lu, freed=%lu (%.2f%%), remaining=%lu\n",
           local_mc_allocs,
           local_mc_frees,
           ((gdouble) local_mc_frees) / local_mc_allocs * 100.0,
           local_mc_allocs - local_mc_frees);
}

static gpointer
profiler_try_malloc (gsize n_bytes)
{
  gulong *p;

#ifdef  G_ENABLE_DEBUG
  if (g_trap_malloc_size == n_bytes)
    G_BREAKPOINT ();
#endif  /* G_ENABLE_DEBUG */

  p = standard_malloc (sizeof (gulong) * 2 + n_bytes);

  if (p)
    {
      p[0] = 0;         /* free count */
      p[1] = n_bytes;   /* length */
      profiler_log (PROFILER_ALLOC, n_bytes, TRUE);
      p += 2;
    }
  else
    profiler_log (PROFILER_ALLOC, n_bytes, FALSE);

  return p;
}

static gpointer
profiler_malloc (gsize n_bytes)
{
  gpointer mem = profiler_try_malloc (n_bytes);

  if (!mem)
    g_mem_profile ();

  return mem;
}

static gpointer
profiler_calloc (gsize n_blocks,
                 gsize n_block_bytes)
{
  gsize l = n_blocks * n_block_bytes;
  gulong *p;

#ifdef  G_ENABLE_DEBUG
  if (g_trap_malloc_size == l)
    G_BREAKPOINT ();
#endif  /* G_ENABLE_DEBUG */

  p = standard_calloc (1, sizeof (gulong) * 2 + l);

  if (p)
    {
      p[0] = 0;         /* free count */
      p[1] = l;         /* length */
      profiler_log (PROFILER_ALLOC | PROFILER_ZINIT, l, TRUE);
      p += 2;
    }
  else
    {
      profiler_log (PROFILER_ALLOC | PROFILER_ZINIT, l, FALSE);
      g_mem_profile ();
    }

  return p;
}

static void
profiler_free (gpointer mem)
{
  gulong *p = mem;

  p -= 2;
  if (p[0])     /* free count */
    {
      g_warning ("free(%p): memory has been freed %lu times already", p + 2, p[0]);
      profiler_log (PROFILER_FREE,
                    p[1],       /* length */
                    FALSE);
    }
  else
    {
#ifdef  G_ENABLE_DEBUG
      if (g_trap_free_size == p[1])
        G_BREAKPOINT ();
#endif  /* G_ENABLE_DEBUG */

      profiler_log (PROFILER_FREE,
                    p[1],       /* length */
                    TRUE);
      memset (p + 2, 0xaa, p[1]);

      /* for all those that miss standard_free (p); in this place, yes,
       * we do leak all memory when profiling, and that is intentional
       * to catch double frees. patch submissions are futile.
       */
    }
  p[0] += 1;
}

static gpointer
profiler_try_realloc (gpointer mem,
                      gsize    n_bytes)
{
  gulong *p = mem;

  p -= 2;

#ifdef  G_ENABLE_DEBUG
  if (g_trap_realloc_size == n_bytes)
    G_BREAKPOINT ();
#endif  /* G_ENABLE_DEBUG */

  if (mem && p[0])      /* free count */
    {
      g_warning ("realloc(%p, %lu): memory has been freed %lu times already", p + 2, (gulong) n_bytes, p[0]);
      profiler_log (PROFILER_ALLOC | PROFILER_RELOC, n_bytes, FALSE);

      return NULL;
    }
  else
    {
      p = standard_realloc (mem ? p : NULL, sizeof (gulong) * 2 + n_bytes);

      if (p)
        {
          if (mem)
            profiler_log (PROFILER_FREE | PROFILER_RELOC, p[1], TRUE);
          p[0] = 0;
          p[1] = n_bytes;
          profiler_log (PROFILER_ALLOC | PROFILER_RELOC, p[1], TRUE);
          p += 2;
        }
      else
        profiler_log (PROFILER_ALLOC | PROFILER_RELOC, n_bytes, FALSE);

      return p;
    }
}

static gpointer
profiler_realloc (gpointer mem,
                  gsize    n_bytes)
{
  mem = profiler_try_realloc (mem, n_bytes);

  if (!mem)
    g_mem_profile ();

  return mem;
}

static GMemVTable profiler_table = {
  profiler_malloc,
  profiler_realloc,
  profiler_free,
  profiler_calloc,
  profiler_try_malloc,
  profiler_try_realloc,
};
GMemVTable *glib_mem_profiler_table = &profiler_table;

#endif  /* !G_DISABLE_CHECKS */
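
/* Usage sketch (not part of the original source): to profile a
 * program with the table above, install it before any other GLib
 * call so that no allocation bypasses the bookkeeping, and dump the
 * statistics at exit:
 *
 *   int
 *   main (int argc, char *argv[])
 *   {
 *     g_mem_set_vtable (glib_mem_profiler_table);
 *     g_atexit (g_mem_profile);
 *     ...
 *     return 0;
 *   }
 */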


/* --- MemChunks --- */
typedef struct _GFreeAtom      GFreeAtom;
typedef struct _GMemArea       GMemArea;

struct _GFreeAtom
{
  GFreeAtom *next;
};

struct _GMemArea
{
  GMemArea *next;            /* the next mem area */
  GMemArea *prev;            /* the previous mem area */
  gulong index;              /* the current index into the "mem" array */
  gulong free;               /* the number of free bytes in this mem area */
  gulong allocated;          /* the number of atoms allocated from this area */
  gulong mark;               /* is this mem area marked for deletion */
  gchar mem[MEM_AREA_SIZE];  /* the mem array from which atoms get allocated
                              * the actual size of this array is determined by
                              *  the mem chunk "area_size". ANSI says that it
                              *  must be declared to be the maximum size it
                              *  can possibly be (even though the actual size
                              *  may be less).
                              */
};

struct _GMemChunk
{
  const gchar *name;         /* name of this MemChunk...used for debugging output */
  gint type;                 /* the type of MemChunk: ALLOC_ONLY or ALLOC_AND_FREE */
  gint num_mem_areas;        /* the number of memory areas */
  gint num_marked_areas;     /* the number of areas marked for deletion */
  guint atom_size;           /* the size of an atom */
  gulong area_size;          /* the size of a memory area */
  GMemArea *mem_area;        /* the current memory area */
  GMemArea *mem_areas;       /* a list of all the mem areas owned by this chunk */
  GMemArea *free_mem_area;   /* the free area...which is about to be destroyed */
  GFreeAtom *free_atoms;     /* the free atoms list */
  GTree *mem_tree;           /* tree of mem areas sorted by memory address */
  GMemChunk *next;           /* pointer to the next chunk */
  GMemChunk *prev;           /* pointer to the previous chunk */
};
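
/* Usage sketch (TileData is a made-up payload type; this block is not
 * part of the original source): a mem chunk hands out fixed-size
 * atoms, and releasing individual atoms requires a G_ALLOC_AND_FREE
 * chunk:
 *
 *   typedef struct { gint x, y; } TileData;
 *
 *   GMemChunk *chunk = g_mem_chunk_new ("tile data chunk",
 *                                       sizeof (TileData),
 *                                       1024, G_ALLOC_AND_FREE);
 *   TileData *tile = g_mem_chunk_alloc0 (chunk);
 *   ...
 *   g_mem_chunk_free (chunk, tile);
 *   g_mem_chunk_destroy (chunk);
 *
 * With G_ALLOC_ONLY, the mem_tree bookkeeping is skipped and memory
 * can only be released wholesale via g_mem_chunk_destroy() or
 * g_mem_chunk_reset().
 */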


#ifndef DISABLE_MEM_POOLS
static gulong g_mem_chunk_compute_size (gulong    size,
                                        gulong    min_size) G_GNUC_CONST;
static gint   g_mem_chunk_area_compare (GMemArea *a,
                                        GMemArea *b);
static gint   g_mem_chunk_area_search  (GMemArea *a,
                                        gchar    *addr);

/* here we can't use StaticMutexes, as they depend upon a working
 * g_malloc, the same holds true for StaticPrivate
 */
static GMutex        *mem_chunks_lock = NULL;
static GMemChunk     *mem_chunks = NULL;

GMemChunk*
g_mem_chunk_new (const gchar  *name,
                 gint          atom_size,
                 gulong        area_size,
                 gint          type)
{
  GMemChunk *mem_chunk;
  gulong rarea_size;

  g_return_val_if_fail (atom_size > 0, NULL);
  g_return_val_if_fail (area_size >= atom_size, NULL);

  ENTER_MEM_CHUNK_ROUTINE ();

  area_size = (area_size + atom_size - 1) / atom_size;
  area_size *= atom_size;

  mem_chunk = g_new (GMemChunk, 1);
  mem_chunk->name = name;
  mem_chunk->type = type;
  mem_chunk->num_mem_areas = 0;
  mem_chunk->num_marked_areas = 0;
  mem_chunk->mem_area = NULL;
  mem_chunk->free_mem_area = NULL;
  mem_chunk->free_atoms = NULL;
  mem_chunk->mem_tree = NULL;
  mem_chunk->mem_areas = NULL;
  mem_chunk->atom_size = atom_size;

  if (mem_chunk->type == G_ALLOC_AND_FREE)
    mem_chunk->mem_tree = g_tree_new ((GCompareFunc) g_mem_chunk_area_compare);

  if (mem_chunk->atom_size % G_MEM_ALIGN)
    mem_chunk->atom_size += G_MEM_ALIGN - (mem_chunk->atom_size % G_MEM_ALIGN);

  rarea_size = area_size + sizeof (GMemArea) - MEM_AREA_SIZE;
  rarea_size = g_mem_chunk_compute_size (rarea_size, atom_size + sizeof (GMemArea) - MEM_AREA_SIZE);
  mem_chunk->area_size = rarea_size - (sizeof (GMemArea) - MEM_AREA_SIZE);

  g_mutex_lock (mem_chunks_lock);
  mem_chunk->next = mem_chunks;
  mem_chunk->prev = NULL;
  if (mem_chunks)
    mem_chunks->prev = mem_chunk;
  mem_chunks = mem_chunk;
  g_mutex_unlock (mem_chunks_lock);

  LEAVE_MEM_CHUNK_ROUTINE ();

  return mem_chunk;
}
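
/* Worked example for the computation above, assuming G_MEM_ALIGN == 8
 * and sizeof (GMemArea) - MEM_AREA_SIZE == 36 (both are platform
 * dependent): for atom_size = 20 and area_size = 1000, area_size is
 * first rounded up to a multiple of the requested atom_size (1000
 * already is), and atom_size is padded to 24 for alignment.  Adding
 * the 36 byte area header gives 1036 bytes, which
 * g_mem_chunk_compute_size() snaps to the nearer power of two, 1024;
 * subtracting the header again leaves a usable area_size of 988
 * bytes.  Note that the rounding uses the unaligned atom_size, so
 * the final area need not hold a whole number of aligned atoms.
 */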

void
g_mem_chunk_destroy (GMemChunk *mem_chunk)
{
  GMemArea *mem_areas;
  GMemArea *temp_area;

  g_return_if_fail (mem_chunk != NULL);

  ENTER_MEM_CHUNK_ROUTINE ();

  mem_areas = mem_chunk->mem_areas;
  while (mem_areas)
    {
      temp_area = mem_areas;
      mem_areas = mem_areas->next;
      g_free (temp_area);
    }

  /* unlink the chunk from the global list while holding the lock,
   * so concurrent walkers never see half-updated links
   */
  g_mutex_lock (mem_chunks_lock);
  if (mem_chunk->next)
    mem_chunk->next->prev = mem_chunk->prev;
  if (mem_chunk->prev)
    mem_chunk->prev->next = mem_chunk->next;
  if (mem_chunk == mem_chunks)
    mem_chunks = mem_chunks->next;
  g_mutex_unlock (mem_chunks_lock);

  if (mem_chunk->type == G_ALLOC_AND_FREE)
    g_tree_destroy (mem_chunk->mem_tree);

  g_free (mem_chunk);

  LEAVE_MEM_CHUNK_ROUTINE ();
}

gpointer
g_mem_chunk_alloc (GMemChunk *mem_chunk)
{
  GMemArea *temp_area;
  gpointer mem;

  ENTER_MEM_CHUNK_ROUTINE ();

  g_return_val_if_fail (mem_chunk != NULL, NULL);

  while (mem_chunk->free_atoms)
    {
      /* Get the first piece of memory on the "free_atoms" list.
       * The atom doubles as its own list node, so popping it off the
       *  list both reserves the memory and advances "free_atoms" to
       *  the next element.
       */
      mem = mem_chunk->free_atoms;
      mem_chunk->free_atoms = mem_chunk->free_atoms->next;

      /* Determine which area this piece of memory is allocated from */
      temp_area = g_tree_search (mem_chunk->mem_tree,
                                 (GCompareFunc) g_mem_chunk_area_search,
                                 mem);

      /* If the area has been marked, then it is scheduled for destruction.
       * Removing this atom from the free list may make the area completely
       *  unused (its free byte count reaches the full area size), in which
       *  case the area can be freed.
       * If the area should be freed, we place it in "free_mem_area" rather
       *  than freeing it here, to make sure it isn't freed and then
       *  allocated again a few lines down.
       * If no new area gets allocated below, the "free_mem_area" will be
       *  freed then.
       * If there already is a "free_mem_area", we just free this mem area.
       */
      if (temp_area->mark)
        {
          /* Update the "free" memory available in that area */
          temp_area->free += mem_chunk->atom_size;

          if (temp_area->free == mem_chunk->area_size)
            {
              if (temp_area == mem_chunk->mem_area)
                mem_chunk->mem_area = NULL;

              if (mem_chunk->free_mem_area)
                {
                  mem_chunk->num_mem_areas -= 1;

                  if (temp_area->next)
                    temp_area->next->prev = temp_area->prev;
                  if (temp_area->prev)
                    temp_area->prev->next = temp_area->next;
                  if (temp_area == mem_chunk->mem_areas)
                    mem_chunk->mem_areas = mem_chunk->mem_areas->next;

                  if (mem_chunk->type == G_ALLOC_AND_FREE)
                    g_tree_remove (mem_chunk->mem_tree, temp_area);
                  g_free (temp_area);
                }
              else
                mem_chunk->free_mem_area = temp_area;

              mem_chunk->num_marked_areas -= 1;
            }
        }
      else
        {
          /* Update the number of allocated atoms count.
           */
          temp_area->allocated += 1;

          /* The area wasn't marked...return the memory
           */
          goto outa_here;
        }
    }

  /* If there isn't a current mem area or the current mem area is out of space
   *  then allocate a new mem area. We'll first check and see if we can use
   *  the "free_mem_area". Otherwise we'll just malloc the mem area.
   */
  if ((!mem_chunk->mem_area) ||
      ((mem_chunk->mem_area->index + mem_chunk->atom_size) > mem_chunk->area_size))
    {
      if (mem_chunk->free_mem_area)
        {
          mem_chunk->mem_area = mem_chunk->free_mem_area;
          mem_chunk->free_mem_area = NULL;
        }
      else
        {
#ifdef ENABLE_GC_FRIENDLY
          mem_chunk->mem_area = (GMemArea*) g_malloc0 (sizeof (GMemArea) -
                                                       MEM_AREA_SIZE +
                                                       mem_chunk->area_size);
#else /* !ENABLE_GC_FRIENDLY */
          mem_chunk->mem_area = (GMemArea*) g_malloc (sizeof (GMemArea) -
                                                      MEM_AREA_SIZE +
                                                      mem_chunk->area_size);
#endif /* ENABLE_GC_FRIENDLY */

          mem_chunk->num_mem_areas += 1;
          mem_chunk->mem_area->next = mem_chunk->mem_areas;
          mem_chunk->mem_area->prev = NULL;

          if (mem_chunk->mem_areas)
            mem_chunk->mem_areas->prev = mem_chunk->mem_area;
          mem_chunk->mem_areas = mem_chunk->mem_area;

          if (mem_chunk->type == G_ALLOC_AND_FREE)
            g_tree_insert (mem_chunk->mem_tree, mem_chunk->mem_area, mem_chunk->mem_area);
        }

      mem_chunk->mem_area->index = 0;
      mem_chunk->mem_area->free = mem_chunk->area_size;
      mem_chunk->mem_area->allocated = 0;
      mem_chunk->mem_area->mark = 0;
    }

  /* Get the memory and modify the state variables appropriately.
   */
  mem = (gpointer) &mem_chunk->mem_area->mem[mem_chunk->mem_area->index];
  mem_chunk->mem_area->index += mem_chunk->atom_size;
  mem_chunk->mem_area->free -= mem_chunk->atom_size;
  mem_chunk->mem_area->allocated += 1;

outa_here:

  LEAVE_MEM_CHUNK_ROUTINE ();

  return mem;
}

gpointer
g_mem_chunk_alloc0 (GMemChunk *mem_chunk)
{
  gpointer mem;

  mem = g_mem_chunk_alloc (mem_chunk);
  if (mem)
    {
      memset (mem, 0, mem_chunk->atom_size);
    }

  return mem;
}

void
g_mem_chunk_free (GMemChunk *mem_chunk,
                  gpointer   mem)
{
  GMemArea *temp_area;
  GFreeAtom *free_atom;

  g_return_if_fail (mem_chunk != NULL);
  g_return_if_fail (mem != NULL);

  ENTER_MEM_CHUNK_ROUTINE ();

#ifdef ENABLE_GC_FRIENDLY
  memset (mem, 0, mem_chunk->atom_size);
#endif /* ENABLE_GC_FRIENDLY */

  /* Don't do anything if this is an ALLOC_ONLY chunk
   */
  if (mem_chunk->type == G_ALLOC_AND_FREE)
    {
      /* Place the memory on the "free_atoms" list
       */
      free_atom = (GFreeAtom*) mem;
      free_atom->next = mem_chunk->free_atoms;
      mem_chunk->free_atoms = free_atom;

      temp_area = g_tree_search (mem_chunk->mem_tree,
                                 (GCompareFunc) g_mem_chunk_area_search,
                                 mem);

      temp_area->allocated -= 1;

      if (temp_area->allocated == 0)
        {
          temp_area->mark = 1;
          mem_chunk->num_marked_areas += 1;
        }
    }

  LEAVE_MEM_CHUNK_ROUTINE ();
}

/* This doesn't free the free_mem_area if there is one */
void
g_mem_chunk_clean (GMemChunk *mem_chunk)
{
  GMemArea *mem_area;
  GFreeAtom *prev_free_atom;
  GFreeAtom *temp_free_atom;
  gpointer mem;

  g_return_if_fail (mem_chunk != NULL);

  ENTER_MEM_CHUNK_ROUTINE ();

  if (mem_chunk->type == G_ALLOC_AND_FREE)
    {
      prev_free_atom = NULL;
      temp_free_atom = mem_chunk->free_atoms;

      while (temp_free_atom)
        {
          mem = (gpointer) temp_free_atom;

          mem_area = g_tree_search (mem_chunk->mem_tree,
                                    (GCompareFunc) g_mem_chunk_area_search,
                                    mem);

          /* If this mem area is marked for destruction then remove the
           *  atom from the free list, and free the area once it becomes
           *  completely unused.
           */
          if (mem_area->mark)
            {
              if (prev_free_atom)
                prev_free_atom->next = temp_free_atom->next;
              else
                mem_chunk->free_atoms = temp_free_atom->next;
              temp_free_atom = temp_free_atom->next;

              mem_area->free += mem_chunk->atom_size;
              if (mem_area->free == mem_chunk->area_size)
                {
                  mem_chunk->num_mem_areas -= 1;
                  mem_chunk->num_marked_areas -= 1;

                  if (mem_area->next)
                    mem_area->next->prev = mem_area->prev;
                  if (mem_area->prev)
                    mem_area->prev->next = mem_area->next;
                  if (mem_area == mem_chunk->mem_areas)
                    mem_chunk->mem_areas = mem_chunk->mem_areas->next;
                  if (mem_area == mem_chunk->mem_area)
                    mem_chunk->mem_area = NULL;

                  if (mem_chunk->type == G_ALLOC_AND_FREE)
                    g_tree_remove (mem_chunk->mem_tree, mem_area);
                  g_free (mem_area);
                }
            }
          else
            {
              prev_free_atom = temp_free_atom;
              temp_free_atom = temp_free_atom->next;
            }
        }
    }
  LEAVE_MEM_CHUNK_ROUTINE ();
}

void
g_mem_chunk_reset (GMemChunk *mem_chunk)
{
  GMemArea *mem_areas;
  GMemArea *temp_area;

  g_return_if_fail (mem_chunk != NULL);

  ENTER_MEM_CHUNK_ROUTINE ();

  mem_areas = mem_chunk->mem_areas;
  mem_chunk->num_mem_areas = 0;
  mem_chunk->mem_areas = NULL;
  mem_chunk->mem_area = NULL;

  while (mem_areas)
    {
      temp_area = mem_areas;
      mem_areas = mem_areas->next;
      g_free (temp_area);
    }

  mem_chunk->free_atoms = NULL;

  if (mem_chunk->mem_tree)
    {
      g_tree_destroy (mem_chunk->mem_tree);
      mem_chunk->mem_tree = g_tree_new ((GCompareFunc) g_mem_chunk_area_compare);
    }

  LEAVE_MEM_CHUNK_ROUTINE ();
}

void
g_mem_chunk_print (GMemChunk *mem_chunk)
{
  GMemArea *mem_areas;
  gulong mem;

  g_return_if_fail (mem_chunk != NULL);

  mem_areas = mem_chunk->mem_areas;
  mem = 0;

  while (mem_areas)
    {
      mem += mem_chunk->area_size - mem_areas->free;
      mem_areas = mem_areas->next;
    }

  g_log (G_LOG_DOMAIN, G_LOG_LEVEL_INFO,
         "%s: %ld bytes using %d mem areas",
         mem_chunk->name, mem, mem_chunk->num_mem_areas);
}

void
g_mem_chunk_info (void)
{
  GMemChunk *mem_chunk;
  gint count;

  count = 0;
  g_mutex_lock (mem_chunks_lock);
  mem_chunk = mem_chunks;
  while (mem_chunk)
    {
      count += 1;
      mem_chunk = mem_chunk->next;
    }
  g_mutex_unlock (mem_chunks_lock);

  g_log (G_LOG_DOMAIN, G_LOG_LEVEL_INFO, "%d mem chunks", count);

  g_mutex_lock (mem_chunks_lock);
  mem_chunk = mem_chunks;
  g_mutex_unlock (mem_chunks_lock);

  while (mem_chunk)
    {
      g_mem_chunk_print ((GMemChunk*) mem_chunk);
      mem_chunk = mem_chunk->next;
    }
}

void
g_blow_chunks (void)
{
  GMemChunk *mem_chunk;

  g_mutex_lock (mem_chunks_lock);
  mem_chunk = mem_chunks;
  g_mutex_unlock (mem_chunks_lock);
  while (mem_chunk)
    {
      g_mem_chunk_clean ((GMemChunk*) mem_chunk);
      mem_chunk = mem_chunk->next;
    }
}
static gulong
g_mem_chunk_compute_size (gulong size,
                          gulong min_size)
{
  gulong power_of_2;
  gulong lower, upper;

  power_of_2 = 16;
  while (power_of_2 < size)
    power_of_2 <<= 1;

  lower = power_of_2 >> 1;
  upper = power_of_2;

  if (size - lower < upper - size && lower >= min_size)
    return lower;
  else
    return upper;
}
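
/* Two worked examples for the rounding above: for size = 600 the loop
 * stops at power_of_2 = 1024, giving lower = 512 and upper = 1024;
 * 600 - 512 = 88 is less than 1024 - 600 = 424, so 512 is returned
 * (provided min_size allows it).  For size = 1000 the same bracket
 * applies, but 1000 - 512 = 488 exceeds 1024 - 1000 = 24, so the
 * result rounds up to 1024.
 */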

static gint
g_mem_chunk_area_compare (GMemArea *a,
                          GMemArea *b)
{
  if (a->mem > b->mem)
    return 1;
  else if (a->mem < b->mem)
    return -1;
  return 0;
}

static gint
g_mem_chunk_area_search (GMemArea *a,
                         gchar    *addr)
{
  if (a->mem <= addr)
    {
      if (addr < &a->mem[a->index])
        return 0;
      return 1;
    }
  return -1;
}

#else /* DISABLE_MEM_POOLS */

typedef struct {
  guint alloc_size;           /* the size of an atom */
}  GMinimalMemChunk;

GMemChunk*
g_mem_chunk_new (const gchar  *name,
                 gint          atom_size,
                 gulong        area_size,
                 gint          type)
{
  GMinimalMemChunk *mem_chunk;

  g_return_val_if_fail (atom_size > 0, NULL);

  mem_chunk = g_new (GMinimalMemChunk, 1);
  mem_chunk->alloc_size = atom_size;

  return ((GMemChunk*) mem_chunk);
}

void
g_mem_chunk_destroy (GMemChunk *mem_chunk)
{
  g_return_if_fail (mem_chunk != NULL);

  g_free (mem_chunk);
}

gpointer
g_mem_chunk_alloc (GMemChunk *mem_chunk)
{
  GMinimalMemChunk *minimal = (GMinimalMemChunk *) mem_chunk;

  g_return_val_if_fail (mem_chunk != NULL, NULL);

  return g_malloc (minimal->alloc_size);
}

gpointer
g_mem_chunk_alloc0 (GMemChunk *mem_chunk)
{
  GMinimalMemChunk *minimal = (GMinimalMemChunk *) mem_chunk;

  g_return_val_if_fail (mem_chunk != NULL, NULL);

  return g_malloc0 (minimal->alloc_size);
}

void
g_mem_chunk_free (GMemChunk *mem_chunk,
                  gpointer   mem)
{
  g_return_if_fail (mem_chunk != NULL);

  g_free (mem);
}

void    g_mem_chunk_clean       (GMemChunk *mem_chunk)  {}
void    g_mem_chunk_reset       (GMemChunk *mem_chunk)  {}
void    g_mem_chunk_print       (GMemChunk *mem_chunk)  {}
void    g_mem_chunk_info        (void)                  {}
void    g_blow_chunks           (void)                  {}

#endif /* DISABLE_MEM_POOLS */


/* generic allocators
 */
struct _GAllocator /* from gmem.c */
{
  gchar         *name;
  guint16        n_preallocs;
  guint          is_unused : 1;
  guint          type : 4;
  GAllocator    *last;
  GMemChunk     *mem_chunk;
  gpointer       dummy; /* implementation specific */
};

GAllocator*
g_allocator_new (const gchar *name,
                 guint        n_preallocs)
{
  GAllocator *allocator;

  g_return_val_if_fail (name != NULL, NULL);

  allocator = g_new0 (GAllocator, 1);
  allocator->name = g_strdup (name);
  allocator->n_preallocs = CLAMP (n_preallocs, 1, 65535);
  allocator->is_unused = TRUE;
  allocator->type = 0;
  allocator->last = NULL;
  allocator->mem_chunk = NULL;
  allocator->dummy = NULL;

  return allocator;
}
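
/* Usage sketch (not part of the original source): allocators are
 * consumed by the container implementations rather than used through
 * gmem.c directly; e.g. with the GLib 2.0 GList hooks, list nodes can
 * be drawn from a private allocator:
 *
 *   GAllocator *allocator = g_allocator_new ("list node heap", 1024);
 *
 *   g_list_push_allocator (allocator);
 *   ... g_list_append () and friends allocate from it here ...
 *   g_list_pop_allocator ();
 *
 *   g_allocator_free (allocator);
 *
 * g_allocator_free() asserts is_unused, so the allocator must have
 * been popped (and not pushed elsewhere) before it can be freed.
 */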

void
g_allocator_free (GAllocator *allocator)
{
  g_return_if_fail (allocator != NULL);
  g_return_if_fail (allocator->is_unused == TRUE);

  g_free (allocator->name);
  if (allocator->mem_chunk)
    g_mem_chunk_destroy (allocator->mem_chunk);

  g_free (allocator);
}

void
g_mem_init (void)
{
#ifndef DISABLE_MEM_POOLS
  mem_chunks_lock = g_mutex_new ();
#endif
#ifndef G_DISABLE_CHECKS
  mem_chunk_recursion = g_private_new (NULL);
  g_profile_mutex = g_mutex_new ();
#endif
}