source: trunk/src/gcc/boehm-gc/malloc.c@2013

/*
 * Copyright 1988, 1989 Hans-J. Boehm, Alan J. Demers
 * Copyright (c) 1991-1994 by Xerox Corporation.  All rights reserved.
 * Copyright (c) 2000 by Hewlett-Packard Company.  All rights reserved.
 *
 * THIS MATERIAL IS PROVIDED AS IS, WITH ABSOLUTELY NO WARRANTY EXPRESSED
 * OR IMPLIED.  ANY USE IS AT YOUR OWN RISK.
 *
 * Permission is hereby granted to use or copy this program
 * for any purpose, provided the above notices are retained on all copies.
 * Permission to modify the code and to distribute modified code is granted,
 * provided the above notices are retained, and a notice that the code was
 * modified is included with the above copyright notice.
 */
/* Boehm, February 7, 1996 4:32 pm PST */

#include <stdio.h>
#include "private/gc_priv.h"

extern ptr_t GC_clear_stack();  /* in misc.c, behaves like identity */
void GC_extend_size_map();      /* in misc.c. */

/* Allocate reclaim list for kind:      */
/* Return TRUE on success               */
GC_bool GC_alloc_reclaim_list(kind)
register struct obj_kind * kind;
{
    struct hblk ** result = (struct hblk **)
                GC_scratch_alloc((MAXOBJSZ+1) * sizeof(struct hblk *));
    if (result == 0) return(FALSE);
    BZERO(result, (MAXOBJSZ+1)*sizeof(struct hblk *));
    kind -> ok_reclaim_list = result;
    return(TRUE);
}

/* Allocate a large block of size lw words.     */
/* The block is not cleared.                    */
/* Flags is 0 or IGNORE_OFF_PAGE.               */
ptr_t GC_alloc_large(lw, k, flags)
word lw;
int k;
unsigned flags;
{
    struct hblk * h;
    word n_blocks = OBJ_SZ_TO_BLOCKS(lw);
    ptr_t result;

    if (!GC_is_initialized) GC_init_inner();
    /* Do our share of marking work */
    if (GC_incremental && !GC_dont_gc)
        GC_collect_a_little_inner((int)n_blocks);
    h = GC_allochblk(lw, k, flags);
#   ifdef USE_MUNMAP
      if (0 == h) {
          GC_merge_unmapped();
          h = GC_allochblk(lw, k, flags);
      }
#   endif
    while (0 == h && GC_collect_or_expand(n_blocks, (flags != 0))) {
        h = GC_allochblk(lw, k, flags);
    }
    if (h == 0) {
        result = 0;
    } else {
        int total_bytes = BYTES_TO_WORDS(n_blocks * HBLKSIZE);
        if (n_blocks > 1) {
            GC_large_allocd_bytes += n_blocks * HBLKSIZE;
            if (GC_large_allocd_bytes > GC_max_large_allocd_bytes)
                GC_max_large_allocd_bytes = GC_large_allocd_bytes;
        }
        result = (ptr_t) (h -> hb_body);
        GC_words_wasted += total_bytes - lw;
    }
    return result;
}


/* Allocate a large block of size lw words.  Clear if appropriate.     */
ptr_t GC_alloc_large_and_clear(lw, k, flags)
word lw;
int k;
unsigned flags;
{
    ptr_t result = GC_alloc_large(lw, k, flags);
    word n_blocks = OBJ_SZ_TO_BLOCKS(lw);

    if (0 == result) return 0;
    if (GC_debugging_started || GC_obj_kinds[k].ok_init) {
        /* Clear the whole block, in case of GC_realloc call. */
        BZERO(result, n_blocks * HBLKSIZE);
    }
    return result;
}

/* Allocate lb bytes for an object of kind k.   */
/* Should not be used to directly allocate      */
/* objects such as STUBBORN objects that        */
/* require special handling on allocation.      */
/* First a version that assumes we already      */
/* hold lock:                                   */
ptr_t GC_generic_malloc_inner(lb, k)
register word lb;
register int k;
{
register word lw;
register ptr_t op;
register ptr_t *opp;

    if( SMALL_OBJ(lb) ) {
        register struct obj_kind * kind = GC_obj_kinds + k;
#       ifdef MERGE_SIZES
          lw = GC_size_map[lb];
#       else
          lw = ALIGNED_WORDS(lb);
          if (lw == 0) lw = MIN_WORDS;
#       endif
        opp = &(kind -> ok_freelist[lw]);
        if( (op = *opp) == 0 ) {
#           ifdef MERGE_SIZES
              if (GC_size_map[lb] == 0) {
                  if (!GC_is_initialized) GC_init_inner();
                  if (GC_size_map[lb] == 0) GC_extend_size_map(lb);
                  return(GC_generic_malloc_inner(lb, k));
              }
#           else
              if (!GC_is_initialized) {
                  GC_init_inner();
                  return(GC_generic_malloc_inner(lb, k));
              }
#           endif
            if (kind -> ok_reclaim_list == 0) {
                if (!GC_alloc_reclaim_list(kind)) goto out;
            }
            op = GC_allocobj(lw, k);
            if (op == 0) goto out;
        }
        /* Here everything is in a consistent state.    */
        /* We assume the following assignment is        */
        /* atomic.  If we get aborted                   */
        /* after the assignment, we lose an object,     */
        /* but that's benign.                           */
        /* Volatile declarations may need to be added   */
        /* to prevent the compiler from breaking things.*/
        /* If we only execute the second of the         */
        /* following assignments, we lose the free      */
        /* list, but that should still be OK, at least  */
        /* for garbage collected memory.                */
        *opp = obj_link(op);
        obj_link(op) = 0;
    } else {
        lw = ROUNDED_UP_WORDS(lb);
        op = (ptr_t)GC_alloc_large_and_clear(lw, k, 0);
    }
    GC_words_allocd += lw;

out:
    return op;
}

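/* Illustrative sketch (not part of the original file): the fast path above
 * keeps one free list per object size, threaded through the first word of
 * each free object via obj_link().  The guarded example below shows the
 * same push/pop idea with plain C types; the names `node`, `free_list`,
 * `push_free` and `pop_free` are invented for illustration only and do
 * not exist in the collector.
 */
#if 0   /* example only, never compiled */
    typedef union node { union node *next; char payload[16]; } node;
    static node *free_list = 0;

    static void push_free(node *p) {
        p -> next = free_list;          /* thread link through object body */
        free_list = p;
    }

    static node *pop_free(void) {
        node *p = free_list;
        if (p != 0) {
            free_list = p -> next;      /* unlink, as in "*opp = obj_link(op)" */
            p -> next = 0;              /* clear link, as in "obj_link(op) = 0" */
        }
        return p;
    }
#endif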
/* Allocate a composite object of size n bytes.  The caller guarantees  */
/* that pointers past the first page are not relevant.  Caller holds    */
/* allocation lock.                                                     */
ptr_t GC_generic_malloc_inner_ignore_off_page(lb, k)
register size_t lb;
register int k;
{
    register word lw;
    ptr_t op;

    if (lb <= HBLKSIZE)
        return(GC_generic_malloc_inner((word)lb, k));
    lw = ROUNDED_UP_WORDS(lb);
    op = (ptr_t)GC_alloc_large_and_clear(lw, k, IGNORE_OFF_PAGE);
    GC_words_allocd += lw;
    return op;
}

ptr_t GC_generic_malloc(lb, k)
register word lb;
register int k;
{
    ptr_t result;
    DCL_LOCK_STATE;

    GC_INVOKE_FINALIZERS();
    if (SMALL_OBJ(lb)) {
        DISABLE_SIGNALS();
        LOCK();
        result = GC_generic_malloc_inner((word)lb, k);
        UNLOCK();
        ENABLE_SIGNALS();
    } else {
        word lw;
        word n_blocks;
        GC_bool init;
        lw = ROUNDED_UP_WORDS(lb);
        n_blocks = OBJ_SZ_TO_BLOCKS(lw);
        init = GC_obj_kinds[k].ok_init;
        DISABLE_SIGNALS();
        LOCK();
        result = (ptr_t)GC_alloc_large(lw, k, 0);
        if (0 != result) {
            if (GC_debugging_started) {
                BZERO(result, n_blocks * HBLKSIZE);
            } else {
#               ifdef THREADS
                    /* Clear any memory that might be used for GC descriptors */
                    /* before we release the lock.                            */
                    ((word *)result)[0] = 0;
                    ((word *)result)[1] = 0;
                    ((word *)result)[lw-1] = 0;
                    ((word *)result)[lw-2] = 0;
#               endif
            }
        }
        GC_words_allocd += lw;
        UNLOCK();
        ENABLE_SIGNALS();
        if (init && !GC_debugging_started && 0 != result) {
            BZERO(result, n_blocks * HBLKSIZE);
        }
    }
    if (0 == result) {
        return((*GC_oom_fn)(lb));
    } else {
        return(result);
    }
}


#define GENERAL_MALLOC(lb,k) \
    (GC_PTR)GC_clear_stack(GC_generic_malloc((word)lb, k))
/* We make the GC_clear_stack call a tail call, hoping to get more of  */
/* the stack.                                                          */

/* Allocate lb bytes of atomic (pointerfree) data */
# ifdef __STDC__
    GC_PTR GC_malloc_atomic(size_t lb)
# else
    GC_PTR GC_malloc_atomic(lb)
    size_t lb;
# endif
{
register ptr_t op;
register ptr_t * opp;
register word lw;
DCL_LOCK_STATE;

    if( EXPECT(SMALL_OBJ(lb), 1) ) {
#       ifdef MERGE_SIZES
          lw = GC_size_map[lb];
#       else
          lw = ALIGNED_WORDS(lb);
#       endif
        opp = &(GC_aobjfreelist[lw]);
        FASTLOCK();
        if( EXPECT(!FASTLOCK_SUCCEEDED() || (op = *opp) == 0, 0) ) {
            FASTUNLOCK();
            return(GENERAL_MALLOC((word)lb, PTRFREE));
        }
        /* See above comment on signals. */
        *opp = obj_link(op);
        GC_words_allocd += lw;
        FASTUNLOCK();
        return((GC_PTR) op);
    } else {
        return(GENERAL_MALLOC((word)lb, PTRFREE));
    }
}

/* Allocate lb bytes of composite (pointerful) data */
# ifdef __STDC__
    GC_PTR GC_malloc(size_t lb)
# else
    GC_PTR GC_malloc(lb)
    size_t lb;
# endif
{
register ptr_t op;
register ptr_t *opp;
register word lw;
DCL_LOCK_STATE;

    if( EXPECT(SMALL_OBJ(lb), 1) ) {
#       ifdef MERGE_SIZES
          lw = GC_size_map[lb];
#       else
          lw = ALIGNED_WORDS(lb);
#       endif
        opp = &(GC_objfreelist[lw]);
        FASTLOCK();
        if( EXPECT(!FASTLOCK_SUCCEEDED() || (op = *opp) == 0, 0) ) {
            FASTUNLOCK();
            return(GENERAL_MALLOC((word)lb, NORMAL));
        }
        /* See above comment on signals. */
        *opp = obj_link(op);
        obj_link(op) = 0;
        GC_words_allocd += lw;
        FASTUNLOCK();
        return((GC_PTR) op);
    } else {
        return(GENERAL_MALLOC((word)lb, NORMAL));
    }
}

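/* Illustrative usage sketch (not part of the original file): a client of
 * the collector calls the entry points defined above directly.  GC_malloc
 * returns pointer-containing ("composite") memory that the collector
 * scans, GC_malloc_atomic returns pointer-free memory that is not
 * scanned, and GC_free (defined further below) is optional, since
 * unreachable objects are reclaimed automatically.  The types and helper
 * names below are invented for illustration only.
 */
#if 0   /* example only, never compiled */
#   include "gc.h"

    struct pair { struct pair *car, *cdr; };

    static struct pair * make_pair(struct pair *car, struct pair *cdr)
    {
        /* Composite data: the collector must see the embedded pointers. */
        struct pair *p = (struct pair *)GC_malloc(sizeof(struct pair));
        p -> car = car;
        p -> cdr = cdr;
        return p;
    }

    static char * make_buffer(size_t n)
    {
        /* Pointer-free data: atomic allocation avoids scanning it. */
        return (char *)GC_malloc_atomic(n);
    }
#endif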
# ifdef REDIRECT_MALLOC
# ifdef __STDC__
    GC_PTR malloc(size_t lb)
# else
    GC_PTR malloc(lb)
    size_t lb;
# endif
  {
    /* It might help to manually inline the GC_malloc call here.       */
    /* But any decent compiler should reduce the extra procedure call  */
    /* to at most a jump instruction in this case.                     */
#   if defined(I386) && defined(GC_SOLARIS_THREADS)
      /*
       * Thread initialisation can call malloc before
       * we're ready for it.
       * It's not clear that this is enough to help matters.
       * The thread implementation may well call malloc at other
       * inopportune times.
       */
      if (!GC_is_initialized) return sbrk(lb);
#   endif /* I386 && GC_SOLARIS_THREADS */
    return((GC_PTR)REDIRECT_MALLOC(lb));
  }

# ifdef __STDC__
    GC_PTR calloc(size_t n, size_t lb)
# else
    GC_PTR calloc(n, lb)
    size_t n, lb;
# endif
  {
    return((GC_PTR)REDIRECT_MALLOC(n*lb));
  }

# include <string.h>
# ifdef __STDC__
    char *strdup(const char *s)
# else
    char *strdup(s)
    char *s;
# endif
  {
    size_t len = strlen(s) + 1;         /* includes the terminating NUL */
    char * result = ((char *)REDIRECT_MALLOC(len));
    BCOPY(s, result, len);
    return result;
  }
# endif /* REDIRECT_MALLOC */

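/* Note (not part of the original file): REDIRECT_MALLOC is a build-time
 * macro, typically set on the compiler command line, naming the collector
 * routine to which the standard malloc/calloc/strdup entry points above
 * should forward.  A common configuration, shown below purely as an
 * assumed example, redirects them to GC_malloc so that code calling plain
 * malloc() still gets collectable memory:
 *
 *     cc -DREDIRECT_MALLOC=GC_malloc -DIGNORE_FREE -c malloc.c
 *
 * GC_malloc_uncollectable is another commonly used target; the exact
 * flags are configuration-dependent and not prescribed by this file.
 */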
/* Explicitly deallocate an object p.                          */
# ifdef __STDC__
    void GC_free(GC_PTR p)
# else
    void GC_free(p)
    GC_PTR p;
# endif
{
    register struct hblk *h;
    register hdr *hhdr;
    register signed_word sz;
    register ptr_t * flh;
    register int knd;
    register struct obj_kind * ok;
    DCL_LOCK_STATE;

    if (p == 0) return;
        /* Required by ANSI.  It's not my fault ...     */
    h = HBLKPTR(p);
    hhdr = HDR(h);
#   if defined(REDIRECT_MALLOC) && \
        (defined(GC_SOLARIS_THREADS) || defined(GC_LINUX_THREADS) \
         || defined(__MINGW32__)) /* Should this be MSWIN32 in general? */
        /* For Solaris, we have to redirect malloc calls during        */
        /* initialization.  For the others, this seems to happen       */
        /* implicitly.                                                 */
        /* Don't try to deallocate that memory.                        */
        if (0 == hhdr) return;
#   endif
    knd = hhdr -> hb_obj_kind;
    sz = hhdr -> hb_sz;
    ok = &GC_obj_kinds[knd];
    if (EXPECT((sz <= MAXOBJSZ), 1)) {
#       ifdef THREADS
            DISABLE_SIGNALS();
            LOCK();
#       endif
        GC_mem_freed += sz;
        /* A signal here can make GC_mem_freed and GC_non_gc_bytes     */
        /* inconsistent.  We claim this is benign.                     */
        if (IS_UNCOLLECTABLE(knd)) GC_non_gc_bytes -= WORDS_TO_BYTES(sz);
            /* It's unnecessary to clear the mark bit.  If the          */
            /* object is reallocated, it doesn't matter.  Otherwise the */
            /* collector will do it, since it's on a free list.         */
        if (ok -> ok_init) {
            BZERO((word *)p + 1, WORDS_TO_BYTES(sz-1));
        }
        flh = &(ok -> ok_freelist[sz]);
        obj_link(p) = *flh;
        *flh = (ptr_t)p;
#       ifdef THREADS
            UNLOCK();
            ENABLE_SIGNALS();
#       endif
    } else {
        DISABLE_SIGNALS();
        LOCK();
        GC_mem_freed += sz;
        if (IS_UNCOLLECTABLE(knd)) GC_non_gc_bytes -= WORDS_TO_BYTES(sz);
        GC_freehblk(h);
        UNLOCK();
        ENABLE_SIGNALS();
    }
}

/* Explicitly deallocate an object p when we already hold lock.        */
/* Only used for internally allocated objects, so we can take some     */
/* shortcuts.                                                          */
#ifdef THREADS
void GC_free_inner(GC_PTR p)
{
    register struct hblk *h;
    register hdr *hhdr;
    register signed_word sz;
    register ptr_t * flh;
    register int knd;
    register struct obj_kind * ok;
    DCL_LOCK_STATE;

    h = HBLKPTR(p);
    hhdr = HDR(h);
    knd = hhdr -> hb_obj_kind;
    sz = hhdr -> hb_sz;
    ok = &GC_obj_kinds[knd];
    if (sz <= MAXOBJSZ) {
        GC_mem_freed += sz;
        if (IS_UNCOLLECTABLE(knd)) GC_non_gc_bytes -= WORDS_TO_BYTES(sz);
        if (ok -> ok_init) {
            BZERO((word *)p + 1, WORDS_TO_BYTES(sz-1));
        }
        flh = &(ok -> ok_freelist[sz]);
        obj_link(p) = *flh;
        *flh = (ptr_t)p;
    } else {
        GC_mem_freed += sz;
        if (IS_UNCOLLECTABLE(knd)) GC_non_gc_bytes -= WORDS_TO_BYTES(sz);
        GC_freehblk(h);
    }
}
#endif /* THREADS */

# ifdef REDIRECT_MALLOC
# ifdef __STDC__
    void free(GC_PTR p)
# else
    void free(p)
    GC_PTR p;
# endif
  {
#   ifndef IGNORE_FREE
      GC_free(p);
#   endif
  }
# endif /* REDIRECT_MALLOC */