/*
 * Copyright 1988, 1989 Hans-J. Boehm, Alan J. Demers
 * Copyright (c) 1991-1996 by Xerox Corporation. All rights reserved.
 * Copyright (c) 1998 by Silicon Graphics. All rights reserved.
 * Copyright (c) 1999 by Hewlett-Packard Company. All rights reserved.
 *
 * THIS MATERIAL IS PROVIDED AS IS, WITH ABSOLUTELY NO WARRANTY EXPRESSED
 * OR IMPLIED. ANY USE IS AT YOUR OWN RISK.
 *
 * Permission is hereby granted to use or copy this program
 * for any purpose, provided the above notices are retained on all copies.
 * Permission to modify the code and to distribute modified code is granted,
 * provided the above notices are retained, and a notice that the code was
 * modified is included with the above copyright notice.
 *
 */


# include "private/gc_priv.h"

# include <stdio.h>
# if !defined(MACOS) && !defined(MSWINCE)
#   include <signal.h>
#   include <sys/types.h>
# endif

/*
 * Separate free lists are maintained for different sized objects
 * up to MAXOBJSZ.
 * The call GC_allocobj(i,k) ensures that the freelist for
 * kind k objects of size i points to a non-empty
 * free list. It returns a pointer to the first entry on the free list.
 * In a single-threaded world, GC_allocobj may be called to allocate
 * an object of (small) size i as follows:
 *
 *            opp = &(GC_objfreelist[i]);
 *            if (*opp == 0) GC_allocobj(i, NORMAL);
 *            ptr = *opp;
 *            *opp = obj_link(ptr);
 *
 * Note that this is very fast if the free list is non-empty; it should
 * only involve the execution of 4 or 5 simple instructions.
 * All composite objects on freelists are cleared, except for
 * their first word.
 */

/*
 * The allocator uses GC_allochblk to allocate large chunks of objects.
 * These chunks all start on addresses which are multiples of
 * HBLKSZ.  Each allocated chunk has an associated header,
 * which can be located quickly based on the address of the chunk.
 * (See headers.c for details.)
 * This makes it possible to check quickly whether an
 * arbitrary address corresponds to an object administered by the
 * allocator.
 */
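/*
 * Illustrative sketch only (not part of the collector): given an
 * arbitrary address p, the block header can be located with the same
 * macros used later in this file, roughly
 *
 *            hdr * hhdr = HDR(HBLKPTR(p));
 *
 * and, roughly speaking, a null header means that p does not point
 * into the GC-managed heap.  See headers.c for the real lookup logic.
 */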

word GC_non_gc_bytes = 0;  /* Number of bytes not intended to be collected */

word GC_gc_no = 0;

#ifndef SMALL_CONFIG
  int GC_incremental = 0;  /* By default, stop the world. */
#endif

int GC_parallel = FALSE;   /* By default, parallel GC is off. */

int GC_full_freq = 19;     /* Every 20th collection is a full   */
                           /* collection, whether we need it    */
                           /* or not.                           */

GC_bool GC_need_full_gc = FALSE;
                        /* Need full GC due to heap growth.  */

#ifdef THREADS
  GC_bool GC_world_stopped = FALSE;
# define IF_THREADS(x) x
#else
# define IF_THREADS(x)
#endif

word GC_used_heap_size_after_full = 0;

char * GC_copyright[] =
{"Copyright 1988,1989 Hans-J. Boehm and Alan J. Demers ",
"Copyright (c) 1991-1995 by Xerox Corporation. All rights reserved. ",
"Copyright (c) 1996-1998 by Silicon Graphics. All rights reserved. ",
"Copyright (c) 1999-2001 by Hewlett-Packard Company. All rights reserved. ",
"THIS MATERIAL IS PROVIDED AS IS, WITH ABSOLUTELY NO WARRANTY",
" EXPRESSED OR IMPLIED. ANY USE IS AT YOUR OWN RISK.",
"See source code for details." };

# include "version.h"

#if defined(SAVE_CALL_CHAIN) && \
    !(defined(REDIRECT_MALLOC) && defined(GC_HAVE_BUILTIN_BACKTRACE))
# define SAVE_CALL_CHAIN_IN_GC
    /* This is only safe if the call chain save mechanism won't end up */
    /* calling GC_malloc.  The GNU C library documentation suggests    */
    /* that backtrace doesn't use malloc, but at least the initial     */
    /* call in some versions does seem to invoke the dynamic linker,   */
    /* which uses malloc.                                              */
#endif

/* some more variables */

extern signed_word GC_mem_found;  /* Number of reclaimed longwords  */
                                  /* after garbage collection       */

GC_bool GC_dont_expand = 0;

word GC_free_space_divisor = 3;

extern GC_bool GC_collection_in_progress();
                /* Collection is in progress, or was abandoned. */

int GC_never_stop_func GC_PROTO((void)) { return(0); }

unsigned long GC_time_limit = TIME_LIMIT;

CLOCK_TYPE GC_start_time;       /* Time at which we stopped world.     */
                                /* used only in GC_timeout_stop_func.  */

int GC_n_attempts = 0;          /* Number of attempts at finishing     */
                                /* collection within GC_time_limit.    */

#if defined(SMALL_CONFIG) || defined(NO_CLOCK)
#   define GC_timeout_stop_func GC_never_stop_func
#else
  int GC_timeout_stop_func GC_PROTO((void))
  {
    CLOCK_TYPE current_time;
    static unsigned count = 0;
    unsigned long time_diff;

    if ((count++ & 3) != 0) return(0);
    GET_TIME(current_time);
    time_diff = MS_TIME_DIFF(current_time,GC_start_time);
    if (time_diff >= GC_time_limit) {
#       ifdef CONDPRINT
          if (GC_print_stats) {
            GC_printf0("Abandoning stopped marking after ");
            GC_printf1("%lu msecs", (unsigned long)time_diff);
            GC_printf1("(attempt %ld)\n", (unsigned long) GC_n_attempts);
          }
#       endif
        return(1);
    }
    return(0);
  }
#endif /* !SMALL_CONFIG */
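/* Note that the function above consults the clock only on every       */
/* fourth call, which keeps the stop-function test cheap when it is    */
/* polled frequently during marking.                                   */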

/* Return the minimum number of words that must be allocated between   */
/* collections to amortize the collection cost.                        */
static word min_words_allocd()
{
# ifdef THREADS
    /* We punt, for now. */
    register signed_word stack_size = 10000;
# else
    int dummy;
    register signed_word stack_size = (ptr_t)(&dummy) - GC_stackbottom;
# endif
    word total_root_size;           /* includes double stack size,   */
                                    /* since the stack is expensive  */
                                    /* to scan.                      */
    word scan_size;             /* Estimate of memory to be scanned  */
                                /* during normal GC.                 */

    if (stack_size < 0) stack_size = -stack_size;
    total_root_size = 2 * stack_size + GC_root_size;
    scan_size = BYTES_TO_WORDS(GC_heapsize - GC_large_free_bytes
                               + (GC_large_free_bytes >> 2)
                                   /* use a bit more of large empty heap */
                               + total_root_size);
    if (TRUE_INCREMENTAL) {
        return scan_size / (2 * GC_free_space_divisor);
    } else {
        return scan_size / GC_free_space_divisor;
    }
}
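/* An illustrative reading of the above (not a separate tuning knob):  */
/* with the default GC_free_space_divisor of 3, roughly one third of   */
/* the estimated scannable memory must be allocated between            */
/* collections; under TRUE_INCREMENTAL the threshold is halved, so     */
/* incremental work is triggered about twice as often.                 */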

/* Return the number of words allocated, adjusted for explicit storage */
/* management, etc..  This number is used in deciding when to trigger  */
/* collections.                                                        */
word GC_adj_words_allocd()
{
    register signed_word result;
    register signed_word expl_managed =
            BYTES_TO_WORDS((long)GC_non_gc_bytes
                            - (long)GC_non_gc_bytes_at_gc);

    /* Don't count what was explicitly freed, or newly allocated for   */
    /* explicit management.  Note that deallocating an explicitly      */
    /* managed object should not alter result, assuming the client     */
    /* is playing by the rules.                                        */
    result = (signed_word)GC_words_allocd
             - (signed_word)GC_mem_freed
             + (signed_word)GC_finalizer_mem_freed - expl_managed;
    if (result > (signed_word)GC_words_allocd) {
        result = GC_words_allocd;
        /* probably client bug or unfortunate scheduling */
    }
    result += GC_words_finalized;
        /* We count objects enqueued for finalization as though they   */
        /* had been reallocated this round. Finalization is user       */
        /* visible progress. And if we don't count this, we have       */
        /* stability problems for programs that finalize all objects.  */
    if ((signed_word)(GC_words_wasted >> 3) < result)
        result += GC_words_wasted;
        /* This doesn't reflect useful work.  But if there is lots of  */
        /* new fragmentation, the same is probably true of the heap,   */
        /* and the collection will be correspondingly cheaper.         */
    if (result < (signed_word)(GC_words_allocd >> 3)) {
        /* Always count at least 1/8 of the allocations.  We don't want */
        /* to collect too infrequently, since that would inhibit        */
        /* coalescing of free storage blocks.                           */
        /* This also makes us partially robust against client bugs.     */
        return(GC_words_allocd >> 3);
    } else {
        return(result);
    }
}


/* Clear up a few frames worth of garbage left at the top of the stack. */
/* This is used to prevent us from accidentally treating garbage left   */
/* on the stack by other parts of the collector as roots.  This         */
/* differs from the code in misc.c, which actually tries to keep the    */
/* stack clear of long-lived, client-generated garbage.                 */
void GC_clear_a_few_frames()
{
#   define NWORDS 64
    word frames[NWORDS];
    /* Some compilers will warn that frames was set but never used.  */
    /* That's the whole idea ...                                      */
    register int i;

    for (i = 0; i < NWORDS; i++) frames[i] = 0;
}

/* Heap size at which we need a collection to avoid expanding past  */
/* limits used by blacklisting.                                      */
static word GC_collect_at_heapsize = (word)(-1);

/* Have we allocated enough to amortize a collection? */
GC_bool GC_should_collect()
{
    return(GC_adj_words_allocd() >= min_words_allocd()
           || GC_heapsize >= GC_collect_at_heapsize);
}
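/* Note: GC_collect_at_heapsize is set by GC_expand_hp_inner() below,  */
/* so the second test above forces a collection shortly before the     */
/* heap would grow past the limits assumed by the black-listing        */
/* machinery.                                                          */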


void GC_notify_full_gc()
{
    if (GC_start_call_back != (void (*) GC_PROTO((void)))0) {
        (*GC_start_call_back)();
    }
}

GC_bool GC_is_full_gc = FALSE;

/*
 * Initiate a garbage collection if appropriate.
 * Choose judiciously
 * between partial, full, and stop-world collections.
 * Assumes lock held, signals disabled.
 */
void GC_maybe_gc()
{
    static int n_partial_gcs = 0;

    if (GC_should_collect()) {
        if (!GC_incremental) {
            GC_gcollect_inner();
            n_partial_gcs = 0;
            return;
        } else {
#         ifdef PARALLEL_MARK
            GC_wait_for_reclaim();
#         endif
          if (GC_need_full_gc || n_partial_gcs >= GC_full_freq) {
#           ifdef CONDPRINT
              if (GC_print_stats) {
                GC_printf2(
                  "***>Full mark for collection %lu after %ld allocd bytes\n",
                  (unsigned long) GC_gc_no+1,
                  (long)WORDS_TO_BYTES(GC_words_allocd));
              }
#           endif
            GC_promote_black_lists();
            (void)GC_reclaim_all((GC_stop_func)0, TRUE);
            GC_clear_marks();
            n_partial_gcs = 0;
            GC_notify_full_gc();
            GC_is_full_gc = TRUE;
          } else {
            n_partial_gcs++;
          }
        }
        /* We try to mark with the world stopped.  */
        /* If we run out of time, this turns into  */
        /* incremental marking.                    */
#       ifndef NO_CLOCK
          if (GC_time_limit != GC_TIME_UNLIMITED) { GET_TIME(GC_start_time); }
#       endif
        if (GC_stopped_mark(GC_time_limit == GC_TIME_UNLIMITED?
                            GC_never_stop_func : GC_timeout_stop_func)) {
#           ifdef SAVE_CALL_CHAIN_IN_GC
                GC_save_callers(GC_last_stack);
#           endif
            GC_finish_collection();
        } else {
            if (!GC_is_full_gc) {
                /* Count this as the first attempt */
                GC_n_attempts++;
            }
        }
    }
}


/*
 * Stop the world garbage collection.  Assumes lock held, signals disabled.
 * If stop_func is not GC_never_stop_func, then abort if stop_func returns TRUE.
 * Return TRUE if we successfully completed the collection.
 */
GC_bool GC_try_to_collect_inner(stop_func)
GC_stop_func stop_func;
{
#   ifdef CONDPRINT
      CLOCK_TYPE start_time, current_time;
#   endif
    if (GC_dont_gc) return FALSE;
    if (GC_incremental && GC_collection_in_progress()) {
#     ifdef CONDPRINT
        if (GC_print_stats) {
          GC_printf0(
            "GC_try_to_collect_inner: finishing collection in progress\n");
        }
#     endif /* CONDPRINT */
      /* Just finish collection already in progress. */
        while(GC_collection_in_progress()) {
            if (stop_func()) return(FALSE);
            GC_collect_a_little_inner(1);
        }
    }
    if (stop_func == GC_never_stop_func) GC_notify_full_gc();
#   ifdef CONDPRINT
      if (GC_print_stats) {
        if (GC_print_stats) GET_TIME(start_time);
        GC_printf2(
          "Initiating full world-stop collection %lu after %ld allocd bytes\n",
          (unsigned long) GC_gc_no+1,
          (long)WORDS_TO_BYTES(GC_words_allocd));
      }
#   endif
    GC_promote_black_lists();
    /* Make sure all blocks have been reclaimed, so sweep routines  */
    /* don't see cleared mark bits.                                 */
    /* If we're guaranteed to finish, then this is unnecessary.     */
    /* In the find_leak case, we have to finish to guarantee that   */
    /* previously unmarked objects are not reported as leaks.       */
#   ifdef PARALLEL_MARK
        GC_wait_for_reclaim();
#   endif
    if ((GC_find_leak || stop_func != GC_never_stop_func)
        && !GC_reclaim_all(stop_func, FALSE)) {
        /* Aborted.  So far everything is still consistent. */
        return(FALSE);
    }
    GC_invalidate_mark_state();  /* Flush mark stack. */
    GC_clear_marks();
#   ifdef SAVE_CALL_CHAIN_IN_GC
        GC_save_callers(GC_last_stack);
#   endif
    GC_is_full_gc = TRUE;
    if (!GC_stopped_mark(stop_func)) {
      if (!GC_incremental) {
        /* We're partially done and have no way to complete or use  */
        /* current work.  Reestablish invariants as cheaply as      */
        /* possible.                                                */
        GC_invalidate_mark_state();
        GC_unpromote_black_lists();
      } /* else we claim the world is already still consistent.  We'll  */
        /* finish incrementally.                                        */
      return(FALSE);
    }
    GC_finish_collection();
#   if defined(CONDPRINT)
      if (GC_print_stats) {
        GET_TIME(current_time);
        GC_printf1("Complete collection took %lu msecs\n",
                   MS_TIME_DIFF(current_time,start_time));
      }
#   endif
    return(TRUE);
}



/*
 * Perform n units of garbage collection work.  A unit is intended to touch
 * roughly GC_RATE pages.  Every once in a while, we do more than that.
 * This needs to be a fairly large number with our current incremental
 * GC strategy, since otherwise we allocate too much during GC, and the
 * cleanup gets expensive.
 */
# define GC_RATE 10
# define MAX_PRIOR_ATTEMPTS 1
        /* Maximum number of prior attempts at world stop marking       */
        /* A value of 1 means that we finish the second time, no matter */
        /* how long it takes.  Doesn't count the initial root scan      */
        /* for a full GC.                                                */

int GC_deficit = 0;     /* The number of extra calls to GC_mark_some  */
                        /* that we have made.                          */
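                        /* Thus GC_collect_a_little_inner(n) below     */
                        /* attempts roughly GC_RATE*n calls to         */
                        /* GC_mark_some, less any deficit already      */
                        /* accumulated here.                           */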

void GC_collect_a_little_inner(n)
int n;
{
    register int i;

    if (GC_dont_gc) return;
    if (GC_incremental && GC_collection_in_progress()) {
        for (i = GC_deficit; i < GC_RATE*n; i++) {
            if (GC_mark_some((ptr_t)0)) {
                /* Need to finish a collection */
#               ifdef SAVE_CALL_CHAIN_IN_GC
                    GC_save_callers(GC_last_stack);
#               endif
#               ifdef PARALLEL_MARK
                    GC_wait_for_reclaim();
#               endif
                if (GC_n_attempts < MAX_PRIOR_ATTEMPTS
                    && GC_time_limit != GC_TIME_UNLIMITED) {
                  GET_TIME(GC_start_time);
                  if (!GC_stopped_mark(GC_timeout_stop_func)) {
                    GC_n_attempts++;
                    break;
                  }
                } else {
                  (void)GC_stopped_mark(GC_never_stop_func);
                }
                GC_finish_collection();
                break;
            }
        }
        if (GC_deficit > 0) GC_deficit -= GC_RATE*n;
        if (GC_deficit < 0) GC_deficit = 0;
    } else {
        GC_maybe_gc();
    }
}

int GC_collect_a_little GC_PROTO(())
{
    int result;
    DCL_LOCK_STATE;

    DISABLE_SIGNALS();
    LOCK();
    GC_collect_a_little_inner(1);
    result = (int)GC_collection_in_progress();
    UNLOCK();
    ENABLE_SIGNALS();
    if (!result && GC_debugging_started) GC_print_all_smashed();
    return(result);
}
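/*
 * Usage sketch (an illustration, not something the collector itself
 * does): a client that has enabled incremental collection can donate
 * otherwise idle time with
 *
 *            while (GC_collect_a_little()) {}
 *
 * since the result above remains nonzero while a collection is still
 * in progress.
 */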

/*
 * Assumes lock is held, signals are disabled.
 * We stop the world.
 * If stop_func() ever returns TRUE, we may fail and return FALSE.
 * Increment GC_gc_no if we succeed.
 */
GC_bool GC_stopped_mark(stop_func)
GC_stop_func stop_func;
{
    register int i;
    int dummy;
#   if defined(PRINTTIMES) || defined(CONDPRINT)
        CLOCK_TYPE start_time, current_time;
#   endif

#   ifdef PRINTTIMES
        GET_TIME(start_time);
#   endif
#   if defined(CONDPRINT) && !defined(PRINTTIMES)
        if (GC_print_stats) GET_TIME(start_time);
#   endif
#   if defined(REGISTER_LIBRARIES_EARLY)
        GC_cond_register_dynamic_libraries();
#   endif
    STOP_WORLD();
    IF_THREADS(GC_world_stopped = TRUE);
#   ifdef CONDPRINT
      if (GC_print_stats) {
        GC_printf1("--> Marking for collection %lu ",
                   (unsigned long) GC_gc_no + 1);
        GC_printf2("after %lu allocd bytes + %lu wasted bytes\n",
                   (unsigned long) WORDS_TO_BYTES(GC_words_allocd),
                   (unsigned long) WORDS_TO_BYTES(GC_words_wasted));
      }
#   endif
#   ifdef MAKE_BACK_GRAPH
      if (GC_print_back_height) {
        GC_build_back_graph();
      }
#   endif

    /* Mark from all roots.  */
        /* Minimize junk left in my registers and on the stack */
            GC_clear_a_few_frames();
            GC_noop(0,0,0,0,0,0);
        GC_initiate_gc();
        for(i = 0;;i++) {
            if ((*stop_func)()) {
#               ifdef CONDPRINT
                  if (GC_print_stats) {
                    GC_printf0("Abandoned stopped marking after ");
                    GC_printf1("%lu iterations\n",
                               (unsigned long)i);
                  }
#               endif
                GC_deficit = i; /* Give the mutator a chance. */
                IF_THREADS(GC_world_stopped = FALSE);
                START_WORLD();
                return(FALSE);
            }
            if (GC_mark_some((ptr_t)(&dummy))) break;
        }

    GC_gc_no++;
#   ifdef PRINTSTATS
      GC_printf2("Collection %lu reclaimed %ld bytes",
                 (unsigned long) GC_gc_no - 1,
                 (long)WORDS_TO_BYTES(GC_mem_found));
#   else
#     ifdef CONDPRINT
        if (GC_print_stats) {
          GC_printf1("Collection %lu finished", (unsigned long) GC_gc_no - 1);
        }
#     endif
#   endif /* !PRINTSTATS */
#   ifdef CONDPRINT
      if (GC_print_stats) {
        GC_printf1(" ---> heapsize = %lu bytes\n",
                   (unsigned long) GC_heapsize);
        /* Printf arguments may be pushed in funny places.  Clear the  */
        /* space.                                                      */
        GC_printf0("");
      }
#   endif /* CONDPRINT */

    /* Check all debugged objects for consistency */
        if (GC_debugging_started) {
            (*GC_check_heap)();
        }

    IF_THREADS(GC_world_stopped = FALSE);
    START_WORLD();
#   ifdef PRINTTIMES
        GET_TIME(current_time);
        GC_printf1("World-stopped marking took %lu msecs\n",
                   MS_TIME_DIFF(current_time,start_time));
#   else
#     ifdef CONDPRINT
        if (GC_print_stats) {
          GET_TIME(current_time);
          GC_printf1("World-stopped marking took %lu msecs\n",
                     MS_TIME_DIFF(current_time,start_time));
        }
#     endif
#   endif
    return(TRUE);
}

/* Set all mark bits for the free list whose first entry is q */
#ifdef __STDC__
  void GC_set_fl_marks(ptr_t q)
#else
  void GC_set_fl_marks(q)
  ptr_t q;
#endif
{
   ptr_t p;
   struct hblk * h, * last_h = 0;
   hdr *hhdr;
   int word_no;

   for (p = q; p != 0; p = obj_link(p)){
        h = HBLKPTR(p);
        if (h != last_h) {
          last_h = h;
          hhdr = HDR(h);
        }
        word_no = (((word *)p) - ((word *)h));
        set_mark_bit_from_hdr(hhdr, word_no);
   }
}

/* Clear all mark bits for the free list whose first entry is q */
/* Decrement GC_mem_found by number of words on free list.      */
#ifdef __STDC__
  void GC_clear_fl_marks(ptr_t q)
#else
  void GC_clear_fl_marks(q)
  ptr_t q;
#endif
{
   ptr_t p;
   struct hblk * h, * last_h = 0;
   hdr *hhdr;
   int word_no;

   for (p = q; p != 0; p = obj_link(p)){
        h = HBLKPTR(p);
        if (h != last_h) {
          last_h = h;
          hhdr = HDR(h);
        }
        word_no = (((word *)p) - ((word *)h));
        clear_mark_bit_from_hdr(hhdr, word_no);
#       ifdef GATHERSTATS
            GC_mem_found -= hhdr -> hb_sz;
#       endif
   }
}

/* Finish up a collection.  Assumes lock is held, signals are disabled, */
/* but the world is otherwise running.                                   */
void GC_finish_collection()
{
#   ifdef PRINTTIMES
        CLOCK_TYPE start_time;
        CLOCK_TYPE finalize_time;
        CLOCK_TYPE done_time;

        GET_TIME(start_time);
        finalize_time = start_time;
#   endif

#   ifdef GATHERSTATS
        GC_mem_found = 0;
#   endif
#   if defined(LINUX) && defined(__ELF__) && !defined(SMALL_CONFIG)
        if (getenv("GC_PRINT_ADDRESS_MAP") != 0) {
          GC_print_address_map();
        }
#   endif
    COND_DUMP;
    if (GC_find_leak) {
      /* Mark all objects on the free list.  All objects should be */
      /* marked when we're done.                                   */
        {
          register word size;           /* current object size  */
          int kind;
          ptr_t q;

          for (kind = 0; kind < GC_n_kinds; kind++) {
            for (size = 1; size <= MAXOBJSZ; size++) {
              q = GC_obj_kinds[kind].ok_freelist[size];
              if (q != 0) GC_set_fl_marks(q);
            }
          }
        }
        GC_start_reclaim(TRUE);
          /* The above just checks; it doesn't really reclaim anything. */
    }

    GC_finalize();
#   ifdef STUBBORN_ALLOC
      GC_clean_changing_list();
#   endif

#   ifdef PRINTTIMES
      GET_TIME(finalize_time);
#   endif

    if (GC_print_back_height) {
#     ifdef MAKE_BACK_GRAPH
        GC_traverse_back_graph();
#     else
#       ifndef SMALL_CONFIG
          GC_err_printf0("Back height not available: "
                         "Rebuild collector with -DMAKE_BACK_GRAPH\n");
#       endif
#     endif
    }

    /* Clear free list mark bits, in case they got accidentally marked   */
    /* (or GC_find_leak is set and they were intentionally marked).      */
    /* Also subtract memory remaining from GC_mem_found count.           */
    /* Note that composite objects on free list are cleared.             */
    /* Thus accidentally marking a free list is not a problem;  only     */
    /* objects on the list itself will be marked, and that's fixed here. */
      {
        register word size;             /* current object size        */
        register ptr_t q;               /* pointer to current object  */
        int kind;

        for (kind = 0; kind < GC_n_kinds; kind++) {
          for (size = 1; size <= MAXOBJSZ; size++) {
            q = GC_obj_kinds[kind].ok_freelist[size];
            if (q != 0) GC_clear_fl_marks(q);
          }
        }
      }


#   ifdef PRINTSTATS
        GC_printf1("Bytes recovered before sweep - f.l. count = %ld\n",
                   (long)WORDS_TO_BYTES(GC_mem_found));
#   endif
    /* Reconstruct free lists to contain everything not marked */
        GC_start_reclaim(FALSE);
        if (GC_is_full_gc) {
            GC_used_heap_size_after_full = USED_HEAP_SIZE;
            GC_need_full_gc = FALSE;
        } else {
            GC_need_full_gc =
              BYTES_TO_WORDS(USED_HEAP_SIZE - GC_used_heap_size_after_full)
              > min_words_allocd();
        }

#   ifdef PRINTSTATS
        GC_printf2(
                  "Immediately reclaimed %ld bytes in heap of size %lu bytes",
                  (long)WORDS_TO_BYTES(GC_mem_found),
                  (unsigned long)GC_heapsize);
#       ifdef USE_MUNMAP
          GC_printf1("(%lu unmapped)", GC_unmapped_bytes);
#       endif
        GC_printf2(
                "\n%lu (atomic) + %lu (composite) collectable bytes in use\n",
                (unsigned long)WORDS_TO_BYTES(GC_atomic_in_use),
                (unsigned long)WORDS_TO_BYTES(GC_composite_in_use));
#   endif

      GC_n_attempts = 0;
      GC_is_full_gc = FALSE;
    /* Reset or increment counters for next cycle */
      GC_words_allocd_before_gc += GC_words_allocd;
      GC_non_gc_bytes_at_gc = GC_non_gc_bytes;
      GC_words_allocd = 0;
      GC_words_wasted = 0;
      GC_mem_freed = 0;
      GC_finalizer_mem_freed = 0;

#   ifdef USE_MUNMAP
      GC_unmap_old();
#   endif
#   ifdef PRINTTIMES
        GET_TIME(done_time);
        GC_printf2("Finalize + initiate sweep took %lu + %lu msecs\n",
                   MS_TIME_DIFF(finalize_time,start_time),
                   MS_TIME_DIFF(done_time,finalize_time));
#   endif
}

/* Externally callable routine to invoke full, stop-world collection */
# if defined(__STDC__) || defined(__cplusplus)
    int GC_try_to_collect(GC_stop_func stop_func)
# else
    int GC_try_to_collect(stop_func)
    GC_stop_func stop_func;
# endif
{
    int result;
    DCL_LOCK_STATE;

    if (GC_debugging_started) GC_print_all_smashed();
    GC_INVOKE_FINALIZERS();
    DISABLE_SIGNALS();
    LOCK();
    ENTER_GC();
    if (!GC_is_initialized) GC_init_inner();
    /* Minimize junk left in my registers */
      GC_noop(0,0,0,0,0,0);
    result = (int)GC_try_to_collect_inner(stop_func);
    EXIT_GC();
    UNLOCK();
    ENABLE_SIGNALS();
    if(result) {
        if (GC_debugging_started) GC_print_all_smashed();
        GC_INVOKE_FINALIZERS();
    }
    return(result);
}

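/* Externally callable convenience routine: run a full, stop-world  */
/* collection with GC_never_stop_func, then print any errors the    */
/* collector has recorded.                                           */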
void GC_gcollect GC_PROTO(())
{
    (void)GC_try_to_collect(GC_never_stop_func);
    if (GC_have_errors) GC_print_all_errors();
}

word GC_n_heap_sects = 0;       /* Number of sections currently in heap. */

/*
 * Use the chunk of memory starting at p of size bytes as part of the heap.
 * Assumes p is HBLKSIZE aligned, and bytes is a multiple of HBLKSIZE.
 */
void GC_add_to_heap(p, bytes)
struct hblk *p;
word bytes;
{
    word words;
    hdr * phdr;

    if (GC_n_heap_sects >= MAX_HEAP_SECTS) {
        ABORT("Too many heap sections: Increase MAXHINCR or MAX_HEAP_SECTS");
    }
    phdr = GC_install_header(p);
    if (0 == phdr) {
        /* This is extremely unlikely. Can't add it.  This will       */
        /* almost certainly result in a 0 return from the allocator,  */
        /* which is entirely appropriate.                             */
        return;
    }
    GC_heap_sects[GC_n_heap_sects].hs_start = (ptr_t)p;
    GC_heap_sects[GC_n_heap_sects].hs_bytes = bytes;
    GC_n_heap_sects++;
    words = BYTES_TO_WORDS(bytes);
    phdr -> hb_sz = words;
    phdr -> hb_map = (unsigned char *)1;   /* A value != GC_invalid_map */
    phdr -> hb_flags = 0;
    GC_freehblk(p);
    GC_heapsize += bytes;
    if ((ptr_t)p <= (ptr_t)GC_least_plausible_heap_addr
        || GC_least_plausible_heap_addr == 0) {
        GC_least_plausible_heap_addr = (GC_PTR)((ptr_t)p - sizeof(word));
                /* Making it a little smaller than necessary prevents  */
                /* us from getting a false hit from the variable       */
                /* itself.  There's some unintentional reflection      */
                /* here.                                               */
    }
    if ((ptr_t)p + bytes >= (ptr_t)GC_greatest_plausible_heap_addr) {
        GC_greatest_plausible_heap_addr = (GC_PTR)((ptr_t)p + bytes);
    }
}

# if !defined(NO_DEBUGGING)
void GC_print_heap_sects()
{
    register unsigned i;

    GC_printf1("Total heap size: %lu\n", (unsigned long) GC_heapsize);
    for (i = 0; i < GC_n_heap_sects; i++) {
        unsigned long start = (unsigned long) GC_heap_sects[i].hs_start;
        unsigned long len = (unsigned long) GC_heap_sects[i].hs_bytes;
        struct hblk *h;
        unsigned nbl = 0;

        GC_printf3("Section %ld from 0x%lx to 0x%lx ", (unsigned long)i,
                   start, (unsigned long)(start + len));
        for (h = (struct hblk *)start; h < (struct hblk *)(start + len); h++) {
            if (GC_is_black_listed(h, HBLKSIZE)) nbl++;
        }
        GC_printf2("%lu/%lu blacklisted\n", (unsigned long)nbl,
                   (unsigned long)(len/HBLKSIZE));
    }
}
# endif

GC_PTR GC_least_plausible_heap_addr = (GC_PTR)ONES;
GC_PTR GC_greatest_plausible_heap_addr = 0;

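/* Pointer-valued max and min helpers, used below when adjusting the  */
/* plausible heap bounds in GC_expand_hp_inner().                     */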
ptr_t GC_max(x,y)
ptr_t x, y;
{
    return(x > y? x : y);
}

ptr_t GC_min(x,y)
ptr_t x, y;
{
    return(x < y? x : y);
}

# if defined(__STDC__) || defined(__cplusplus)
    void GC_set_max_heap_size(GC_word n)
# else
    void GC_set_max_heap_size(n)
    GC_word n;
# endif
{
    GC_max_heapsize = n;
}

GC_word GC_max_retries = 0;

/*
 * This explicitly increases the size of the heap.  It is used
 * internally, but may also be invoked from GC_expand_hp by the user.
 * The argument is in units of HBLKSIZE.
 * Tiny values of n are rounded up.
 * Returns FALSE on failure.
 */
GC_bool GC_expand_hp_inner(n)
word n;
{
    word bytes;
    struct hblk * space;
    word expansion_slop;        /* Number of bytes by which we expect the */
                                /* heap to expand soon.                   */

    if (n < MINHINCR) n = MINHINCR;
    bytes = n * HBLKSIZE;
    /* Make sure bytes is a multiple of GC_page_size */
      {
        word mask = GC_page_size - 1;
        bytes += mask;
        bytes &= ~mask;
      }

    if (GC_max_heapsize != 0 && GC_heapsize + bytes > GC_max_heapsize) {
        /* Exceeded self-imposed limit */
        return(FALSE);
    }
    space = GET_MEM(bytes);
    if( space == 0 ) {
#       ifdef CONDPRINT
          if (GC_print_stats) {
            GC_printf1("Failed to expand heap by %ld bytes\n",
                       (unsigned long)bytes);
          }
#       endif
        return(FALSE);
    }
#   ifdef CONDPRINT
      if (GC_print_stats) {
        GC_printf2("Increasing heap size by %lu after %lu allocated bytes\n",
                   (unsigned long)bytes,
                   (unsigned long)WORDS_TO_BYTES(GC_words_allocd));
#       ifdef UNDEFINED
          GC_printf1("Root size = %lu\n", GC_root_size);
          GC_print_block_list(); GC_print_hblkfreelist();
          GC_printf0("\n");
#       endif
      }
#   endif
    expansion_slop = WORDS_TO_BYTES(min_words_allocd()) + 4*MAXHINCR*HBLKSIZE;
    if (GC_last_heap_addr == 0 && !((word)space & SIGNB)
        || (GC_last_heap_addr != 0 && GC_last_heap_addr < (ptr_t)space)) {
        /* Assume the heap is growing up */
        GC_greatest_plausible_heap_addr =
            (GC_PTR)GC_max((ptr_t)GC_greatest_plausible_heap_addr,
                           (ptr_t)space + bytes + expansion_slop);
    } else {
        /* Heap is growing down */
        GC_least_plausible_heap_addr =
            (GC_PTR)GC_min((ptr_t)GC_least_plausible_heap_addr,
                           (ptr_t)space - expansion_slop);
    }
#   if defined(LARGE_CONFIG)
      if (((ptr_t)GC_greatest_plausible_heap_addr <= (ptr_t)space + bytes
           || (ptr_t)GC_least_plausible_heap_addr >= (ptr_t)space)
          && GC_heapsize > 0) {
        /* GC_add_to_heap will fix this, but ... */
        WARN("Too close to address space limit: blacklisting ineffective\n", 0);
      }
#   endif
    GC_prev_heap_addr = GC_last_heap_addr;
    GC_last_heap_addr = (ptr_t)space;
    GC_add_to_heap(space, bytes);
    /* Force GC before we are likely to allocate past expansion_slop */
      GC_collect_at_heapsize =
         GC_heapsize + expansion_slop - 2*MAXHINCR*HBLKSIZE;
#     if defined(LARGE_CONFIG)
        if (GC_collect_at_heapsize < GC_heapsize /* wrapped */)
          GC_collect_at_heapsize = (word)(-1);
#     endif
    return(TRUE);
}

/* Really returns a bool, but it's externally visible, so that's clumsy. */
/* Argument is in bytes.                                                  */
# if defined(__STDC__) || defined(__cplusplus)
  int GC_expand_hp(size_t bytes)
# else
  int GC_expand_hp(bytes)
  size_t bytes;
# endif
{
    int result;
    DCL_LOCK_STATE;

    DISABLE_SIGNALS();
    LOCK();
    if (!GC_is_initialized) GC_init_inner();
    result = (int)GC_expand_hp_inner(divHBLKSZ((word)bytes));
    if (result) GC_requested_heapsize += bytes;
    UNLOCK();
    ENABLE_SIGNALS();
    return(result);
}
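/* For example (illustrative only, not required by the collector): a  */
/* client that knows it will need several megabytes could call        */
/* GC_expand_hp(4*1024*1024) once at startup to pre-grow the heap and */
/* avoid repeated expansions.                                          */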

unsigned GC_fail_count = 0;
                        /* How many consecutive GC/expansion failures? */
                        /* Reset by GC_allochblk.                      */

GC_bool GC_collect_or_expand(needed_blocks, ignore_off_page)
word needed_blocks;
GC_bool ignore_off_page;
{
    if (!GC_incremental && !GC_dont_gc &&
        ((GC_dont_expand && GC_words_allocd > 0) || GC_should_collect())) {
      GC_gcollect_inner();
    } else {
      word blocks_to_get = GC_heapsize/(HBLKSIZE*GC_free_space_divisor)
                           + needed_blocks;

      if (blocks_to_get > MAXHINCR) {
          word slop;

          /* Get the minimum required to make it likely that we        */
          /* can satisfy the current request in the presence of black- */
          /* listing.  This will probably be more than MAXHINCR.       */
          if (ignore_off_page) {
              slop = 4;
          } else {
              slop = 2*divHBLKSZ(BL_LIMIT);
              if (slop > needed_blocks) slop = needed_blocks;
          }
          if (needed_blocks + slop > MAXHINCR) {
              blocks_to_get = needed_blocks + slop;
          } else {
              blocks_to_get = MAXHINCR;
          }
      }
      if (!GC_expand_hp_inner(blocks_to_get)
          && !GC_expand_hp_inner(needed_blocks)) {
        if (GC_fail_count++ < GC_max_retries) {
            WARN("Out of Memory! Trying to continue ...\n", 0);
            GC_gcollect_inner();
        } else {
#           if !defined(AMIGA) || !defined(GC_AMIGA_FASTALLOC)
              WARN("Out of Memory! Returning NIL!\n", 0);
#           endif
            return(FALSE);
        }
      } else {
#         ifdef CONDPRINT
            if (GC_fail_count && GC_print_stats) {
              GC_printf0("Memory available again ...\n");
            }
#         endif
      }
    }
    return(TRUE);
}

/*
 * Make sure the object free list for sz is not empty.
 * Return a pointer to the first object on the free list.
 * The object MUST BE REMOVED FROM THE FREE LIST BY THE CALLER.
 * Assumes we hold the allocator lock and signals are disabled.
 *
 */
ptr_t GC_allocobj(sz, kind)
word sz;
int kind;
{
    ptr_t * flh = &(GC_obj_kinds[kind].ok_freelist[sz]);
    GC_bool tried_minor = FALSE;

    if (sz == 0) return(0);

    while (*flh == 0) {
      ENTER_GC();
      /* Do our share of marking work */
        if(TRUE_INCREMENTAL) GC_collect_a_little_inner(1);
      /* Sweep blocks for objects of this size */
        GC_continue_reclaim(sz, kind);
      EXIT_GC();
      if (*flh == 0) {
        GC_new_hblk(sz, kind);
      }
      if (*flh == 0) {
        ENTER_GC();
        if (GC_incremental && GC_time_limit == GC_TIME_UNLIMITED
            && ! tried_minor ) {
            GC_collect_a_little_inner(1);
            tried_minor = TRUE;
        } else {
          if (!GC_collect_or_expand((word)1,FALSE)) {
            EXIT_GC();
            return(0);
          }
        }
        EXIT_GC();
      }
    }
    /* Successful allocation; reset failure count. */
    GC_fail_count = 0;

    return(*flh);
}