source: trunk/gcc/libjava/boehm.cc@3327

Last change on this file since 3327 was 1392, checked in by bird, 21 years ago

This commit was generated by cvs2svn to compensate for changes in r1391,
which included commits to RCS files with non-trunk default branches.

  • Property cvs2svn:cvs-rev set to 1.1.1.2
  • Property svn:eol-style set to native
  • Property svn:executable set to *
File size: 16.4 KB
// boehm.cc - interface between libjava and Boehm GC.

/* Copyright (C) 1998, 1999, 2000, 2001, 2002  Free Software Foundation

   This file is part of libgcj.

This software is copyrighted work licensed under the terms of the
Libgcj License.  Please consult the file "LIBGCJ_LICENSE" for
details.  */

#include <config.h>

#include <stdio.h>

#include <jvm.h>
#include <gcj/cni.h>

#include <java/lang/Class.h>
#include <java/lang/reflect/Modifier.h>
#include <java-interp.h>

// More nastiness: the GC wants to define TRUE and FALSE.  We don't
// need the Java definitions (themselves a hack), so we undefine them.
#undef TRUE
#undef FALSE

extern "C"
{
#include <private/gc_pmark.h>
#include <gc_gcj.h>

#ifdef THREAD_LOCAL_ALLOC
# define GC_REDIRECT_TO_LOCAL
# include <gc_local_alloc.h>
#endif

  // These aren't declared in any Boehm GC header.
  void GC_finalize_all (void);
  ptr_t GC_debug_generic_malloc (size_t size, int k, GC_EXTRA_PARAMS);
};

#define MAYBE_MARK(Obj, Top, Limit, Source, Exit)  \
  Top=GC_MARK_AND_PUSH((GC_PTR)Obj, Top, Limit, (GC_PTR *)Source)
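
// MAYBE_MARK pushes Obj onto the mark stack (via GC_MARK_AND_PUSH) when it
// appears to point into the collected heap, and leaves the updated
// mark-stack pointer in Top.  The Exit label argument is not used by this
// definition, so the distinct labels passed below have no effect.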

// `kind' index used when allocating Java arrays.
static int array_kind_x;

// Freelist used for Java arrays.
static ptr_t *array_free_list;

// Lock used to protect access to Boehm's GC_enable/GC_disable functions.
static _Jv_Mutex_t disable_gc_mutex;



// This is called by the GC during the mark phase.  It marks a Java
// object.  We use `void *' arguments and return, and not what the
// Boehm GC wants, to avoid pollution in our headers.
void *
_Jv_MarkObj (void *addr, void *msp, void *msl, void * /* env */)
{
  mse *mark_stack_ptr = (mse *) msp;
  mse *mark_stack_limit = (mse *) msl;
  jobject obj = (jobject) addr;

  // FIXME: if env is 1, this object was allocated through the debug
  // interface, and addr points to the beginning of the debug header.
  // In that case, we should really add the size of the header to addr.

  _Jv_VTable *dt = *(_Jv_VTable **) addr;
  // The object might not yet have its vtable set, or it might
  // really be an object on the freelist.  In either case, the vtable slot
  // will either be 0, or it will point to a cleared object.
  // This assumes Java objects have size at least 3 words,
  // including the header.  But this should remain true, since this
  // should only be used with debugging allocation or with large objects.
  if (__builtin_expect (! dt || !(dt -> get_finalizer()), false))
    return mark_stack_ptr;
  jclass klass = dt->clas;
  ptr_t p;

# ifndef JV_HASH_SYNCHRONIZATION
  // Every object has a sync_info pointer.
  p = (ptr_t) obj->sync_info;
  MAYBE_MARK (p, mark_stack_ptr, mark_stack_limit, obj, o1label);
# endif
  // Mark the object's class.
  p = (ptr_t) klass;
  MAYBE_MARK (p, mark_stack_ptr, mark_stack_limit, obj, o2label);

  if (__builtin_expect (klass == &java::lang::Class::class$, false))
    {
      // Currently we allocate some of the memory referenced from class
      // objects as pointer-free memory, and then mark it more
      // intelligently here.  We ensure that the ClassClass mark
      // descriptor forces invocation of this procedure.
      // Correctness of this is subtle, but it looks OK to me for now.
      // For the incremental collector, we need to make sure that the
      // class object is written whenever any of the subobjects are
      // altered and may need rescanning.  This may be tricky during
      // construction, and this may not be the right way to do this with
      // incremental collection.
      // If we overflow the mark stack, we will rescan the class object,
      // so we should be OK.  The same applies if we redo the mark phase
      // because win32 unmapped part of our root set.  - HB
      jclass c = (jclass) addr;

      p = (ptr_t) c->name;
      MAYBE_MARK (p, mark_stack_ptr, mark_stack_limit, c, c3label);
      p = (ptr_t) c->superclass;
      MAYBE_MARK (p, mark_stack_ptr, mark_stack_limit, c, c4label);
      for (int i = 0; i < c->constants.size; ++i)
        {
          /* FIXME: We could make this more precise by using the tags -KKT */
          p = (ptr_t) c->constants.data[i].p;
          MAYBE_MARK (p, mark_stack_ptr, mark_stack_limit, c, c5label);
        }

#ifdef INTERPRETER
      if (_Jv_IsInterpretedClass (c))
        {
          p = (ptr_t) c->constants.tags;
          MAYBE_MARK (p, mark_stack_ptr, mark_stack_limit, c, c5alabel);
          p = (ptr_t) c->constants.data;
          MAYBE_MARK (p, mark_stack_ptr, mark_stack_limit, c, c5blabel);
          p = (ptr_t) c->vtable;
          MAYBE_MARK (p, mark_stack_ptr, mark_stack_limit, c, c5clabel);
        }
#endif

      // If the class is an array, then the methods field holds a
      // pointer to the element class.  If the class is primitive,
      // then the methods field holds a pointer to the array class.
      p = (ptr_t) c->methods;
      MAYBE_MARK (p, mark_stack_ptr, mark_stack_limit, c, c6label);

      // The vtable might have been set, but the rest of the class
      // could still be uninitialized.  If this is the case, then
      // c.isArray will SEGV.  We check for this, and if it is the
      // case we just return.
      if (__builtin_expect (c->name == NULL, false))
        return mark_stack_ptr;

      if (! c->isArray() && ! c->isPrimitive())
        {
          // Scan each method in the cases where `methods' really
          // points to a methods structure.
          for (int i = 0; i < c->method_count; ++i)
            {
              p = (ptr_t) c->methods[i].name;
              MAYBE_MARK (p, mark_stack_ptr, mark_stack_limit, c,
                          cm1label);
              p = (ptr_t) c->methods[i].signature;
              MAYBE_MARK (p, mark_stack_ptr, mark_stack_limit, c,
                          cm2label);
            }
        }

      // Mark all the fields.
      p = (ptr_t) c->fields;
      MAYBE_MARK (p, mark_stack_ptr, mark_stack_limit, c, c8label);
      for (int i = 0; i < c->field_count; ++i)
        {
          _Jv_Field* field = &c->fields[i];

#ifndef COMPACT_FIELDS
          p = (ptr_t) field->name;
          MAYBE_MARK (p, mark_stack_ptr, mark_stack_limit, c, c8alabel);
#endif
          p = (ptr_t) field->type;
          MAYBE_MARK (p, mark_stack_ptr, mark_stack_limit, c, c8blabel);

          // For the interpreter, we also need to mark the memory
          // containing static members.
          if ((field->flags & java::lang::reflect::Modifier::STATIC))
            {
              p = (ptr_t) field->u.addr;
              MAYBE_MARK (p, mark_stack_ptr, mark_stack_limit, c, c8clabel);

              // Also, if the static member is a reference, mark the
              // value it points to as well.  We check isResolved since
              // marking can happen before memory is allocated for
              // static members.
              if (JvFieldIsRef (field) && field->isResolved())
                {
                  jobject val = *(jobject*) field->u.addr;
                  p = (ptr_t) val;
                  MAYBE_MARK (p, mark_stack_ptr, mark_stack_limit,
                              c, c8elabel);
                }
            }
        }

      p = (ptr_t) c->vtable;
      MAYBE_MARK (p, mark_stack_ptr, mark_stack_limit, c, c9label);
      p = (ptr_t) c->interfaces;
      MAYBE_MARK (p, mark_stack_ptr, mark_stack_limit, c, cAlabel);
      for (int i = 0; i < c->interface_count; ++i)
        {
          p = (ptr_t) c->interfaces[i];
          MAYBE_MARK (p, mark_stack_ptr, mark_stack_limit, c, cClabel);
        }
      p = (ptr_t) c->loader;
      MAYBE_MARK (p, mark_stack_ptr, mark_stack_limit, c, cBlabel);
      p = (ptr_t) c->arrayclass;
      MAYBE_MARK (p, mark_stack_ptr, mark_stack_limit, c, cDlabel);
      p = (ptr_t) c->protectionDomain;
      MAYBE_MARK (p, mark_stack_ptr, mark_stack_limit, c, cPlabel);

#ifdef INTERPRETER
      if (_Jv_IsInterpretedClass (c))
        {
          _Jv_InterpClass* ic = (_Jv_InterpClass*) c;

          p = (ptr_t) ic->interpreted_methods;
          MAYBE_MARK (p, mark_stack_ptr, mark_stack_limit, ic, cElabel);

          for (int i = 0; i < c->method_count; i++)
            {
              p = (ptr_t) ic->interpreted_methods[i];
              MAYBE_MARK (p, mark_stack_ptr, mark_stack_limit, ic,
                          cFlabel);

              // Mark the direct-threaded code.
              if ((c->methods[i].accflags
                   & java::lang::reflect::Modifier::NATIVE) == 0)
                {
                  _Jv_InterpMethod *im
                    = (_Jv_InterpMethod *) ic->interpreted_methods[i];
                  if (im)
                    {
                      p = (ptr_t) im->prepared;
                      MAYBE_MARK (p, mark_stack_ptr, mark_stack_limit, ic,
                                  cFlabel);
                    }
                }

              // The interpreter installs a heap-allocated trampoline
              // here, so we'll mark it.
              p = (ptr_t) c->methods[i].ncode;
              MAYBE_MARK (p, mark_stack_ptr, mark_stack_limit, c,
                          cm3label);
            }

          p = (ptr_t) ic->field_initializers;
          MAYBE_MARK (p, mark_stack_ptr, mark_stack_limit, ic, cGlabel);

        }
#endif

    }
  else
    {
      // NOTE: each class only holds information about the class
      // itself.  So we must do the marking for the entire inheritance
      // tree in order to mark all fields.  FIXME: what about
      // interfaces?  We skip Object here, because Object only has a
      // sync_info, and we handled that earlier.
      // Note: occasionally `klass' can be null.  For instance, this
      // can happen if a GC occurs between the point where an object
      // is allocated and where the vtbl slot is set.
      while (klass && klass != &java::lang::Object::class$)
        {
          jfieldID field = JvGetFirstInstanceField (klass);
          jint max = JvNumInstanceFields (klass);

          for (int i = 0; i < max; ++i)
            {
              if (JvFieldIsRef (field))
                {
                  jobject val = JvGetObjectField (obj, field);
                  p = (ptr_t) val;
                  MAYBE_MARK (p, mark_stack_ptr, mark_stack_limit,
                              obj, elabel);
                }
              field = field->getNextField ();
            }
          klass = klass->getSuperclass();
        }
    }

  return mark_stack_ptr;
}

// This is called by the GC during the mark phase.  It marks a Java
// array (of objects).  We use `void *' arguments and return, and not
// what the Boehm GC wants, to avoid pollution in our headers.
void *
_Jv_MarkArray (void *addr, void *msp, void *msl, void * /*env*/)
{
  mse *mark_stack_ptr = (mse *) msp;
  mse *mark_stack_limit = (mse *) msl;
  jobjectArray array = (jobjectArray) addr;

  _Jv_VTable *dt = *(_Jv_VTable **) addr;
  // Assumes size >= 3 words.  That's currently true since arrays have
  // a vtable, sync pointer, and size.  If the sync pointer goes away,
  // we may need to round up the size.
  if (__builtin_expect (! dt || !(dt -> get_finalizer()), false))
    return mark_stack_ptr;
  jclass klass = dt->clas;
  ptr_t p;

# ifndef JV_HASH_SYNCHRONIZATION
  // Every object has a sync_info pointer.
  p = (ptr_t) array->sync_info;
  MAYBE_MARK (p, mark_stack_ptr, mark_stack_limit, array, e1label);
# endif
  // Mark the object's class.
  p = (ptr_t) klass;
  MAYBE_MARK (p, mark_stack_ptr, mark_stack_limit, &(dt -> clas), o2label);

  for (int i = 0; i < JvGetArrayLength (array); ++i)
    {
      jobject obj = elements (array)[i];
      p = (ptr_t) obj;
      MAYBE_MARK (p, mark_stack_ptr, mark_stack_limit, array, e2label);
    }

  return mark_stack_ptr;
}

// Generate a GC marking descriptor for a class.
//
// We assume that the gcj mark proc has index 0.  This is a dubious
// assumption, since another one could be registered first.  But the
// compiler also knows this, so in that case everything else will break, too.
#define GCJ_DEFAULT_DESCR GC_MAKE_PROC(GC_GCJ_RESERVED_MARK_PROC_INDEX,0)
void *
_Jv_BuildGCDescr(jclass)
{
  /* FIXME: We should really look at the class and build the descriptor.  */
  return (void *)(GCJ_DEFAULT_DESCR);
}
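
// Note that GCJ_DEFAULT_DESCR is a procedure descriptor: every class
// currently gets a descriptor telling the collector to call the mark
// procedure registered at the reserved gcj index (_Jv_MarkObj, installed
// by GC_init_gcj_malloc in _Jv_InitGC below) rather than a per-class
// bitmap of reference fields as the FIXME above envisions.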

// Allocate some space that is known to be pointer-free.
void *
_Jv_AllocBytes (jsize size)
{
  void *r = GC_MALLOC_ATOMIC (size);
  // We have to explicitly zero memory here, as the GC doesn't
  // guarantee that PTRFREE allocations are zeroed.  Note that we
  // don't have to do this for other allocation types because we set
  // the `ok_init' flag in the type descriptor.
  memset (r, 0, size);
  return r;
}
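
// For illustration only (hypothetical caller): storage known to hold no
// references, e.g. a buffer of primitive data, can be obtained this way
// and will not be scanned for pointers by the collector:
//
//   jbyte *buf = (jbyte *) _Jv_AllocBytes (count * sizeof (jbyte));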

// Allocate space for a new Java array.
// Used only for arrays of objects.
void *
_Jv_AllocArray (jsize size, jclass klass)
{
  void *obj;
  const jsize min_heap_addr = 16*1024;
  // A heuristic.  If size is less than this value, the size
  // stored in the array can't possibly be misinterpreted as
  // a pointer.  Thus we lose nothing by scanning the object
  // completely conservatively, since no misidentification can
  // take place.

#ifdef GC_DEBUG
  // There isn't much to lose by scanning this conservatively.
  // If we didn't, the mark proc would have to understand that
  // it needed to skip the header.
  obj = GC_MALLOC(size);
#else
  if (size < min_heap_addr)
    obj = GC_MALLOC(size);
  else
    obj = GC_generic_malloc (size, array_kind_x);
#endif
  *((_Jv_VTable **) obj) = klass->vtable;
  return obj;
}

/* Allocate space for a new non-Java object, which does not have the usual
   Java object header but may contain pointers to other GC'ed objects.  */
void *
_Jv_AllocRawObj (jsize size)
{
  return (void *) GC_MALLOC (size);
}

static void
call_finalizer (GC_PTR obj, GC_PTR client_data)
{
  _Jv_FinalizerFunc *fn = (_Jv_FinalizerFunc *) client_data;
  jobject jobj = (jobject) obj;

  (*fn) (jobj);
}

void
_Jv_RegisterFinalizer (void *object, _Jv_FinalizerFunc *meth)
{
  GC_REGISTER_FINALIZER_NO_ORDER (object, call_finalizer, (GC_PTR) meth,
                                  NULL, NULL);
}
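
// Illustrative use only, with a hypothetical finalizer function: the
// collector will run the registered function (at most once, via
// call_finalizer above) when the object is found to be unreachable.
//
//   static void release_native_state (jobject obj) { /* clean up */ }
//   ...
//   _Jv_RegisterFinalizer (obj, release_native_state);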

void
_Jv_RunFinalizers (void)
{
  GC_invoke_finalizers ();
}

void
_Jv_RunAllFinalizers (void)
{
  GC_finalize_all ();
}

void
_Jv_RunGC (void)
{
  GC_gcollect ();
}

long
_Jv_GCTotalMemory (void)
{
  return GC_get_heap_size ();
}

long
_Jv_GCFreeMemory (void)
{
  return GC_get_free_bytes ();
}

void
_Jv_GCSetInitialHeapSize (size_t size)
{
  size_t current = GC_get_heap_size ();
  if (size > current)
    GC_expand_hp (size - current);
}
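
// For example, if the heap currently occupies 1 MB and a 4 MB initial heap
// is requested, the heap is expanded by 3 MB; a request at or below the
// current size is a no-op.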

void
_Jv_GCSetMaximumHeapSize (size_t size)
{
  GC_set_max_heap_size ((GC_word) size);
}

// From boehm's misc.c
extern "C" void GC_enable();
extern "C" void GC_disable();

void
_Jv_DisableGC (void)
{
  _Jv_MutexLock (&disable_gc_mutex);
  GC_disable();
  _Jv_MutexUnlock (&disable_gc_mutex);
}

void
_Jv_EnableGC (void)
{
  _Jv_MutexLock (&disable_gc_mutex);
  GC_enable();
  _Jv_MutexUnlock (&disable_gc_mutex);
}

static void * handle_out_of_memory(size_t)
{
  _Jv_ThrowNoMemory();
}

void
_Jv_InitGC (void)
{
  int proc;

  // Ignore pointers that do not point to the start of an object.
  GC_all_interior_pointers = 0;

  // Configure the collector to use the bitmap marking descriptors that we
  // stash in the class vtable.
  GC_init_gcj_malloc (0, (void *) _Jv_MarkObj);

  // Cause an out of memory error to be thrown from the allocators,
  // instead of returning 0.  This is cheaper than checking on allocation.
  GC_oom_fn = handle_out_of_memory;

  GC_java_finalization = 1;

  // We use a different mark procedure for object arrays.  This code
  // configures a different object `kind' for object array allocation and
  // marking.  FIXME: see above.
  array_free_list = (ptr_t *) GC_generic_malloc_inner ((MAXOBJSZ + 1)
                                                       * sizeof (ptr_t),
                                                       PTRFREE);
  memset (array_free_list, 0, (MAXOBJSZ + 1) * sizeof (ptr_t));

  proc = GC_n_mark_procs++;
  GC_mark_procs[proc] = (GC_mark_proc) _Jv_MarkArray;

  array_kind_x = GC_n_kinds++;
  GC_obj_kinds[array_kind_x].ok_freelist = array_free_list;
  GC_obj_kinds[array_kind_x].ok_reclaim_list = 0;
  GC_obj_kinds[array_kind_x].ok_descriptor = GC_MAKE_PROC (proc, 0);
  GC_obj_kinds[array_kind_x].ok_relocate_descr = FALSE;
  GC_obj_kinds[array_kind_x].ok_init = TRUE;

  _Jv_MutexInit (&disable_gc_mutex);
}
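
// As a result, object arrays allocated through GC_generic_malloc with
// array_kind_x (see _Jv_AllocArray above) are traced by _Jv_MarkArray via
// the procedure descriptor installed here, while ordinary objects go
// through the gcj machinery set up by GC_init_gcj_malloc and _Jv_MarkObj.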

#ifdef JV_HASH_SYNCHRONIZATION
// Allocate an object with a fake vtable pointer, which causes only
// the first field (beyond the fake vtable pointer) to be traced.
// Eventually this should probably be generalized.

static _Jv_VTable trace_one_vtable = {
  0,                            // class pointer
  (void *)(2 * sizeof(void *)),
                                // descriptor; scan 2 words incl. vtable ptr.
                                // Least significant bits must be zero to
                                // identify this as a length descriptor
  {0}                           // First method
};
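
// For illustration: on a 32-bit target the descriptor above is
// 2 * sizeof (void *) == 8.  Its low-order bits are zero, so the collector
// treats it as a simple length descriptor and conservatively scans only
// the first 8 bytes of the object, i.e. the vtable slot plus one field.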

void *
_Jv_AllocTraceOne (jsize size /* includes vtable slot */)
{
  return GC_GCJ_MALLOC (size, &trace_one_vtable);
}

// Ditto for two words: the first two fields (beyond the fake vtable
// pointer) are traced.  Eventually this should probably be generalized.

static _Jv_VTable trace_two_vtable =
{
  0,                            // class pointer
  (void *)(3 * sizeof(void *)),
                                // descriptor; scan 3 words incl. vtable ptr.
  {0}                           // First method
};

void *
_Jv_AllocTraceTwo (jsize size /* includes vtable slot */)
{
  return GC_GCJ_MALLOC (size, &trace_two_vtable);
}

#endif /* JV_HASH_SYNCHRONIZATION */

void
_Jv_GCInitializeFinalizers (void (*notifier) (void))
{
  GC_finalize_on_demand = 1;
  GC_finalizer_notifier = notifier;
}

void
_Jv_GCRegisterDisappearingLink (jobject *objp)
{
  GC_general_register_disappearing_link ((GC_PTR *) objp, (GC_PTR) *objp);
}
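
// Once the object *objp refers to is no longer otherwise reachable, the
// collector clears *objp to null; this is the building block the runtime
// can use for weak-reference-style behaviour.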

jboolean
_Jv_GCCanReclaimSoftReference (jobject)
{
  // For now, always reclaim soft references.  FIXME.
  return true;
}